ONNX Backends for Shape Inference

Backend class: OnnxInferenceBackendShape, exposed by the module mlprodict.onnxrt.backend_shape.
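
The backend is meant to be driven through the standard onnx.backend API: prepare compiles a model and run executes it on concrete inputs. The snippet below is only a minimal sketch of that call sequence, assuming a tiny hand-built Abs model; since this backend validates shape inference rather than a full computation, the exact content of the returned result depends on the backend implementation.

    import numpy
    from onnx import TensorProto, helper
    import mlprodict.onnxrt.backend_shape as backend

    # a tiny hypothetical model computing Y = Abs(X), only used to illustrate the API
    X = helper.make_tensor_value_info('X', TensorProto.FLOAT, [None, 2])
    Y = helper.make_tensor_value_info('Y', TensorProto.FLOAT, [None, 2])
    node = helper.make_node('Abs', ['X'], ['Y'])
    graph = helper.make_graph([node], 'example', [X], [Y])
    model = helper.make_model(graph, opset_imports=[helper.make_opsetid('', 15)])

    # standard onnx.backend entry points: prepare once, then run
    rep = backend.prepare(model, 'CPU')
    print(rep.run([numpy.array([[-1.5, 2.0]], dtype=numpy.float32)]))

The script below exercises the same backend entry points for every test of the ONNX backend test suite and reports which node tests pass.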

<<<

import unittest
import sys
from datetime import datetime
from contextlib import redirect_stdout, redirect_stderr
from io import StringIO
from onnx.backend.test import BackendTest
from onnx import __version__ as onnx_version
from onnxruntime import __version__ as ort_version
from numpy import __version__ as npy_version
import mlprodict.onnxrt.backend_shape as backend

# register every ONNX backend test for this backend,
# keep only the CPU tests and skip the heavy model zoo tests
back_test = BackendTest(backend, __name__)
back_test.include('.*_cpu')
back_test.exclude('.*_blvc_.*')
back_test.exclude('.*_densenet_.*')
back_test.exclude('.*_densenet121_.*')
back_test.exclude('.*_inception_.*')
back_test.exclude('.*_resnet50_.*')
back_test.exclude('.*_shufflenet_.*')
back_test.exclude('.*_squeezenet_.*')
back_test.exclude('.*_vgg19_.*')
back_test.exclude('.*_zfnet512_.*')
# expose the generated test cases so that unittest can discover them
globals().update(back_test.enable_report().test_cases)

print('---------------------------------')
print('python', sys.version)
print('onnx', onnx_version)
print('onnxruntime', ort_version)
print('numpy', npy_version)
print('---------------------------------')
print(datetime.now(), "BEGIN")
print('---------------------------------')

# capture the verbose unittest output so that skipped tests can be filtered out below
buffer = StringIO()
with redirect_stdout(buffer), redirect_stderr(buffer):
    res = unittest.main(verbosity=2, exit=False)

# gather statistics from the unittest result
testsRun = res.result.testsRun
errors = len(res.result.errors)
skipped = len(res.result.skipped)
unexpectedSuccesses = len(res.result.unexpectedSuccesses)
expectedFailures = len(res.result.expectedFailures)

print('---------------------------------')
print(datetime.now(), "END")
print('---------------------------------')

print("testsRun=%d errors=%d skipped=%d" % (testsRun, errors, skipped))
print("unexpectedSuccesses=%d expectedFailures=%d" % (
    unexpectedSuccesses, expectedFailures))
ran = testsRun - skipped
# share of executed tests which did not end with an error
print("ratio=%f" % (1 - errors * 1.0 / ran))
print('---------------------------------')
lines = buffer.getvalue().split('\n')
# drop the lines produced by tests skipped because they do not match the include pattern
print("\n".join(line for line in lines
      if "skipped 'no matched include pattern'" not in line))

>>>

    ---------------------------------
    python 3.9.1 (default, Jan 18 2021, 16:35:58) 
    [GCC 8.3.0]
    onnx 1.13.0
    onnxruntime 1.13.1
    numpy 1.23.5
    ---------------------------------
    2023-02-04 07:14:05.593666 BEGIN
    ---------------------------------
    ---------------------------------
    2023-02-04 07:14:16.529855 END
    ---------------------------------
    testsRun=2492 errors=965 skipped=1254
    unexpectedSuccesses=0 expectedFailures=0
    ratio=0.220517
    ---------------------------------
    test_abs_cpu (__main__.OnnxBackendNodeModelTest) ... somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/npy/xop.py:17: DeprecationWarning: Please use `coo_matrix` from the `scipy.sparse` namespace, the `scipy.sparse.coo` namespace is deprecated.
      from scipy.sparse.coo import coo_matrix
    somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/_op_numpy_helper.py:8: DeprecationWarning: Please use `coo_matrix` from the `scipy.sparse` namespace, the `scipy.sparse.coo` namespace is deprecated.
      from scipy.sparse.coo import coo_matrix
    somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py:207: DeprecationWarning: `np.object` is a deprecated alias for the builtin `object`. To silence this warning, use `object` by itself. Doing this will not modify any behavior and is safe. 
    Deprecated in NumPy 1.20; for more details and guidance: https://numpy.org/devdocs/release/1.20.0-notes.html#deprecations
      if ref_outputs[i].dtype == np.object:
    ok
    test_acos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_acos_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_acosh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_acosh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_adagrad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_adagrad_multiple_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_adam_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_adam_multiple_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_add_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_add_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_add_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_and2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_and3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_and4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_and_bcast3v1d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_and_bcast3v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_and_bcast4v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_and_bcast4v3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_and_bcast4v4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_argmax_default_axis_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmax_default_axis_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmax_default_axis_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmax_default_axis_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmax_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmax_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmax_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmax_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmax_negative_axis_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmax_negative_axis_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmax_negative_axis_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmax_negative_axis_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmax_no_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmax_no_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmax_no_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmax_no_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmin_default_axis_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmin_default_axis_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmin_default_axis_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmin_default_axis_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmin_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmin_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmin_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmin_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmin_negative_axis_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmin_negative_axis_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmin_negative_axis_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmin_negative_axis_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmin_no_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmin_no_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmin_no_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_argmin_no_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_asin_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_asin_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_asinh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_asinh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_atan_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_atan_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_atanh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_atanh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_averagepool_1d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_averagepool_2d_ceil_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_averagepool_2d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_averagepool_2d_pads_count_include_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_averagepool_2d_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_averagepool_2d_precomputed_pads_count_include_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_averagepool_2d_precomputed_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_averagepool_2d_precomputed_same_upper_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_averagepool_2d_precomputed_strides_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_averagepool_2d_same_lower_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_averagepool_2d_same_upper_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_averagepool_2d_strides_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_averagepool_3d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_basic_conv_with_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_basic_conv_without_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_batchnorm_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_batchnorm_epsilon_training_mode_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_batchnorm_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_batchnorm_example_training_mode_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bernoulli_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bernoulli_double_cpu (__main__.OnnxBackendNodeModelTest) ... somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_random.py:69: DeprecationWarning: `mapping.TENSOR_TYPE_TO_NP_TYPE` is now deprecated and will be removed in the next release or so.To silence this warning, please use `helper.{self._future_function}` instead.
      TENSOR_TYPE_TO_NP_TYPE[self.dtype] if self.dtype > 0
    ERROR
    test_bernoulli_double_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_random.py:138: DeprecationWarning: `mapping.TENSOR_TYPE_TO_NP_TYPE` is now deprecated and will be removed in the next release or so.To silence this warning, please use `helper.{self._future_function}` instead.
      None if self.dtype == 0 else TENSOR_TYPE_TO_NP_TYPE[self.dtype])
    somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_cast.py:26: DeprecationWarning: `mapping.TENSOR_TYPE_TO_NP_TYPE` is now deprecated and will be removed in the next release or so.To silence this warning, please use `helper.{self._future_function}` instead.
      self._dtype = TENSOR_TYPE_TO_NP_TYPE[self.to]
    ERROR
    test_bernoulli_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bernoulli_seed_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bernoulli_seed_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitshift_left_uint16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitshift_left_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitshift_left_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitshift_left_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitshift_right_uint16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitshift_right_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitshift_right_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitshift_right_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_and_i16_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_and_i32_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_and_ui64_bcast_3v1d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_and_ui8_bcast_4v3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_not_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_not_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_not_4d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_or_i16_4d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_or_i32_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_or_ui64_bcast_3v1d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_or_ui8_bcast_4v3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_xor_i16_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_xor_i32_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_xor_ui64_bcast_3v1d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_bitwise_xor_ui8_bcast_4v3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_blackmanwindow_cpu (__main__.OnnxBackendNodeModelTest) ... somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_window.py:48: DeprecationWarning: `mapping.TENSOR_TYPE_TO_NP_TYPE` is now deprecated and will be removed in the next release or so.To silence this warning, please use `helper.{self._future_function}` instead.
      self.dtype = TENSOR_TYPE_TO_NP_TYPE[self.output_datatype]
    ERROR
    test_blackmanwindow_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_blackmanwindow_symmetric_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_blackmanwindow_symmetric_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cast_BFLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cast_DOUBLE_to_FLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cast_DOUBLE_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cast_FLOAT16_to_DOUBLE_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cast_FLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cast_FLOAT_to_BFLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cast_FLOAT_to_DOUBLE_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cast_FLOAT_to_FLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cast_FLOAT_to_STRING_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cast_STRING_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_castlike_BFLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_castlike_BFLOAT16_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_castlike_DOUBLE_to_FLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_DOUBLE_to_FLOAT16_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_castlike_DOUBLE_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_DOUBLE_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_castlike_FLOAT16_to_DOUBLE_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_FLOAT16_to_DOUBLE_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_castlike_FLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_FLOAT16_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_castlike_FLOAT_to_BFLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_castlike_FLOAT_to_BFLOAT16_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_castlike_FLOAT_to_DOUBLE_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_FLOAT_to_DOUBLE_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_castlike_FLOAT_to_FLOAT16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_FLOAT_to_FLOAT16_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_castlike_FLOAT_to_STRING_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_castlike_FLOAT_to_STRING_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_castlike_STRING_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_castlike_STRING_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_ceil_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_ceil_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_celu_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_celu_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_crop_and_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_crop_and_pad_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_crop_axes_chw_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_crop_axes_chw_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_crop_axes_hwc_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_crop_axes_hwc_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_crop_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_crop_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_center_crop_pad_pad_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_clip_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_inbounds_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_inbounds_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_int8_inbounds_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_int8_inbounds_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_int8_max_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_int8_max_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_clip_default_int8_min_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_int8_min_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_clip_default_max_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_max_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_clip_default_min_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_default_min_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_clip_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_clip_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_clip_inbounds_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_inbounds_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_clip_outbounds_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_outbounds_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_clip_splitbounds_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_clip_splitbounds_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_col2im_5d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_col2im_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_col2im_dilations_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_col2im_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_col2im_strides_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_compress_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_compress_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_compress_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_compress_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_concat_1d_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_concat_1d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_concat_2d_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_concat_2d_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_concat_2d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_concat_2d_axis_negative_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_concat_3d_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_concat_3d_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_concat_3d_axis_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_concat_3d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_concat_3d_axis_negative_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_concat_3d_axis_negative_3_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_constant_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_constant_pad_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_constant_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_constantofshape_float_ones_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_constantofshape_int_shape_zero_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_constantofshape_int_zeros_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_conv_with_autopad_same_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_conv_with_strides_and_asymmetric_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_conv_with_strides_no_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_conv_with_strides_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_convinteger_with_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_convinteger_without_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_convtranspose_1d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_convtranspose_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_convtranspose_autopad_same_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_convtranspose_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_convtranspose_dilations_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_convtranspose_kernel_shape_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_convtranspose_output_shape_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_convtranspose_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_convtranspose_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cos_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cosh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cosh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_cumsum_1d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cumsum_1d_exclusive_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cumsum_1d_reverse_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cumsum_1d_reverse_exclusive_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cumsum_2d_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cumsum_2d_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_cumsum_2d_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_depthtospace_crd_mode_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_depthtospace_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_dequantizelinear_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_dequantizelinear_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_det_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_det_nd_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_dft_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_dft_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_dft_inverse_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_div_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_div_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_div_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_div_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_dropout_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_dropout_default_mask_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_dropout_default_mask_ratio_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_dropout_default_old_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_dropout_default_ratio_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_dropout_random_old_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_dynamicquantizelinear_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_dynamicquantizelinear_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_dynamicquantizelinear_max_adjusted_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_dynamicquantizelinear_max_adjusted_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_dynamicquantizelinear_min_adjusted_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_dynamicquantizelinear_min_adjusted_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_edge_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_einsum_batch_diagonal_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_einsum_batch_matmul_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_einsum_inner_prod_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_einsum_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_einsum_transpose_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_elu_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_elu_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_elu_default_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_elu_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_elu_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_elu_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_equal_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_equal_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_erf_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_exp_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_exp_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_expand_dim_changed_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_expand_dim_unchanged_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_eyelike_populate_off_main_diagonal_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_eyelike_with_dtype_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_eyelike_without_dtype_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_flatten_axis0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_flatten_axis1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_flatten_axis2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_flatten_axis3_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_flatten_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_flatten_negative_axis1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_flatten_negative_axis2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_flatten_negative_axis3_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_flatten_negative_axis4_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_floor_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_floor_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gather_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gather_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gather_2d_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gather_elements_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gather_elements_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gather_elements_negative_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gather_negative_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gathernd_example_float32_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gathernd_example_int32_batch_dim1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gathernd_example_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gemm_all_attributes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gemm_alpha_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gemm_beta_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gemm_default_matrix_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gemm_default_no_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gemm_default_scalar_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gemm_default_single_elem_vector_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gemm_default_vector_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gemm_default_zero_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gemm_transposeA_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gemm_transposeB_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_globalaveragepool_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_globalaveragepool_precomputed_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_globalmaxpool_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_globalmaxpool_precomputed_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_greater_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_greater_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_greater_equal_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_greater_equal_bcast_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_greater_equal_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_greater_equal_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_gridsample_aligncorners_true_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gridsample_bicubic_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gridsample_bilinear_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gridsample_border_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gridsample_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gridsample_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gridsample_reflection_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gridsample_zeros_padding_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_group_normalization_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_group_normalization_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_group_normalization_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_group_normalization_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gru_batchwise_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gru_defaults_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gru_seq_length_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_gru_with_initial_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_hammingwindow_cpu (__main__.OnnxBackendNodeModelTest) ... somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_window.py:107: DeprecationWarning: `mapping.TENSOR_TYPE_TO_NP_TYPE` is now deprecated and will be removed in the next release or so.To silence this warning, please use `helper.{self._future_function}` instead.
      self.dtype = TENSOR_TYPE_TO_NP_TYPE[self.output_datatype]
    ERROR
    test_hammingwindow_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_hammingwindow_symmetric_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_hammingwindow_symmetric_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_hannwindow_cpu (__main__.OnnxBackendNodeModelTest) ... somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_window.py:80: DeprecationWarning: `mapping.TENSOR_TYPE_TO_NP_TYPE` is now deprecated and will be removed in the next release or so.To silence this warning, please use `helper.{self._future_function}` instead.
      self.dtype = TENSOR_TYPE_TO_NP_TYPE[self.output_datatype]
    ERROR
    test_hannwindow_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_hannwindow_symmetric_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_hannwindow_symmetric_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_hardmax_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardmax_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardmax_axis_2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardmax_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardmax_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardmax_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardmax_one_hot_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardsigmoid_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardsigmoid_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardsigmoid_default_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_hardsigmoid_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardsigmoid_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_hardsigmoid_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_hardswish_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_hardswish_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_identity_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_identity_opt_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_identity_sequence_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_if_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_if_opt_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_if_seq_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_instancenorm_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_instancenorm_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_isinf_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_isinf_negative_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_isinf_positive_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_isnan_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_layer_normalization_2d_axis0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_2d_axis0_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_2d_axis0_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_2d_axis1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_2d_axis1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_2d_axis1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_2d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_2d_axis_negative_1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_2d_axis_negative_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_2d_axis_negative_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_2d_axis_negative_2_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_2d_axis_negative_2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis0_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis0_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis0_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis1_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis1_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis1_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis2_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis2_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis2_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis_negative_1_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis_negative_1_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis_negative_1_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis_negative_2_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis_negative_2_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis_negative_2_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis_negative_3_epsilon_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis_negative_3_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_3d_axis_negative_3_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis0_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis0_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis2_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis3_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis3_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis3_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis_negative_1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis_negative_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis_negative_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis_negative_2_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis_negative_2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis_negative_3_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis_negative_3_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis_negative_3_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis_negative_4_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis_negative_4_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_4d_axis_negative_4_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_default_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_layer_normalization_default_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_leakyrelu_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_leakyrelu_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_leakyrelu_default_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_leakyrelu_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_leakyrelu_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_leakyrelu_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_less_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_less_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_less_equal_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_less_equal_bcast_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_less_equal_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_less_equal_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_log_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_log_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_axis_0_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_axis_0_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_axis_1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_axis_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_axis_2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_axis_2_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_axis_2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_default_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_default_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_example_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_example_1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_example_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_large_number_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_large_number_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_large_number_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_logsoftmax_negative_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_logsoftmax_negative_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_loop11_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_loop13_seq_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_loop16_seq_none_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_lrn_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_lrn_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_lstm_batchwise_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_lstm_defaults_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_lstm_with_initial_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_lstm_with_peepholes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_matmul_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_matmul_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_matmul_4d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_matmulinteger_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_max_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_float16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_float32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_float64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_int16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_int64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_int8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_one_input_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_two_inputs_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_uint16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_max_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_maxpool_1d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_maxpool_2d_ceil_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_maxpool_2d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_maxpool_2d_dilations_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_maxpool_2d_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_maxpool_2d_precomputed_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_maxpool_2d_precomputed_same_upper_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_maxpool_2d_precomputed_strides_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_maxpool_2d_same_lower_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_maxpool_2d_same_upper_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_maxpool_2d_strides_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_maxpool_2d_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_maxpool_3d_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_maxpool_with_argmax_2d_precomputed_pads_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_maxpool_with_argmax_2d_precomputed_strides_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_maxunpool_export_with_output_shape_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_maxunpool_export_without_output_shape_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_mean_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_mean_one_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_mean_two_inputs_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_melweightmatrix_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_min_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_float16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_float32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_float64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_int16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_int64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_int8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_one_input_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_two_inputs_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_uint16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_min_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mish_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mish_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_broadcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_int64_fmod_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_mod_mixed_sign_float16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_mixed_sign_float32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_mixed_sign_float64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_mixed_sign_int16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_mixed_sign_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_mixed_sign_int64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_mixed_sign_int8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_uint16_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mod_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_momentum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_momentum_multiple_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_mul_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mul_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mul_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mul_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_mvn_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_mvn_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_mvn_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_neg_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_nesterov_momentum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NC_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NC_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1_mean_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1_mean_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1_weight_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1_weight_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2_no_weight_reduction_mean_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2_no_weight_reduction_mean_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2_reduction_mean_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2_reduction_mean_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2_reduction_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2_reduction_sum_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2_with_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2_with_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2_with_weight_reduction_mean_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2_with_weight_reduction_mean_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2_with_weight_reduction_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2_with_weight_reduction_sum_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2_with_weight_reduction_sum_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2_with_weight_reduction_sum_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2d3_none_no_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2d3_none_no_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2d3_sum_weight_high_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2d3_sum_weight_high_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2d3d4d5_mean_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2d3d4d5_mean_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2d3d4d5_none_no_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nllloss_NCd1d2d3d4d5_none_no_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nonmaxsuppression_center_point_box_format_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nonmaxsuppression_flipped_coordinates_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nonmaxsuppression_identical_boxes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nonmaxsuppression_limit_output_size_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nonmaxsuppression_single_box_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nonmaxsuppression_suppress_by_IOU_and_scores_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nonmaxsuppression_suppress_by_IOU_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nonmaxsuppression_two_batches_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nonmaxsuppression_two_classes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_nonzero_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_not_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_not_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_not_4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_onehot_negative_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_onehot_with_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_onehot_with_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_onehot_without_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_get_element_optional_sequence_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_get_element_optional_tensor_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_get_element_sequence_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_get_element_tensor_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_has_element_empty_no_input_name_optional_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_has_element_empty_no_input_name_tensor_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_has_element_empty_no_input_optional_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_has_element_empty_no_input_tensor_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_has_element_empty_optional_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_has_element_optional_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_optional_has_element_tensor_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_or2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_or3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_or4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_or_bcast3v1d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_or_bcast3v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_or_bcast4v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_or_bcast4v3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_or_bcast4v4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_bcast_array_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_bcast_scalar_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_pow_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_types_float32_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_types_float32_int64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_types_float32_uint32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_types_float32_uint64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_types_int32_float32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_types_int32_int32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_types_int64_float32_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_pow_types_int64_int64_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_prelu_broadcast_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_prelu_broadcast_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_prelu_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_prelu_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_qlinearconv_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_qlinearmatmul_2D_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_qlinearmatmul_3D_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_quantizelinear_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_quantizelinear_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_range_float_type_positive_delta_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_range_float_type_positive_delta_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_range_int32_type_negative_delta_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_range_int32_type_negative_delta_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reciprocal_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reciprocal_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_reduce_l1_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_default_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_default_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_do_not_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_do_not_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_keep_dims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_keep_dims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_negative_axes_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_negative_axes_keep_dims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_negative_axes_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l1_negative_axes_keep_dims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_default_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_default_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_do_not_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_do_not_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_keep_dims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_keep_dims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_negative_axes_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_negative_axes_keep_dims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_negative_axes_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_l2_negative_axes_keep_dims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_asc_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_asc_axes_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_default_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_desc_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_desc_axes_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_default_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_default_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_do_not_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_do_not_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_negative_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_exp_negative_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_negative_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_log_sum_negative_axes_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_max_default_axes_keepdim_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_max_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_max_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_max_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_max_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_max_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_max_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_max_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_mean_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_mean_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_mean_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_mean_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_mean_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_mean_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_mean_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_mean_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_min_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_min_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_min_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_min_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_min_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_min_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_min_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_min_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_prod_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_prod_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_prod_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_prod_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_prod_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_prod_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_prod_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_prod_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_empty_axes_input_noop_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_default_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_default_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_do_not_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_do_not_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_negative_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reduce_sum_square_negative_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reflect_pad_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_relu_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_relu_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reshape_allowzero_reordered_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reshape_extended_dims_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reshape_negative_dim_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reshape_negative_extended_dims_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reshape_one_dim_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reshape_reduced_dims_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reshape_reordered_all_dims_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reshape_reordered_last_dims_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reshape_zero_and_negative_dim_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reshape_zero_dim_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_scales_cubic_A_n0p5_exclude_outside_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_scales_cubic_align_corners_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_scales_cubic_antialias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_scales_cubic_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_scales_linear_align_corners_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_scales_linear_antialias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_scales_linear_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_scales_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_sizes_cubic_antialias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_sizes_cubic_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_sizes_linear_antialias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_sizes_linear_pytorch_half_pixel_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_sizes_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_sizes_nearest_not_larger_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_downsample_sizes_nearest_not_smaller_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_tf_crop_and_resize_axes_2_3_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_tf_crop_and_resize_axes_3_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_tf_crop_and_resize_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_cubic_A_n0p5_exclude_outside_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_cubic_align_corners_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_cubic_asymmetric_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_cubic_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_linear_align_corners_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_linear_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_nearest_axes_2_3_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_nearest_axes_3_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_scales_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_sizes_cubic_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_sizes_nearest_axes_2_3_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_sizes_nearest_axes_3_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_sizes_nearest_ceil_half_pixel_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_sizes_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_sizes_nearest_floor_align_corners_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_sizes_nearest_not_larger_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_resize_upsample_sizes_nearest_round_prefer_ceil_asymmetric_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reversesequence_batch_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_reversesequence_time_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_rnn_seq_length_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_roialign_aligned_false_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_roialign_aligned_true_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_round_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_scan9_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scan_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatter_elements_with_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatter_elements_with_duplicate_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatter_elements_with_negative_indices_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatter_elements_with_reduction_max_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatter_elements_with_reduction_min_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatter_elements_without_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatter_with_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatter_without_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatternd_add_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatternd_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatternd_max_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatternd_min_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_scatternd_multiply_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1_mean_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1_mean_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1_mean_weight_negative_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1_mean_weight_negative_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1d2d3_none_no_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1d2d3_none_no_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1d2d3_sum_weight_high_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1d2d3_sum_weight_high_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1d2d3_sum_weight_high_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1d2d3_sum_weight_high_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1d2d3d4d5_mean_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1d2d3d4d5_mean_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1d2d3d4d5_mean_weight_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1d2d3d4d5_mean_weight_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1d2d3d4d5_none_no_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1d2d3d4d5_none_no_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1d2d3d4d5_none_no_weight_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_NCd1d2d3d4d5_none_no_weight_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_3d_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_3d_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_3d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_no_weight_ii_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_no_weight_ii_3d_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_no_weight_ii_3d_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_no_weight_ii_3d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_no_weight_ii_4d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_no_weight_ii_4d_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_no_weight_ii_4d_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_no_weight_ii_4d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_no_weight_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_no_weight_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_no_weight_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_no_weight_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_weight_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_weight_ii_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_weight_ii_3d_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_weight_ii_3d_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_weight_ii_3d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_weight_ii_4d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_weight_ii_4d_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_weight_ii_4d_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_weight_ii_4d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_weight_ii_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_weight_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_weight_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_weight_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_weight_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_mean_weight_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_none_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_none_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_none_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_none_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_none_weights_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_none_weights_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_none_weights_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_none_weights_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_sum_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_sum_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_sum_log_prob_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sce_sum_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_selu_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_selu_default_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_selu_default_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_selu_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_selu_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_selu_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_insert_at_back_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_insert_at_front_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_add_1_sequence_1_tensor_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_add_1_sequence_1_tensor_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_add_2_sequences_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_add_2_sequences_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_extract_shapes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_extract_shapes_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_identity_1_sequence_1_tensor_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_identity_1_sequence_1_tensor_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_identity_1_sequence_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_identity_1_sequence_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_identity_2_sequences_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sequence_map_identity_2_sequences_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_shape_clip_end_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_shape_clip_start_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_shape_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_shape_end_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_shape_end_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_shape_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_shape_start_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_shape_start_1_end_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_shape_start_1_end_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_shape_start_negative_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_shrink_hard_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_shrink_hard_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_shrink_soft_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_shrink_soft_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sigmoid_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sigmoid_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sign_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_simple_rnn_batchwise_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_simple_rnn_defaults_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_simple_rnn_with_initial_bias_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sin_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sin_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sinh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sinh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_size_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_size_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_slice_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_slice_default_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_slice_default_steps_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_slice_end_out_of_bounds_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_slice_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_slice_neg_steps_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_slice_negative_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_slice_start_out_of_bounds_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_axis_0_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_axis_0_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_axis_1_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_axis_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_axis_2_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_axis_2_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_axis_2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_default_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_default_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_default_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_large_number_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_large_number_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_large_number_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softmax_negative_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softmax_negative_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softplus_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softplus_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softplus_example_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softplus_expanded_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softsign_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softsign_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_softsign_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_softsign_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_spacetodepth_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_spacetodepth_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_1d_uneven_split_opset18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_2d_uneven_split_opset18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_equal_parts_1d_opset13_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_equal_parts_1d_opset18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_equal_parts_2d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_equal_parts_2d_opset13_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_equal_parts_default_axis_opset13_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_equal_parts_default_axis_opset18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_variable_parts_1d_opset13_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_variable_parts_1d_opset18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_variable_parts_2d_opset13_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_variable_parts_2d_opset18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_variable_parts_default_axis_opset13_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_variable_parts_default_axis_opset18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_zero_size_splits_opset13_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_split_zero_size_splits_opset18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sqrt_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sqrt_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_squeeze_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_squeeze_negative_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_stft_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_stft_with_window_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_strnormalizer_export_monday_casesensintive_lower_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_strnormalizer_export_monday_casesensintive_nochangecase_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_strnormalizer_export_monday_casesensintive_upper_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_strnormalizer_export_monday_empty_output_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_strnormalizer_export_monday_insensintive_upper_twodim_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_strnormalizer_nostopwords_nochangecase_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sub_bcast_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sub_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sub_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sub_uint8_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_sum_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sum_one_input_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_sum_two_inputs_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_tan_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tan_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tanh_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tanh_example_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tfidfvectorizer_tf_batch_onlybigrams_skip0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_tfidfvectorizer_tf_batch_onlybigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_tfidfvectorizer_tf_batch_uniandbigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_tfidfvectorizer_tf_only_bigrams_skip0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_tfidfvectorizer_tf_onlybigrams_levelempty_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_tfidfvectorizer_tf_onlybigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_tfidfvectorizer_tf_uniandbigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_thresholdedrelu_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_thresholdedrelu_default_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_thresholdedrelu_default_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_thresholdedrelu_example_cpu (__main__.OnnxBackendNodeModelTest) ... FAIL
    test_thresholdedrelu_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_thresholdedrelu_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_tile_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_tile_precomputed_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_top_k_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_top_k_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_top_k_smallest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_training_dropout_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_training_dropout_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_training_dropout_default_mask_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_training_dropout_mask_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_training_dropout_zero_ratio_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_training_dropout_zero_ratio_mask_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_transpose_all_permutations_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_transpose_all_permutations_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_transpose_all_permutations_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_transpose_all_permutations_3_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_transpose_all_permutations_4_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_transpose_all_permutations_5_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_transpose_default_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_tril_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tril_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tril_one_row_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tril_out_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tril_out_pos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tril_pos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tril_square_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tril_square_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_tril_zero_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_triu_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_triu_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_triu_one_row_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_triu_out_neg_out_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_triu_out_pos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_triu_pos_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_triu_square_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_triu_square_neg_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_triu_zero_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_unique_not_sorted_without_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_unique_sorted_with_axis_3d_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_unique_sorted_with_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_unique_sorted_with_negative_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_unique_sorted_without_axis_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_unsqueeze_axis_0_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_unsqueeze_axis_1_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_unsqueeze_axis_2_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_unsqueeze_negative_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_unsqueeze_three_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_unsqueeze_two_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_unsqueeze_unsorted_axes_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_upsample_nearest_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_where_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_where_long_example_cpu (__main__.OnnxBackendNodeModelTest) ... ERROR
    test_xor2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_xor3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_xor4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_xor_bcast3v1d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_xor_bcast3v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_xor_bcast4v2d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_xor_bcast4v3d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_xor_bcast4v4d_cpu (__main__.OnnxBackendNodeModelTest) ... ok
    test_AvgPool1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_AvgPool1d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_AvgPool2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_AvgPool2d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_AvgPool3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_AvgPool3d_stride1_pad0_gpu_input_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_AvgPool3d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_BatchNorm1d_3d_input_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_BatchNorm2d_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_BatchNorm2d_momentum_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_BatchNorm3d_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_BatchNorm3d_momentum_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_ConstantPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv1d_dilated_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv1d_groups_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv1d_pad1_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv1d_pad1size1_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv1d_pad2_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv1d_pad2size1_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv1d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv2d_depthwise_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv2d_depthwise_padded_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv2d_depthwise_strided_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv2d_depthwise_with_multiplier_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv2d_dilated_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv2d_groups_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv2d_groups_thnn_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv2d_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv2d_padding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv2d_strided_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv3d_dilated_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv3d_dilated_strided_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv3d_groups_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv3d_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv3d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Conv3d_stride_padding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_ConvTranspose2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_ConvTranspose2d_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_ELU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Embedding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Embedding_sparse_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_GLU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_GLU_dim_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_LeakyReLU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_LeakyReLU_with_negval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Linear_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Linear_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_LogSoftmax_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_MaxPool1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_MaxPool1d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_MaxPool1d_stride_padding_dilation_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_MaxPool2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_MaxPool2d_stride_padding_dilation_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_MaxPool3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_MaxPool3d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_MaxPool3d_stride_padding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_PReLU_1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_PReLU_1d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_PReLU_2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_PReLU_2d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_PReLU_3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_PReLU_3d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_PixelShuffle_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_PoissonNLLLLoss_no_reduce_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_ReLU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_ReflectionPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_ReplicationPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_SELU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Sigmoid_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Softmax_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Softmin_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Softplus_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_Softsign_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_Tanh_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_ZeroPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ERROR
    test_log_softmax_dim3_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_log_softmax_lastdim_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_softmax_functional_dim3_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_softmax_lastdim_cpu (__main__.OnnxBackendPyTorchConvertedModelTest) ... ok
    test_operator_add_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_add_size1_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_add_size1_right_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_add_size1_singleton_broadcast_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_addconstant_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_addmm_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_basic_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_chunk_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_clip_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_concat2_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_conv_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_convtranspose_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_exp_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_flatten_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_index_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_max_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_maxpool_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_min_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_mm_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_non_float_params_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_pad_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_params_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_permute2_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_pow_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_pow.py:19: RuntimeWarning: invalid value encountered in power
      return (numpy.power(a, b).astype(a.dtype), )
    ok
    test_operator_reduced_mean_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_reduced_mean_keepdim_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_reduced_sum_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_reduced_sum_keepdim_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_repeat_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_repeat_dim_overflow_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_selu_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ok
    test_operator_sqrt_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/op_sqrt.py:22: RuntimeWarning: invalid value encountered in sqrt
      return (numpy.sqrt(x), )
    ok
    test_operator_symbolic_override_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_symbolic_override_nested_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_operator_view_cpu (__main__.OnnxBackendPyTorchOperatorModelTest) ... ERROR
    test_bvlc_alexnet_cpu (__main__.OnnxBackendRealModelTest) ... ERROR
    test_densenet121_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_densenet121_.*"'
    test_densenet121_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_densenet121_.*"'
    test_inception_v1_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_inception_.*"'
    test_inception_v1_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_inception_.*"'
    test_inception_v2_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_inception_.*"'
    test_inception_v2_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_inception_.*"'
    test_resnet50_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_resnet50_.*"'
    test_resnet50_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_resnet50_.*"'
    test_shufflenet_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_shufflenet_.*"'
    test_shufflenet_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_shufflenet_.*"'
    test_squeezenet_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_squeezenet_.*"'
    test_squeezenet_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_squeezenet_.*"'
    test_vgg19_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_vgg19_.*"'
    test_vgg19_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_vgg19_.*"'
    test_zfnet512_cpu (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_zfnet512_.*"'
    test_zfnet512_cuda (__main__.OnnxBackendRealModelTest) ... skipped 'matched exclude pattern ".*_zfnet512_.*"'
    test_expand_shape_model1_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_expand_shape_model2_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_expand_shape_model3_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_expand_shape_model4_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_gradient_of_add_and_mul_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_gradient_of_add_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_sequence_model1_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_sequence_model2_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_sequence_model3_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_sequence_model4_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_sequence_model5_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_sequence_model6_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_sequence_model7_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_sequence_model8_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_shrink_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_sign_model_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_single_relu_model_cpu (__main__.OnnxBackendSimpleModelTest) ... ok
    test_strnorm_model_monday_casesensintive_lower_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_strnorm_model_monday_casesensintive_nochangecase_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_strnorm_model_monday_casesensintive_upper_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_strnorm_model_monday_empty_output_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_strnorm_model_monday_insensintive_upper_twodim_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    test_strnorm_model_nostopwords_nochangecase_cpu (__main__.OnnxBackendSimpleModelTest) ... ERROR
    
    ======================================================================
    ERROR: test_adagrad_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Adagrad' domain='ai.onnx.preview.training', looking for 'shape_adagrad' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
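
Every `ShapeInferenceMissing` error in this report follows the pattern shown above: `shape_dispatch` builds a handler name from the lowercased operator type (`'Adagrad'` becomes `'shape_adagrad'`) and fails when that name is not among the functions listed in the message, which is the case for the `ai.onnx.preview.training` operators. The sketch below only illustrates that name-based lookup; the registry, function names and signatures are hypothetical and are not mlprodict's actual implementation.

<<<

from collections import namedtuple

# Hypothetical sketch of name-based shape dispatch; mlprodict's real
# shape_dispatch differs in signatures and error handling.
Node = namedtuple("Node", ["op_type", "input", "output"])


def shape_abs(known_shapes, node):
    # Element-wise unary operator: the output shape is the input shape.
    known_shapes[node.output[0]] = known_shapes[node.input[0]]
    return True


_SHAPE_FUNCTIONS = {"shape_abs": shape_abs}


def shape_dispatch_sketch(known_shapes, node):
    key = "shape_%s" % node.op_type.lower()  # 'Adagrad' -> 'shape_adagrad'
    if key not in _SHAPE_FUNCTIONS:
        # Counterpart of ShapeInferenceMissing in the log above.
        raise RuntimeError(
            "Unable to find a corresponding function for operator type %r, "
            "looking for %r." % (node.op_type, key))
    return _SHAPE_FUNCTIONS[key](known_shapes, node)


shapes = {"X": (2, 3)}
shape_dispatch_sketch(shapes, Node("Abs", ["X"], ["Y"]))  # fills shapes["Y"]
try:
    shape_dispatch_sketch(shapes, Node("Adagrad", [], ["R"]))
except RuntimeError as e:
    print(e)  # mirrors the errors reported for the training operators

>>>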
    
    ======================================================================
    ERROR: test_adagrad_multiple_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Adagrad' domain='ai.onnx.preview.training', looking for 'shape_adagrad' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_adam_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Adam' domain='ai.onnx.preview.training', looking for 'shape_adam' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_adam_multiple_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Adam' domain='ai.onnx.preview.training', looking for 'shape_adam' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmax_default_axis_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmax_default_axis_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmax_default_axis_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmax_default_axis_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmax_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmax_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmax_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmax_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmax_negative_axis_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmax_negative_axis_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmax_negative_axis_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmax_negative_axis_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmax_no_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmax_no_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmax_no_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmax_no_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMax' domain='', looking for 'shape_argmax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
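
The ArgMax failures above and the ArgMin failures below share the same cause: no `shape_argmax` or `shape_argmin` handler is registered, so preparation stops before any output shape is computed. The expected shape itself is fixed by the ONNX specification: ArgMax/ArgMin return an INT64 tensor whose shape is the input shape with the reduced axis removed (keepdims=0) or kept with size 1 (keepdims=1). The helper below is a standalone illustration of that computation, not the missing `shape_argmax`.

<<<

from typing import List


def argmax_output_shape(input_shape: List[int], axis: int = 0,
                        keepdims: int = 1) -> List[int]:
    # Illustrative only: ONNX ArgMax/ArgMin output shape from the input
    # shape and the 'axis'/'keepdims' attributes.
    rank = len(input_shape)
    axis = axis % rank  # handle negative axis values
    if keepdims:
        return [1 if i == axis else d for i, d in enumerate(input_shape)]
    return [d for i, d in enumerate(input_shape) if i != axis]


# Example: ArgMax over axis=1 of a (2, 3, 4) tensor.
print(argmax_output_shape([2, 3, 4], axis=1, keepdims=1))  # [2, 1, 4]
print(argmax_output_shape([2, 3, 4], axis=1, keepdims=0))  # [2, 4]

>>>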
    
    ======================================================================
    ERROR: test_argmin_default_axis_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmin_default_axis_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmin_default_axis_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmin_default_axis_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmin_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmin_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmin_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmin_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmin_negative_axis_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmin_negative_axis_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmin_negative_axis_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmin_negative_axis_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmin_no_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmin_no_keepdims_example_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmin_no_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_argmin_no_keepdims_random_select_last_index_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ArgMin' domain='', looking for 'shape_argmin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
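
The ArgMin failures above (and the ArgMax ones before them) all stop before any shape is computed. For reference, here is a minimal standalone sketch of the shape rule such a handler would have to implement, following the ONNX specification for ArgMin/ArgMax (default axis 0, default keepdims 1, int64 output); the helper name is made up for this illustration and is not part of mlprodict.

<<<

# Standalone illustration, not part of mlprodict: the shape rule a handler
# for ArgMin/ArgMax would have to implement according to the ONNX
# specification (the output element type is int64 in every case).


def argminmax_output_shape(input_shape, axis=0, keepdims=1):
    "Shape of the ArgMin/ArgMax output for a given input shape."
    rank = len(input_shape)
    axis = axis % rank        # the ONNX specification allows negative axes
    out = list(input_shape)
    if keepdims:
        out[axis] = 1         # reduced axis is kept with size 1
    else:
        del out[axis]         # reduced axis is removed
    return tuple(out)


assert argminmax_output_shape((2, 3, 4), axis=1, keepdims=1) == (2, 1, 4)
assert argminmax_output_shape((2, 3, 4), axis=-1, keepdims=0) == (2, 3)

>>>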
    
    ======================================================================
    ERROR: test_averagepool_1d_default_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
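
The AveragePool failures that follow share the same root cause: no shape_averagepool function is registered. As a reference, here is a simplified, standalone sketch of the output-shape rule for AveragePool restricted to explicit pads (auto_pad left at NOTSET, no dilations); the helper name is hypothetical and this is not mlprodict code. Note that count_include_pad changes the values, not the output shape.

<<<

# Standalone illustration, not mlprodict code: the spatial-size rule a
# shape_averagepool handler would need, restricted to explicit pads
# (auto_pad=NOTSET) and no dilations.
import math


def averagepool_output_shape(input_shape, kernel_shape,
                             strides=None, pads=None, ceil_mode=0):
    "input_shape is (N, C, d1, ..., dn); pads is (begin_1..n, end_1..n)."
    n = len(kernel_shape)
    strides = strides or [1] * n
    pads = pads or [0] * (2 * n)
    rounding = math.ceil if ceil_mode else math.floor
    spatial = [
        int(rounding((input_shape[2 + i] + pads[i] + pads[n + i]
                      - kernel_shape[i]) / strides[i])) + 1
        for i in range(n)
    ]
    return tuple(input_shape[:2]) + tuple(spatial)


# a 2x2 kernel with stride 1 and no padding shrinks each spatial side by 1
assert averagepool_output_shape((1, 3, 32, 32), (2, 2)) == (1, 3, 31, 31)

>>>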
    
    ======================================================================
    ERROR: test_averagepool_2d_ceil_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_averagepool_2d_default_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_averagepool_2d_pads_count_include_pad_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_averagepool_2d_pads_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_averagepool_2d_precomputed_pads_count_include_pad_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_averagepool_2d_precomputed_pads_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_averagepool_2d_precomputed_same_upper_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_averagepool_2d_precomputed_strides_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_averagepool_2d_same_lower_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_averagepool_2d_same_upper_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_averagepool_2d_strides_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_averagepool_3d_default_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_basic_conv_with_padding_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Conv' domain='', looking for 'shape_conv' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_basic_conv_without_padding_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Conv' domain='', looking for 'shape_conv' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_batchnorm_epsilon_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BatchNormalization' domain='', looking for 'shape_batchnormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_batchnorm_epsilon_training_mode_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BatchNormalization' domain='', looking for 'shape_batchnormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_batchnorm_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BatchNormalization' domain='', looking for 'shape_batchnormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_batchnorm_example_training_mode_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BatchNormalization' domain='', looking for 'shape_batchnormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bernoulli_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Bernoulli' domain='', looking for 'shape_bernoulli' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bernoulli_double_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Bernoulli' domain='', looking for 'shape_bernoulli' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bernoulli_double_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'RandomUniformLike' domain='', looking for 'shape_randomuniformlike' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bernoulli_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'RandomUniformLike' domain='', looking for 'shape_randomuniformlike' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bernoulli_seed_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Bernoulli' domain='', looking for 'shape_bernoulli' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bernoulli_seed_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'RandomUniformLike' domain='', looking for 'shape_randomuniformlike' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitshift_left_uint16_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitShift' domain='', looking for 'shape_bitshift' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitshift_left_uint32_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitShift' domain='', looking for 'shape_bitshift' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitshift_left_uint64_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitShift' domain='', looking for 'shape_bitshift' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitshift_left_uint8_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitShift' domain='', looking for 'shape_bitshift' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitshift_right_uint16_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitShift' domain='', looking for 'shape_bitshift' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitshift_right_uint32_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitShift' domain='', looking for 'shape_bitshift' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitshift_right_uint64_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitShift' domain='', looking for 'shape_bitshift' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitshift_right_uint8_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitShift' domain='', looking for 'shape_bitshift' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitwise_and_i16_3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitwiseAnd' domain='', looking for 'shape_bitwiseand' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitwise_and_i32_2d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitwiseAnd' domain='', looking for 'shape_bitwiseand' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitwise_and_ui64_bcast_3v1d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitwiseAnd' domain='', looking for 'shape_bitwiseand' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitwise_and_ui8_bcast_4v3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitwiseAnd' domain='', looking for 'shape_bitwiseand' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitwise_not_2d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitwiseNot' domain='', looking for 'shape_bitwisenot' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitwise_not_3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitwiseNot' domain='', looking for 'shape_bitwisenot' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitwise_not_4d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitwiseNot' domain='', looking for 'shape_bitwisenot' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitwise_or_i16_4d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitwiseOr' domain='', looking for 'shape_bitwiseor' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitwise_or_i32_2d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitwiseOr' domain='', looking for 'shape_bitwiseor' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitwise_or_ui64_bcast_3v1d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitwiseOr' domain='', looking for 'shape_bitwiseor' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitwise_or_ui8_bcast_4v3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitwiseOr' domain='', looking for 'shape_bitwiseor' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitwise_xor_i16_3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitwiseXor' domain='', looking for 'shape_bitwisexor' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitwise_xor_i32_2d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitwiseXor' domain='', looking for 'shape_bitwisexor' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitwise_xor_ui64_bcast_3v1d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitwiseXor' domain='', looking for 'shape_bitwisexor' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_bitwise_xor_ui8_bcast_4v3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'BitwiseXor' domain='', looking for 'shape_bitwisexor' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
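
All of the BitwiseOr/BitwiseXor failures above are the same problem: shape_dispatch in mlprodict.onnxrt.ops_shape looks for a function named shape_<operator> (here shape_bitwiseor or shape_bitwisexor), finds none among the registered shape_* handlers, and raises ShapeInferenceMissing while the backend prepares the model. A minimal reproduction sketch is shown below; the import path for OnnxShapeInference is taken from the traceback and is an assumption rather than a documented entry point.

<<<

# Sketch: a one-node BitwiseOr model should trigger the same ShapeInferenceMissing
# error as the tests above (assuming OnnxShapeInference is importable from the
# module path shown in the traceback).
from onnx import TensorProto, helper
from mlprodict.onnxrt.onnx_shape_inference import OnnxShapeInference

node = helper.make_node("BitwiseOr", ["X", "Y"], ["Z"])
graph = helper.make_graph(
    [node], "bitwise_or",
    [helper.make_tensor_value_info("X", TensorProto.INT32, [3, 4]),
     helper.make_tensor_value_info("Y", TensorProto.INT32, [3, 4])],
    [helper.make_tensor_value_info("Z", TensorProto.INT32, [3, 4])])
model = helper.make_model(graph, opset_imports=[helper.make_opsetid("", 18)])

try:
    OnnxShapeInference(model)
except Exception as exc:  # expected: ShapeInferenceMissing for 'BitwiseOr'
    print(type(exc).__name__, exc)

>>>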
    
    ======================================================================
    ERROR: test_blackmanwindow_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_blackmanwindow_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_blackmanwindow_symmetric_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_blackmanwindow_symmetric_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
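
The BlackmanWindow failures are indirect: the operator is defined as an ONNX function, the backend expands its function body (sess = rt_class(onnx_schema.function_body) in the traceback), and shape inference then stops on the Constant node inside that body because no shape_constant handler is registered. A hedged way to see which handlers the dispatcher can find is sketched below; it assumes the handlers are exposed as shape_<op> names in the mlprodict.onnxrt.ops_shape namespace, which is what the error message ("looking for 'shape_constant' among ...") suggests.

<<<

# Sketch: list the shape_* names visible in the package used by shape_dispatch.
# Assumption: handlers are reachable as shape_<op> attributes of
# mlprodict.onnxrt.ops_shape, as the error message implies.
import mlprodict.onnxrt.ops_shape as ops_shape

handlers = sorted(n for n in dir(ops_shape) if n.startswith("shape_"))
print(len(handlers), "shape handlers found")
print("shape_constant registered:", "shape_constant" in handlers)  # False in this run

>>>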
    
    ======================================================================
    ERROR: test_cast_BFLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 619, in to_sequence
        variables[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 402, in _var_as_dict
        elem_type = _elem_type_as_str(t.elem_type)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 332, in _elem_type_as_str
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: elem_type '16' is unknown
    fields:
    ['__abs__',
     '__add__',
     '__and__',
     '__bool__',
     '__ceil__',
     '__class__',
     '__delattr__',
     '__dir__',
     '__divmod__',
     '__doc__',
     '__eq__',
     '__float__',
     '__floor__',
     '__floordiv__',
     '__format__',
     '__ge__',
     '__getattribute__',
     '__getnewargs__',
     '__gt__',
     '__hash__',
     '__index__',
     '__init__',
     '__init_subclass__',
     '__int__',
     '__invert__',
     '__le__',
     '__lshift__',
     '__lt__',
     '__mod__',
     '__mul__',
     '__ne__',
     '__neg__',
     '__new__',
     '__or__',
     '__pos__',
     '__pow__',
     '__radd__',
     '__rand__',
     '__rdivmod__',
     '__reduce__',
     '__reduce_ex__',
     '__repr__',
     '__rfloordiv__',
     '__rlshift__',
     '__rmod__',
     '__rmul__',
     '__ror__',
     '__round__',
     '__rpow__',
     '__rrshift__',
     '__rshift__',
     '__rsub__',
     '__rtruediv__',
     '__rxor__',
     '__setattr__',
     '__sizeof__',
     '__str__',
     '__sub__',
     '__subclasshook__',
     '__truediv__',
     '__trunc__',
     '__xor__',
     'as_integer_ratio',
     'bit_length',
     'conjugate',
     'denominator',
     'from_bytes',
     'imag',
     'numerator',
     'real',
     'to_bytes']
    -----
    <class 'int'>.
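
elem_type '16' is the ONNX code for BFLOAT16, which _elem_type_as_str in onnx2py_helper cannot translate, so every BFLOAT16 test fails while the model is being converted to the runtime's internal representation; the long "fields:" listing above is simply dir() of the integer 16, dumped by the exception for debugging. The mapping can be checked with the standard onnx enum:

<<<

# elem_type 16 corresponds to BFLOAT16 in the ONNX TensorProto type enumeration.
from onnx import TensorProto

print(TensorProto.BFLOAT16)           # 16
print(TensorProto.DataType.Name(16))  # 'BFLOAT16'

>>>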
    
    ======================================================================
    ERROR: test_cast_DOUBLE_to_FLOAT16_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
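
The Cast failures here and below are again the missing-handler pattern rather than a genuinely hard inference: Cast only changes the element type, so the output shape equals the input shape. As a cross-check, onnx's own shape inference (independent of mlprodict) handles it; a small sketch follows.

<<<

# Cross-check with onnx.shape_inference: Cast preserves the input shape.
from onnx import TensorProto, helper, shape_inference

cast = helper.make_node("Cast", ["X"], ["C"], to=TensorProto.DOUBLE)
ident = helper.make_node("Identity", ["C"], ["Y"])
graph = helper.make_graph(
    [cast, ident], "cast_check",
    [helper.make_tensor_value_info("X", TensorProto.FLOAT, [3, 4])],
    [helper.make_tensor_value_info("Y", TensorProto.DOUBLE, [3, 4])])
model = helper.make_model(graph)

inferred = shape_inference.infer_shapes(model)
print(inferred.graph.value_info)  # 'C' inferred as DOUBLE with shape [3, 4]

>>>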
    
    ======================================================================
    ERROR: test_cast_DOUBLE_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_cast_FLOAT16_to_DOUBLE_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_cast_FLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_cast_FLOAT_to_BFLOAT16_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 629, in to_sequence
        outputs[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 402, in _var_as_dict
        elem_type = _elem_type_as_str(t.elem_type)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 332, in _elem_type_as_str
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: elem_type '16' is unknown
    fields:
    ['__abs__',
     '__add__',
     '__and__',
     '__bool__',
     '__ceil__',
     '__class__',
     '__delattr__',
     '__dir__',
     '__divmod__',
     '__doc__',
     '__eq__',
     '__float__',
     '__floor__',
     '__floordiv__',
     '__format__',
     '__ge__',
     '__getattribute__',
     '__getnewargs__',
     '__gt__',
     '__hash__',
     '__index__',
     '__init__',
     '__init_subclass__',
     '__int__',
     '__invert__',
     '__le__',
     '__lshift__',
     '__lt__',
     '__mod__',
     '__mul__',
     '__ne__',
     '__neg__',
     '__new__',
     '__or__',
     '__pos__',
     '__pow__',
     '__radd__',
     '__rand__',
     '__rdivmod__',
     '__reduce__',
     '__reduce_ex__',
     '__repr__',
     '__rfloordiv__',
     '__rlshift__',
     '__rmod__',
     '__rmul__',
     '__ror__',
     '__round__',
     '__rpow__',
     '__rrshift__',
     '__rshift__',
     '__rsub__',
     '__rtruediv__',
     '__rxor__',
     '__setattr__',
     '__sizeof__',
     '__str__',
     '__sub__',
     '__subclasshook__',
     '__truediv__',
     '__trunc__',
     '__xor__',
     'as_integer_ratio',
     'bit_length',
     'conjugate',
     'denominator',
     'from_bytes',
     'imag',
     'numerator',
     'real',
     'to_bytes']
    -----
    <class 'int'>.
    
    ======================================================================
    ERROR: test_cast_FLOAT_to_DOUBLE_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_cast_FLOAT_to_FLOAT16_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_cast_FLOAT_to_STRING_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_cast_STRING_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_castlike_BFLOAT16_to_FLOAT_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 619, in to_sequence
        variables[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 402, in _var_as_dict
        elem_type = _elem_type_as_str(t.elem_type)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 332, in _elem_type_as_str
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: elem_type '16' is unknown
    fields:
    ['__abs__',
     '__add__',
     '__and__',
     '__bool__',
     '__ceil__',
     '__class__',
     '__delattr__',
     '__dir__',
     '__divmod__',
     '__doc__',
     '__eq__',
     '__float__',
     '__floor__',
     '__floordiv__',
     '__format__',
     '__ge__',
     '__getattribute__',
     '__getnewargs__',
     '__gt__',
     '__hash__',
     '__index__',
     '__init__',
     '__init_subclass__',
     '__int__',
     '__invert__',
     '__le__',
     '__lshift__',
     '__lt__',
     '__mod__',
     '__mul__',
     '__ne__',
     '__neg__',
     '__new__',
     '__or__',
     '__pos__',
     '__pow__',
     '__radd__',
     '__rand__',
     '__rdivmod__',
     '__reduce__',
     '__reduce_ex__',
     '__repr__',
     '__rfloordiv__',
     '__rlshift__',
     '__rmod__',
     '__rmul__',
     '__ror__',
     '__round__',
     '__rpow__',
     '__rrshift__',
     '__rshift__',
     '__rsub__',
     '__rtruediv__',
     '__rxor__',
     '__setattr__',
     '__sizeof__',
     '__str__',
     '__sub__',
     '__subclasshook__',
     '__truediv__',
     '__trunc__',
     '__xor__',
     'as_integer_ratio',
     'bit_length',
     'conjugate',
     'denominator',
     'from_bytes',
     'imag',
     'numerator',
     'real',
     'to_bytes']
    -----
    <class 'int'>.
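
The NotImplementedError for elem_type '16' is a different issue: 16 is the
value of BFLOAT16 in ONNX's TensorProto enumeration, and the helper
_elem_type_as_str has no string mapping for it. A quick check, added here
for illustration:

<<<

from onnx import TensorProto

# 16 is BFLOAT16 in the TensorProto data type enumeration
print(TensorProto.BFLOAT16)
# FLOAT16 (10), shown for comparison, does not raise this error in the logs above
print(TensorProto.FLOAT16)

>>>
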
    
    ======================================================================
    ERROR: test_castlike_BFLOAT16_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 619, in to_sequence
        variables[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 402, in _var_as_dict
        elem_type = _elem_type_as_str(t.elem_type)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 332, in _elem_type_as_str
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: elem_type '16' is unknown
    fields:
    ['__abs__',
     '__add__',
     '__and__',
     '__bool__',
     '__ceil__',
     '__class__',
     '__delattr__',
     '__dir__',
     '__divmod__',
     '__doc__',
     '__eq__',
     '__float__',
     '__floor__',
     '__floordiv__',
     '__format__',
     '__ge__',
     '__getattribute__',
     '__getnewargs__',
     '__gt__',
     '__hash__',
     '__index__',
     '__init__',
     '__init_subclass__',
     '__int__',
     '__invert__',
     '__le__',
     '__lshift__',
     '__lt__',
     '__mod__',
     '__mul__',
     '__ne__',
     '__neg__',
     '__new__',
     '__or__',
     '__pos__',
     '__pow__',
     '__radd__',
     '__rand__',
     '__rdivmod__',
     '__reduce__',
     '__reduce_ex__',
     '__repr__',
     '__rfloordiv__',
     '__rlshift__',
     '__rmod__',
     '__rmul__',
     '__ror__',
     '__round__',
     '__rpow__',
     '__rrshift__',
     '__rshift__',
     '__rsub__',
     '__rtruediv__',
     '__rxor__',
     '__setattr__',
     '__sizeof__',
     '__str__',
     '__sub__',
     '__subclasshook__',
     '__truediv__',
     '__trunc__',
     '__xor__',
     'as_integer_ratio',
     'bit_length',
     'conjugate',
     'denominator',
     'from_bytes',
     'imag',
     'numerator',
     'real',
     'to_bytes']
    -----
    <class 'int'>.
    
    ======================================================================
    ERROR: test_castlike_DOUBLE_to_FLOAT16_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_castlike_DOUBLE_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_castlike_FLOAT16_to_DOUBLE_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_castlike_FLOAT16_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_castlike_FLOAT_to_BFLOAT16_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 619, in to_sequence
        variables[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 402, in _var_as_dict
        elem_type = _elem_type_as_str(t.elem_type)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 332, in _elem_type_as_str
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: elem_type '16' is unknown
    fields:
    ['__abs__',
     '__add__',
     '__and__',
     '__bool__',
     '__ceil__',
     '__class__',
     '__delattr__',
     '__dir__',
     '__divmod__',
     '__doc__',
     '__eq__',
     '__float__',
     '__floor__',
     '__floordiv__',
     '__format__',
     '__ge__',
     '__getattribute__',
     '__getnewargs__',
     '__gt__',
     '__hash__',
     '__index__',
     '__init__',
     '__init_subclass__',
     '__int__',
     '__invert__',
     '__le__',
     '__lshift__',
     '__lt__',
     '__mod__',
     '__mul__',
     '__ne__',
     '__neg__',
     '__new__',
     '__or__',
     '__pos__',
     '__pow__',
     '__radd__',
     '__rand__',
     '__rdivmod__',
     '__reduce__',
     '__reduce_ex__',
     '__repr__',
     '__rfloordiv__',
     '__rlshift__',
     '__rmod__',
     '__rmul__',
     '__ror__',
     '__round__',
     '__rpow__',
     '__rrshift__',
     '__rshift__',
     '__rsub__',
     '__rtruediv__',
     '__rxor__',
     '__setattr__',
     '__sizeof__',
     '__str__',
     '__sub__',
     '__subclasshook__',
     '__truediv__',
     '__trunc__',
     '__xor__',
     'as_integer_ratio',
     'bit_length',
     'conjugate',
     'denominator',
     'from_bytes',
     'imag',
     'numerator',
     'real',
     'to_bytes']
    -----
    <class 'int'>.
    
    ======================================================================
    ERROR: test_castlike_FLOAT_to_BFLOAT16_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 619, in to_sequence
        variables[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 402, in _var_as_dict
        elem_type = _elem_type_as_str(t.elem_type)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 332, in _elem_type_as_str
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: elem_type '16' is unknown
    fields:
    ['__abs__',
     '__add__',
     '__and__',
     '__bool__',
     '__ceil__',
     '__class__',
     '__delattr__',
     '__dir__',
     '__divmod__',
     '__doc__',
     '__eq__',
     '__float__',
     '__floor__',
     '__floordiv__',
     '__format__',
     '__ge__',
     '__getattribute__',
     '__getnewargs__',
     '__gt__',
     '__hash__',
     '__index__',
     '__init__',
     '__init_subclass__',
     '__int__',
     '__invert__',
     '__le__',
     '__lshift__',
     '__lt__',
     '__mod__',
     '__mul__',
     '__ne__',
     '__neg__',
     '__new__',
     '__or__',
     '__pos__',
     '__pow__',
     '__radd__',
     '__rand__',
     '__rdivmod__',
     '__reduce__',
     '__reduce_ex__',
     '__repr__',
     '__rfloordiv__',
     '__rlshift__',
     '__rmod__',
     '__rmul__',
     '__ror__',
     '__round__',
     '__rpow__',
     '__rrshift__',
     '__rshift__',
     '__rsub__',
     '__rtruediv__',
     '__rxor__',
     '__setattr__',
     '__sizeof__',
     '__str__',
     '__sub__',
     '__subclasshook__',
     '__truediv__',
     '__trunc__',
     '__xor__',
     'as_integer_ratio',
     'bit_length',
     'conjugate',
     'denominator',
     'from_bytes',
     'imag',
     'numerator',
     'real',
     'to_bytes']
    -----
    <class 'int'>.
    
    ======================================================================
    ERROR: test_castlike_FLOAT_to_DOUBLE_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_castlike_FLOAT_to_FLOAT16_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_castlike_FLOAT_to_STRING_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_castlike_STRING_to_FLOAT_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_celu_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
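
The *_expanded_cpu variants run the function-expanded form of an operator,
so extra nodes such as Constant appear in the graph; a missing
shape_constant therefore also fails tests of operators whose own shape
function exists (test_celu_expanded_cpu above, while shape_celu is listed).
The short helper below, written for this page only, lists the operator
types a model contains, i.e. which shape_* functions would be needed to
cover it.

<<<

# Hypothetical helper (not part of mlprodict): collect the operator types
# of a small model containing a Constant node.
from onnx import helper, TensorProto

cst = helper.make_node(
    'Constant', [], ['Y'],
    value=helper.make_tensor('v', TensorProto.FLOAT, [1], [1.0]))
relu = helper.make_node('Relu', ['Y'], ['Z'])
graph = helper.make_graph(
    [cst, relu], 'required_ops', [],
    [helper.make_tensor_value_info('Z', TensorProto.FLOAT, [1])])
model = helper.make_model(graph)

print(sorted({node.op_type for node in model.graph.node}))

>>>
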
    
    ======================================================================
    ERROR: test_center_crop_pad_crop_and_pad_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'CenterCropPad' domain='', looking for 'shape_centercroppad' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_center_crop_pad_crop_and_pad_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_center_crop_pad_crop_axes_chw_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'CenterCropPad' domain='', looking for 'shape_centercroppad' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_center_crop_pad_crop_axes_chw_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_center_crop_pad_crop_axes_hwc_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'CenterCropPad' domain='', looking for 'shape_centercroppad' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_center_crop_pad_crop_axes_hwc_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_center_crop_pad_crop_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'CenterCropPad' domain='', looking for 'shape_centercroppad' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_center_crop_pad_crop_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_center_crop_pad_pad_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'CenterCropPad' domain='', looking for 'shape_centercroppad' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_center_crop_pad_pad_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_clip_default_int8_max_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
        return fct_shape(known_shape, node)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 87, in shape_less
        return _element_wise(known_shapes, node, return_bool=True)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 46, in _element_wise
        ShapeResult.broadcast(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 364, in broadcast
        raise NotImplementedShapeInferenceError(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.NotImplementedShapeInferenceError: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('max', [], dtype('int8')) and ShapeResult('x', [3, 4, 5], dtype('int8')).
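
The `test_clip_*_expanded` failures have a different cause: the expanded Clip graph compares the input with a scalar bound (`min` or `max`) through `Less`, and `ShapeResult.broadcast` only accepts operands of the same rank, so a scalar shape `[]` against `[3, 4, 5]` is rejected even though standard ONNX/numpy broadcasting allows it. A minimal numpy check of the general rule (plain numpy, not mlprodict code):

<<<

import numpy as np

# Standard broadcasting accepts a scalar (rank 0) against any shape.
print(np.broadcast_shapes((), (3, 4, 5)))      # (3, 4, 5)

# The general rule: right-align both shapes, pad the shorter one with 1s,
# then every axis must match or be 1.
short, full = (), (3, 4, 5)
padded = (1,) * (len(full) - len(short)) + short
print(padded)                                  # (1, 1, 1)
print(np.broadcast_shapes(padded, full))       # (3, 4, 5)

>>>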
    
    ======================================================================
    ERROR: test_clip_default_int8_min_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
        return fct_shape(known_shape, node)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 87, in shape_less
        return _element_wise(known_shapes, node, return_bool=True)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 46, in _element_wise
        ShapeResult.broadcast(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 364, in broadcast
        raise NotImplementedShapeInferenceError(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.NotImplementedShapeInferenceError: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('int8')) and ShapeResult('min', [], dtype('int8')).
    
    ======================================================================
    ERROR: test_clip_default_max_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
        return fct_shape(known_shape, node)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 87, in shape_less
        return _element_wise(known_shapes, node, return_bool=True)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 46, in _element_wise
        ShapeResult.broadcast(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 364, in broadcast
        raise NotImplementedShapeInferenceError(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.NotImplementedShapeInferenceError: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('max', [], dtype('float32')) and ShapeResult('x', [3, 4, 5], dtype('float32')).
    
    ======================================================================
    ERROR: test_clip_default_min_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
        return fct_shape(known_shape, node)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 87, in shape_less
        return _element_wise(known_shapes, node, return_bool=True)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 46, in _element_wise
        ShapeResult.broadcast(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 364, in broadcast
        raise NotImplementedShapeInferenceError(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.NotImplementedShapeInferenceError: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('float32')) and ShapeResult('min', [], dtype('float32')).
    
    ======================================================================
    ERROR: test_clip_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
        return fct_shape(known_shape, node)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 87, in shape_less
        return _element_wise(known_shapes, node, return_bool=True)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 46, in _element_wise
        ShapeResult.broadcast(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 364, in broadcast
        raise NotImplementedShapeInferenceError(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.NotImplementedShapeInferenceError: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3], dtype('float32')) and ShapeResult('min', [], dtype('float32')).
    
    ======================================================================
    ERROR: test_clip_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
        return fct_shape(known_shape, node)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 87, in shape_less
        return _element_wise(known_shapes, node, return_bool=True)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 46, in _element_wise
        ShapeResult.broadcast(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 364, in broadcast
        raise NotImplementedShapeInferenceError(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.NotImplementedShapeInferenceError: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3, 4, 5], dtype('float32')) and ShapeResult('min', [], dtype('float32')).
    
    ======================================================================
    ERROR: test_clip_inbounds_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
        return fct_shape(known_shape, node)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 87, in shape_less
        return _element_wise(known_shapes, node, return_bool=True)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 46, in _element_wise
        ShapeResult.broadcast(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 364, in broadcast
        raise NotImplementedShapeInferenceError(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.NotImplementedShapeInferenceError: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3], dtype('float32')) and ShapeResult('min', [], dtype('float32')).
    
    ======================================================================
    ERROR: test_clip_outbounds_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
        return fct_shape(known_shape, node)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 87, in shape_less
        return _element_wise(known_shapes, node, return_bool=True)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 46, in _element_wise
        ShapeResult.broadcast(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 364, in broadcast
        raise NotImplementedShapeInferenceError(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.NotImplementedShapeInferenceError: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3], dtype('float32')) and ShapeResult('min', [], dtype('float32')).
    
    ======================================================================
    ERROR: test_clip_splitbounds_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
        return fct_shape(known_shape, node)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 87, in shape_less
        return _element_wise(known_shapes, node, return_bool=True)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 46, in _element_wise
        ShapeResult.broadcast(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 364, in broadcast
        raise NotImplementedShapeInferenceError(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.NotImplementedShapeInferenceError: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3], dtype('float32')) and ShapeResult('min', [], dtype('float32')).
    
    ======================================================================
    ERROR: test_col2im_5d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Col2Im' domain='', looking for 'shape_col2im' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_col2im_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Col2Im' domain='', looking for 'shape_col2im' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_col2im_dilations_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Col2Im' domain='', looking for 'shape_col2im' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_col2im_pads_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Col2Im' domain='', looking for 'shape_col2im' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_col2im_strides_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Col2Im' domain='', looking for 'shape_col2im' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_compress_0_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Compress' domain='', looking for 'shape_compress' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_compress_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Compress' domain='', looking for 'shape_compress' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_compress_default_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Compress' domain='', looking for 'shape_compress' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_compress_negative_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Compress' domain='', looking for 'shape_compress' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_concat_1d_axis_0_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
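The shape rule for Concat itself is straightforward: all inputs share the same rank, every dimension except the concatenation axis is carried over, and the axis dimension is the sum of the inputs' axis dimensions. The helper below is a hypothetical standalone version of that rule, written against plain tuples (with None for an unknown dimension) rather than the library's internal shape objects, to show what a registered shape_concat handler has to compute.

<<<

def concat_shape(shapes, axis):
    # Hypothetical helper, not the mlprodict API: output shape of Concat
    # over plain tuples; None stands for an unknown dimension.
    rank = len(shapes[0])
    axis = axis % rank                # negative axes count from the end
    out = list(shapes[0])
    out[axis] = 0
    for sh in shapes:
        if len(sh) != rank:
            raise ValueError("all inputs must have the same rank")
        if sh[axis] is None or out[axis] is None:
            out[axis] = None          # unknown stays unknown
        else:
            out[axis] += sh[axis]
    return tuple(out)

print(concat_shape([(2, 3), (2, 5)], axis=1))    # (2, 8)
print(concat_shape([(2, 3), (4, 3)], axis=-2))   # (6, 3)

>>>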
    
    ======================================================================
    ERROR: test_concat_1d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_concat_2d_axis_0_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_concat_2d_axis_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_concat_2d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_concat_2d_axis_negative_2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_concat_3d_axis_0_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_concat_3d_axis_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_concat_3d_axis_2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_concat_3d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_concat_3d_axis_negative_2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_concat_3d_axis_negative_3_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_constant_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
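Constant is another simple case on paper: the node has no input and its output is the tensor stored in its value attribute, so the output shape is exactly that tensor's shape. The snippet below only illustrates where that information lives in the protobuf; it does not add the missing shape_constant handler.

<<<

import numpy
from onnx import helper, numpy_helper

# A Constant node carries its own output as an attribute.
tensor = numpy_helper.from_array(
    numpy.zeros((2, 3), dtype=numpy.float32), name='value')
node = helper.make_node('Constant', [], ['y'], value=tensor)

attr = {a.name: a for a in node.attribute}['value']
print(numpy_helper.to_array(helper.get_attribute_value(attr)).shape)  # (2, 3)

>>>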
    
    ======================================================================
    ERROR: test_constant_pad_axes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Pad' domain='', looking for 'shape_pad' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_constant_pad_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Pad' domain='', looking for 'shape_pad' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_constantofshape_float_ones_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ConstantOfShape' domain='', looking for 'shape_constantofshape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
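ConstantOfShape is harder than the message suggests: its output shape is the value of its first input, not a function of that input's shape, so a shape_constantofshape handler can only resolve it when that value is known statically (a constant or an initializer). The snippet below illustrates the point on a model where the shape tensor is stored as an initializer; when the shape is only provided at run time, a static pass cannot determine the output shape.

<<<

import numpy
from onnx import TensorProto, helper, numpy_helper

# ConstantOfShape: the output shape is the *value* of the 'shape' input.
shape_value = numpy.array([4, 3, 2], dtype=numpy.int64)
node = helper.make_node('ConstantOfShape', ['shape'], ['y'])
graph = helper.make_graph(
    [node], 'g', [],
    [helper.make_tensor_value_info('y', TensorProto.FLOAT, None)],
    [numpy_helper.from_array(shape_value, name='shape')])

init = {i.name: i for i in graph.initializer}
print(tuple(numpy_helper.to_array(init['shape'])))  # (4, 3, 2)

>>>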
    
    ======================================================================
    ERROR: test_constantofshape_int_shape_zero_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ConstantOfShape' domain='', looking for 'shape_constantofshape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_constantofshape_int_zeros_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ConstantOfShape' domain='', looking for 'shape_constantofshape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_conv_with_autopad_same_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Conv' domain='', looking for 'shape_conv' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
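Conv is the first operator in this report whose output shape is not a copy or an elementwise combination of its inputs: the batch dimension is kept, the channel dimension comes from the weight tensor, and each spatial dimension follows the usual convolution formula. The helper below is a hypothetical standalone version of that formula for explicit padding (auto_pad left at NOTSET); it is meant only to show what a shape_conv handler would compute per spatial axis.

<<<

def conv_spatial_dim(in_size, kernel, stride=1, pad_begin=0, pad_end=0,
                     dilation=1):
    # Hypothetical helper, not the mlprodict API: output size of one
    # spatial axis for an explicitly padded convolution.
    effective_kernel = dilation * (kernel - 1) + 1
    return (in_size + pad_begin + pad_end - effective_kernel) // stride + 1

# A 7x5 input, 3x3 kernel, strides (2, 2), padding (1, 1) on the first
# spatial axis and none on the second gives a 4x2 output.
print(conv_spatial_dim(7, 3, stride=2, pad_begin=1, pad_end=1))  # 4
print(conv_spatial_dim(5, 3, stride=2))                          # 2

>>>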
    
    ======================================================================
    ERROR: test_conv_with_strides_and_asymmetric_padding_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Conv' domain='', looking for 'shape_conv' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_conv_with_strides_no_padding_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Conv' domain='', looking for 'shape_conv' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_conv_with_strides_padding_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Conv' domain='', looking for 'shape_conv' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
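
For reference, a shape function for Conv would only need to apply the standard ONNX output-size formula, out = (in + pad_begin + pad_end - dilation * (kernel - 1) - 1) // stride + 1, on each spatial axis. The sketch below is a generic illustration of that formula, not the function the backend expects; the example numbers describe a 1x1x7x5 input convolved with a 1x1x3x3 kernel.

<<<

def conv_output_shape(x_shape, w_shape, pads=None, strides=None, dilations=None):
    # Generic sketch of the standard ONNX Conv formula (not mlprodict code):
    # out = (in + pad_begin + pad_end - dilation * (kernel - 1) - 1) // stride + 1
    m, kernel = w_shape[0], w_shape[2:]
    rank = len(kernel)
    pads = pads or [0] * (2 * rank)
    strides = strides or [1] * rank
    dilations = dilations or [1] * rank
    spatial = [
        (x_shape[2 + i] + pads[i] + pads[rank + i]
         - dilations[i] * (kernel[i] - 1) - 1) // strides[i] + 1
        for i in range(rank)]
    return [x_shape[0], m] + spatial

print(conv_output_shape([1, 1, 7, 5], [1, 1, 3, 3],
                        pads=[1, 1, 1, 1], strides=[2, 2]))  # [1, 1, 4, 3]

>>>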
    
    ======================================================================
    ERROR: test_convinteger_with_padding_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ConvInteger' domain='', looking for 'shape_convinteger' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_convinteger_without_padding_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ConvInteger' domain='', looking for 'shape_convinteger' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_convtranspose_1d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_convtranspose_3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_convtranspose_autopad_same_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_convtranspose_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_convtranspose_dilations_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_convtranspose_kernel_shape_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_convtranspose_output_shape_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_convtranspose_pad_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_convtranspose_pads_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ConvTranspose' domain='', looking for 'shape_convtranspose' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_cumsum_1d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'CumSum' domain='', looking for 'shape_cumsum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
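
The CumSum failures are of the same kind: 'shape_cumsum' is not registered even though CumSum simply preserves the shape of its first input. The sketch below, a made-up one-node model run through onnx's own shape inference rather than the backend under test, shows the shape such a function would have to return.

<<<

from onnx import TensorProto, helper, shape_inference

# Made-up CumSum model for illustration only: x has shape (3, 4),
# so the output should be inferred with the same shape.
node = helper.make_node("CumSum", ["x", "axis"], ["y"])
graph = helper.make_graph(
    [node], "cumsum_shape_example",
    [helper.make_tensor_value_info("x", TensorProto.FLOAT, [3, 4]),
     helper.make_tensor_value_info("axis", TensorProto.INT32, [])],
    [helper.make_tensor_value_info("y", TensorProto.FLOAT, None)])
model = helper.make_model(graph, opset_imports=[helper.make_opsetid("", 14)])

inferred = shape_inference.infer_shapes(model)
# the inferred output shape should show dims 3 and 4
print(inferred.graph.output[0].type.tensor_type.shape)

>>>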
    
    ======================================================================
    ERROR: test_cumsum_1d_exclusive_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'CumSum' domain='', looking for 'shape_cumsum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_cumsum_1d_reverse_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'CumSum' domain='', looking for 'shape_cumsum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_cumsum_1d_reverse_exclusive_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'CumSum' domain='', looking for 'shape_cumsum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_cumsum_2d_axis_0_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'CumSum' domain='', looking for 'shape_cumsum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_cumsum_2d_axis_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'CumSum' domain='', looking for 'shape_cumsum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_cumsum_2d_negative_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'CumSum' domain='', looking for 'shape_cumsum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_depthtospace_crd_mode_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'DepthToSpace' domain='', looking for 'shape_depthtospace' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_depthtospace_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'DepthToSpace' domain='', looking for 'shape_depthtospace' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_dequantizelinear_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'DequantizeLinear' domain='', looking for 'shape_dequantizelinear' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_dequantizelinear_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'DequantizeLinear' domain='', looking for 'shape_dequantizelinear' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_det_2d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 47, in run
        raise RuntimeError(  # pragma: no cover
    RuntimeError: Incompatible shapes ShapeResult('y', [], dtype('float32')) and (1,) for output 'y'.
    
    ======================================================================
    ERROR: test_dft_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'DFT' domain='', looking for 'shape_dft' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_dft_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'DFT' domain='', looking for 'shape_dft' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_dft_inverse_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'DFT' domain='', looking for 'shape_dft' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_dropout_default_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_dropout_default_mask_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_dropout_default_mask_ratio_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_dropout_default_old_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_dropout_default_ratio_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_dropout_random_old_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_dynamicquantizelinear_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_dynamicquantizelinear_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_dynamicquantizelinear_max_adjusted_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_dynamicquantizelinear_max_adjusted_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_dynamicquantizelinear_min_adjusted_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
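
    # A self-contained sketch for illustration (the helper below is hypothetical,
    # not mlprodict's missing 'shape_constant'): a Constant node carries its value
    # as an attribute, so its output shape is simply the shape of that tensor.
    import numpy
    from onnx import helper, numpy_helper

    def constant_output_shape(node):
        # Return the shape of the tensor stored in the node's 'value' attribute.
        for att in node.attribute:
            if att.name == "value":
                return tuple(numpy_helper.to_array(att.t).shape)
        raise ValueError("Constant node has no 'value' attribute.")

    node = helper.make_node(
        "Constant", inputs=[], outputs=["cst"],
        value=numpy_helper.from_array(numpy.zeros((3, 4), dtype=numpy.float32)))
    print(constant_output_shape(node))  # (3, 4)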
    
    ======================================================================
    ERROR: test_dynamicquantizelinear_min_adjusted_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_edge_pad_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Pad' domain='', looking for 'shape_pad' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
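
    # Illustrative sketch only ('pad_output_shape' is a hypothetical helper, not
    # part of mlprodict): the output of 'Pad' adds the leading and trailing pad
    # counts on each axis; in recent opsets the pads are a runtime input, which
    # is part of what makes static shape inference harder here.
    def pad_output_shape(input_shape, pads):
        rank = len(input_shape)
        assert len(pads) == 2 * rank, "pads holds begin then end values per axis"
        return tuple(d + pads[i] + pads[i + rank] for i, d in enumerate(input_shape))

    print(pad_output_shape((1, 3, 4, 5), [0, 0, 1, 1, 0, 0, 1, 1]))  # (1, 3, 6, 7)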
    
    ======================================================================
    ERROR: test_einsum_batch_diagonal_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Einsum' domain='', looking for 'shape_einsum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
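
    # Sketch under a simple assumption (not mlprodict's implementation): the
    # output shape of an Einsum node can be read off numpy.einsum applied to
    # dummy arrays carrying the candidate input shapes.
    import numpy

    def einsum_output_shape(equation, *input_shapes):
        dummies = [numpy.zeros(shape, dtype=numpy.float32) for shape in input_shapes]
        return numpy.einsum(equation, *dummies).shape

    print(einsum_output_shape("...ii->...i", (3, 5, 5)))              # (3, 5)
    print(einsum_output_shape("bij,bjk->bik", (2, 3, 4), (2, 4, 5)))  # (2, 3, 5)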
    
    ======================================================================
    ERROR: test_einsum_batch_matmul_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Einsum' domain='', looking for 'shape_einsum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_einsum_inner_prod_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Einsum' domain='', looking for 'shape_einsum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_einsum_sum_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Einsum' domain='', looking for 'shape_einsum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_einsum_transpose_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Einsum' domain='', looking for 'shape_einsum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_elu_default_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_elu_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_elu_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_expand_dim_changed_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Expand' domain='', looking for 'shape_expand' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
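
    # Sketch assuming numpy broadcasting semantics for 'Expand' (the hypothetical
    # helper below is not mlprodict's missing 'shape_expand'; note the requested
    # shape is itself a runtime input, so a real shape function needs its value).
    import numpy

    def expand_output_shape(input_shape, requested_shape):
        # 'Expand' broadcasts the input against the requested shape.
        return numpy.broadcast_shapes(tuple(input_shape), tuple(requested_shape))

    print(expand_output_shape((3, 1), (2, 1, 6)))  # (2, 3, 6)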
    
    ======================================================================
    ERROR: test_expand_dim_unchanged_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Expand' domain='', looking for 'shape_expand' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_eyelike_populate_off_main_diagonal_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'EyeLike' domain='', looking for 'shape_eyelike' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
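
    # Illustrative sketch (hypothetical helper, not mlprodict code): 'EyeLike'
    # keeps the 2D input shape unchanged; only the element type may change
    # through the optional 'dtype' attribute.
    import numpy

    def eyelike_output(input_shape, k=0, dtype=numpy.float32):
        rows, cols = input_shape  # EyeLike is defined for 2D inputs only
        return numpy.eye(rows, cols, k=k, dtype=dtype)

    print(eyelike_output((4, 5), k=1).shape)  # (4, 5)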
    
    ======================================================================
    ERROR: test_eyelike_with_dtype_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'EyeLike' domain='', looking for 'shape_eyelike' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_eyelike_without_dtype_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'EyeLike' domain='', looking for 'shape_eyelike' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_flatten_axis0_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
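
    # Illustrative sketch (hypothetical helper, not mlprodict's 'shape_flatten'):
    # per the ONNX specification, Flatten reshapes to 2D around 'axis', giving
    # (prod(shape[:axis]), prod(shape[axis:])); a negative axis counts from the end.
    from math import prod

    def flatten_output_shape(input_shape, axis=1):
        if axis < 0:
            axis += len(input_shape)
        return (prod(input_shape[:axis]), prod(input_shape[axis:]))

    print(flatten_output_shape((2, 3, 4, 5), axis=0))   # (1, 120)
    print(flatten_output_shape((2, 3, 4, 5), axis=2))   # (6, 20)
    print(flatten_output_shape((2, 3, 4, 5), axis=-1))  # (24, 5)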
    
    ======================================================================
    ERROR: test_flatten_axis1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_flatten_axis2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_flatten_axis3_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_flatten_default_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_flatten_negative_axis1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_flatten_negative_axis2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_flatten_negative_axis3_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_flatten_negative_axis4_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
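Every Flatten test above fails for the same reason: `shape_dispatch` derives the name `shape_flatten` from the operator type and finds no such function in `mlprodict.onnxrt.ops_shape`, so shape inference stops before the model can even be prepared. The shape rule itself is short; the sketch below states it against plain integer tuples rather than the library's internal shape objects. The helper `flatten_shape` and its signature are hypothetical, used only to show what a missing shape function would have to compute, not how the library registers one.

<<<

from math import prod

def flatten_shape(input_shape, axis=1):
    # ONNX Flatten always produces a 2-D tensor:
    # (product of dims before axis, product of dims from axis onward).
    rank = len(input_shape)
    if axis < 0:  # a negative axis counts from the end
        axis += rank
    if not 0 <= axis <= rank:
        raise ValueError(f"axis={axis} out of range for rank {rank}")
    return (prod(input_shape[:axis]), prod(input_shape[axis:]))

# default axis=1 on a rank-4 input
assert flatten_shape((2, 3, 4, 5)) == (2, 60)
# negative axis, as exercised by the *_negative_axis* tests
assert flatten_shape((2, 3, 4, 5), axis=-1) == (24, 5)

>>>
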
    ======================================================================
    ERROR: test_gather_0_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Gather' domain='', looking for 'shape_gather' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gather_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Gather' domain='', looking for 'shape_gather' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gather_2d_indices_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Gather' domain='', looking for 'shape_gather' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gather_elements_0_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GatherElements' domain='', looking for 'shape_gatherelements' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gather_elements_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GatherElements' domain='', looking for 'shape_gatherelements' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gather_elements_negative_indices_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GatherElements' domain='', looking for 'shape_gatherelements' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gather_negative_indices_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Gather' domain='', looking for 'shape_gather' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
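The Gather and GatherElements tests fail in the same way: the dispatcher looks for `shape_gather` and `shape_gatherelements` and finds neither. Their shape rules, again sketched over plain tuples with hypothetical helper names (`gather_shape`, `gather_elements_shape`), would look roughly like this:

<<<

def gather_shape(data_shape, indices_shape, axis=0):
    # ONNX Gather: the dimension of data selected by axis is replaced
    # by the full shape of the indices tensor.
    rank = len(data_shape)
    if axis < 0:
        axis += rank
    return (tuple(data_shape[:axis]) + tuple(indices_shape)
            + tuple(data_shape[axis + 1:]))

def gather_elements_shape(data_shape, indices_shape):
    # ONNX GatherElements: data and indices have the same rank and the
    # output has exactly the shape of indices.
    if len(data_shape) != len(indices_shape):
        raise ValueError("data and indices must have the same rank")
    return tuple(indices_shape)

assert gather_shape((5, 4, 3, 2), (6, 7), axis=0) == (6, 7, 4, 3, 2)
assert gather_shape((5, 4, 3, 2), (6,), axis=2) == (5, 4, 6, 2)
assert gather_elements_shape((3, 3), (2, 3)) == (2, 3)

>>>
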
    ======================================================================
    ERROR: test_gathernd_example_float32_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GatherND' domain='', looking for 'shape_gathernd' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gathernd_example_int32_batch_dim1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GatherND' domain='', looking for 'shape_gathernd' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gathernd_example_int32_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GatherND' domain='', looking for 'shape_gathernd' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
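GatherND is also unhandled (`shape_gathernd` is missing). Its rule depends on the last dimension of the indices tensor and on the `batch_dims` attribute; the sketch below uses a hypothetical helper `gather_nd_shape` over plain tuples.

<<<

def gather_nd_shape(data_shape, indices_shape, batch_dims=0):
    # ONNX GatherND: the last dimension of indices (k) indexes into data;
    # output shape = indices_shape[:-1] + data_shape[batch_dims + k:].
    k = indices_shape[-1]
    if k > len(data_shape) - batch_dims:
        raise ValueError("last dimension of indices is too large")
    return tuple(indices_shape[:-1]) + tuple(data_shape[batch_dims + k:])

assert gather_nd_shape((2, 2), (2, 2)) == (2,)
assert gather_nd_shape((2, 3, 4), (5, 2)) == (5, 4)
assert gather_nd_shape((2, 2, 2), (2, 1), batch_dims=1) == (2, 2)

>>>
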
    ======================================================================
    ERROR: test_gemm_all_attributes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gemm_alpha_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gemm_beta_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gemm_default_matrix_bias_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gemm_default_no_bias_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gemm_default_scalar_bias_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gemm_default_single_elem_vector_bias_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gemm_default_vector_bias_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gemm_default_zero_bias_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gemm_transposeA_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gemm_transposeB_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_globalaveragepool_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GlobalAveragePool' domain='', looking for 'shape_globalaveragepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_globalaveragepool_precomputed_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GlobalAveragePool' domain='', looking for 'shape_globalaveragepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_globalmaxpool_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GlobalMaxPool' domain='', looking for 'shape_globalmaxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_globalmaxpool_precomputed_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GlobalMaxPool' domain='', looking for 'shape_globalmaxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gridsample_aligncorners_true_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GridSample' domain='', looking for 'shape_gridsample' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gridsample_bicubic_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GridSample' domain='', looking for 'shape_gridsample' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gridsample_bilinear_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GridSample' domain='', looking for 'shape_gridsample' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gridsample_border_padding_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GridSample' domain='', looking for 'shape_gridsample' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gridsample_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GridSample' domain='', looking for 'shape_gridsample' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gridsample_nearest_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GridSample' domain='', looking for 'shape_gridsample' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gridsample_reflection_padding_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GridSample' domain='', looking for 'shape_gridsample' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gridsample_zeros_padding_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GridSample' domain='', looking for 'shape_gridsample' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_group_normalization_epsilon_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 258, in _init
        node.setup_runtime(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 240, in setup_runtime
        self.ops_ = load_op(self.onnx_node, desc=self.desc,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
        return lo(onnx_node, desc=desc, options=options)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 91, in load_op
        cl = onnx_load_op(options.get('domain', ''),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/reference/ops/_op_list.py", line 294, in load_op
        raise RuntimeContextError(
    onnx.reference.op_run.RuntimeContextError: No registered implementation for operator 'GroupNormalization' and domain '', the operator has a context dependent function. but argument node or input_types is not defined.
    
    ======================================================================
    ERROR: test_group_normalization_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_group_normalization_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 258, in _init
        node.setup_runtime(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 240, in setup_runtime
        self.ops_ = load_op(self.onnx_node, desc=self.desc,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
        return lo(onnx_node, desc=desc, options=options)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 91, in load_op
        cl = onnx_load_op(options.get('domain', ''),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/reference/ops/_op_list.py", line 294, in load_op
        raise RuntimeContextError(
    onnx.reference.op_run.RuntimeContextError: No registered implementation for operator 'GroupNormalization' and domain '', the operator has a context dependent function. but argument node or input_types is not defined.
    
    ======================================================================
    ERROR: test_group_normalization_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gru_batchwise_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GRU' domain='', looking for 'shape_gru' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gru_defaults_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GRU' domain='', looking for 'shape_gru' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gru_seq_length_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GRU' domain='', looking for 'shape_gru' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gru_with_initial_bias_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'GRU' domain='', looking for 'shape_gru' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_hammingwindow_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_hammingwindow_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_hammingwindow_symmetric_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_hammingwindow_symmetric_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_hannwindow_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_hannwindow_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_hannwindow_symmetric_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_hannwindow_symmetric_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_hardsigmoid_default_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_hardsigmoid_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_hardsigmoid_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_identity_opt_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 619, in to_sequence
        variables[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 459, in _var_as_dict
        dtype['optional'] = _var_as_dict(optional)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 558, in _var_as_dict
        return dict(optional=True, elem_type=_var_as_dict(var.elem_type))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 553, in _var_as_dict
        d[n] = _var_as_dict(at)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 560, in _var_as_dict
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: Unable to guess which object it is type is <class 'onnx.onnx_ml_pb2.Sequence'> value is 'elem_type {\n  tensor_type {\n    elem_type: 1\n    shape {\n      dim {\n        dim_value: 5\n      }\n    }\n  }\n}\n' (hasattr(var,'type')=False, var.type=None
    ByteSize
    Clear
    ClearExtension
    ClearField
    CopyFrom
    DESCRIPTOR
    DiscardUnknownFields
    Extensions
    FindInitializationErrors
    FromString
    HasExtension
    HasField
    IsInitialized
    ListFields
    MergeFrom
    MergeFromString
    ParseFromString
    RegisterExtension
    SerializePartialToString
    SerializeToString
    SetInParent
    UnknownFields
    WhichOneof
    _CheckCalledFromGeneratedFile
    _SetListener
    __class__
    __deepcopy__
    __delattr__
    __dir__
    __doc__
    __eq__
    __format__
    __ge__
    __getattribute__
    __getstate__
    __gt__
    __hash__
    __init__
    __init_subclass__
    __le__
    __lt__
    __module__
    __ne__
    __new__
    __reduce__
    __reduce_ex__
    __repr__
    __setattr__
    __setstate__
    __sizeof__
    __slots__
    __str__
    __subclasshook__
    __unicode__
    _extensions_by_name
    _extensions_by_number
    elem_type
    
    ======================================================================
    ERROR: test_identity_sequence_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 49, in run
        shapes[k], v.shape, k))
    AttributeError: 'list' object has no attribute 'shape'
    
    ======================================================================
    ERROR: test_if_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'If' domain='', looking for 'shape_if' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_if_opt_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 629, in to_sequence
        outputs[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 459, in _var_as_dict
        dtype['optional'] = _var_as_dict(optional)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 558, in _var_as_dict
        return dict(optional=True, elem_type=_var_as_dict(var.elem_type))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 553, in _var_as_dict
        d[n] = _var_as_dict(at)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 560, in _var_as_dict
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: Unable to guess which object it is type is <class 'onnx.onnx_ml_pb2.Sequence'> value is 'elem_type {\n  tensor_type {\n    elem_type: 1\n    shape {\n      dim {\n        dim_value: 5\n      }\n    }\n  }\n}\n' (hasattr(var,'type')=False, var.type=None
    ByteSize
    Clear
    ClearExtension
    ClearField
    CopyFrom
    DESCRIPTOR
    DiscardUnknownFields
    Extensions
    FindInitializationErrors
    FromString
    HasExtension
    HasField
    IsInitialized
    ListFields
    MergeFrom
    MergeFromString
    ParseFromString
    RegisterExtension
    SerializePartialToString
    SerializeToString
    SetInParent
    UnknownFields
    WhichOneof
    _CheckCalledFromGeneratedFile
    _SetListener
    __class__
    __deepcopy__
    __delattr__
    __dir__
    __doc__
    __eq__
    __format__
    __ge__
    __getattribute__
    __getstate__
    __gt__
    __hash__
    __init__
    __init_subclass__
    __le__
    __lt__
    __module__
    __ne__
    __new__
    __reduce__
    __reduce_ex__
    __repr__
    __setattr__
    __setstate__
    __sizeof__
    __slots__
    __str__
    __subclasshook__
    __unicode__
    _extensions_by_name
    _extensions_by_number
    elem_type
    
    ======================================================================
    ERROR: test_if_seq_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'If' domain='', looking for 'shape_if' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_instancenorm_epsilon_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'InstanceNormalization' domain='', looking for 'shape_instancenormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_instancenorm_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'InstanceNormalization' domain='', looking for 'shape_instancenormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_2d_axis0_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_2d_axis0_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_2d_axis0_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_2d_axis1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_2d_axis1_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_2d_axis1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_2d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_2d_axis_negative_1_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_2d_axis_negative_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_2d_axis_negative_2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_2d_axis_negative_2_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_2d_axis_negative_2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis0_epsilon_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis0_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis0_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis1_epsilon_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
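
All of the failures above share one cause: while the backend is prepared, OnnxShapeInference walks the graph and shape_dispatch looks for a function named shape_<lowercased op type>; no such function exists for 'Constant' or 'LayerNormalization', so the model never reaches execution. The sketch below reproduces only that naming rule as it appears in the messages; the registry, signatures and function bodies are illustrative assumptions, not mlprodict's actual code.

<<<

# Minimal sketch of the dispatch rule implied by the errors above.
# Everything here (registry, signatures, bodies) is illustrative only;
# the real implementation lives in mlprodict.onnxrt.ops_shape.

class ShapeInferenceMissing(RuntimeError):
    "Raised when no shape function matches an operator type."


# Toy registry: the report lists functions such as shape_abs, shape_add, ...
_SHAPE_FUNCTIONS = {
    "shape_abs": lambda shapes, node: shapes,   # placeholder body
    "shape_add": lambda shapes, node: shapes,   # placeholder body
}


def shape_dispatch(op_type, shapes, node=None):
    # 'Constant' -> 'shape_constant',
    # 'LayerNormalization' -> 'shape_layernormalization'
    name = "shape_%s" % op_type.lower()
    fct = _SHAPE_FUNCTIONS.get(name)
    if fct is None:
        raise ShapeInferenceMissing(
            "Unable to find a corresponding function for operator type "
            "%r, looking for %r among %s." % (
                op_type, name, ", ".join(sorted(_SHAPE_FUNCTIONS))))
    return fct(shapes, node)


shape_dispatch("Abs", {})           # dispatches to shape_abs
try:
    shape_dispatch("Constant", {})  # no shape_constant -> same failure as above
except ShapeInferenceMissing as e:
    print(e)

>>>

Until shape functions for these operators are registered, every test whose model contains them fails during prepare, before any output comparison takes place, which is why the same traceback repeats for each layer-normalization variant below.
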
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis1_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis1_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis2_epsilon_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis2_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis2_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis_negative_1_epsilon_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis_negative_1_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis_negative_1_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis_negative_2_epsilon_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis_negative_2_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis_negative_2_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis_negative_3_epsilon_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis_negative_3_epsilon_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_3d_axis_negative_3_epsilon_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis0_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis0_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis0_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis1_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
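
The LayerNormalization failures point at the same gap for a second operator: no shape_layernormalization handler is registered. The shape rule such a handler would have to implement is fully determined by the ONNX specification: Y keeps the shape of X, while the optional Mean and InvStdDev outputs keep the dimensions before axis and collapse the normalized dimensions to 1. A small hypothetical sketch of that rule (function name and signature invented for illustration):

<<<

# Hypothetical sketch of the LayerNormalization shape rule; only the
# ONNX shape semantics are taken from the operator specification.


def layer_normalization_shapes(x_shape, axis=-1):
    "Return the shapes of Y, Mean and InvStdDev for a given input shape."
    rank = len(x_shape)
    axis = axis % rank  # handles negative axes such as -1
    y_shape = tuple(x_shape)  # Y keeps the input shape
    reduced = tuple(x_shape[:axis]) + (1,) * (rank - axis)
    return y_shape, reduced, reduced


print(layer_normalization_shapes((2, 3, 4, 5), axis=2))
# ((2, 3, 4, 5), (2, 3, 1, 1), (2, 3, 1, 1))

>>>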
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis2_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis3_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis3_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis3_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis_negative_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis_negative_1_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis_negative_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis_negative_2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis_negative_2_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis_negative_2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis_negative_3_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis_negative_3_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis_negative_3_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis_negative_4_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis_negative_4_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_4d_axis_negative_4_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_default_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LayerNormalization' domain='', looking for 'shape_layernormalization' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_default_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_layer_normalization_default_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_leakyrelu_default_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_leakyrelu_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_leakyrelu_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_logsoftmax_axis_0_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_logsoftmax_axis_0_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_logsoftmax_axis_1_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_logsoftmax_axis_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_logsoftmax_axis_2_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_logsoftmax_axis_2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_logsoftmax_default_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_logsoftmax_default_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_logsoftmax_example_1_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_logsoftmax_example_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_logsoftmax_large_number_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_logsoftmax_large_number_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_logsoftmax_negative_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_logsoftmax_negative_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_loop11_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Loop' domain='', looking for 'shape_loop' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
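
Operators carrying subgraphs such as Loop hit the same limitation as Constant and LayerNormalization. Before running this backend on a model, one way to anticipate these errors is to list the operator types that have no matching shape function among the shape_* names exposed by mlprodict.onnxrt.ops_shape (the module quoted in the tracebacks). The sketch below assumes that naming convention; the file name 'model.onnx' is a placeholder, not a model from this report.

<<<

# Hedged sketch: list operator types of a model that have no matching
# 'shape_<op_type>' function in mlprodict.onnxrt.ops_shape.
import onnx
import mlprodict.onnxrt.ops_shape as ops_shape

# names such as shape_abs, shape_add, ... as listed in the errors above
available = {name for name in dir(ops_shape) if name.startswith('shape_')}

model = onnx.load('model.onnx')  # placeholder path
missing = sorted({
    node.op_type for node in model.graph.node
    if 'shape_%s' % node.op_type.lower() not in available})
print(missing)  # e.g. Constant, LayerNormalization, Loop for the tests above

>>>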
    
    ======================================================================
    ERROR: test_loop13_seq_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Loop' domain='', looking for 'shape_loop' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
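
Both Loop failures above come from the same dispatch mechanism: the shape backend looks for a function named shape_<operator> (lower-cased) and raises ShapeInferenceMissing because no shape_loop is registered. A quick way to list the operators that do have a handler is to inspect the package directly; this is only a sketch and assumes the handlers are exposed as attributes of mlprodict.onnxrt.ops_shape, as the error message suggests.

<<<

import mlprodict.onnxrt.ops_shape as ops_shape

# Names starting with 'shape_' exposed by the package; this should roughly match
# the list printed in the errors above (a few entries, such as shape_excs,
# look like helper submodules rather than operator handlers).
print(sorted(name for name in dir(ops_shape) if name.startswith('shape_')))

>>>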
    
    ======================================================================
    ERROR: test_loop16_seq_none_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 619, in to_sequence
        variables[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 459, in _var_as_dict
        dtype['optional'] = _var_as_dict(optional)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 558, in _var_as_dict
        return dict(optional=True, elem_type=_var_as_dict(var.elem_type))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 553, in _var_as_dict
        d[n] = _var_as_dict(at)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 560, in _var_as_dict
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: Unable to guess which object it is type is <class 'onnx.onnx_ml_pb2.Sequence'> value is 'elem_type {\n  tensor_type {\n    elem_type: 1\n    shape {\n    }\n  }\n}\n' (hasattr(var,'type')=False, var.type=None
    ByteSize
    Clear
    ClearExtension
    ClearField
    CopyFrom
    DESCRIPTOR
    DiscardUnknownFields
    Extensions
    FindInitializationErrors
    FromString
    HasExtension
    HasField
    IsInitialized
    ListFields
    MergeFrom
    MergeFromString
    ParseFromString
    RegisterExtension
    SerializePartialToString
    SerializeToString
    SetInParent
    UnknownFields
    WhichOneof
    _CheckCalledFromGeneratedFile
    _SetListener
    __class__
    __deepcopy__
    __delattr__
    __dir__
    __doc__
    __eq__
    __format__
    __ge__
    __getattribute__
    __getstate__
    __gt__
    __hash__
    __init__
    __init_subclass__
    __le__
    __lt__
    __module__
    __ne__
    __new__
    __reduce__
    __reduce_ex__
    __repr__
    __setattr__
    __setstate__
    __sizeof__
    __slots__
    __str__
    __subclasshook__
    __unicode__
    _extensions_by_name
    _extensions_by_number
    elem_type
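
The failure above for test_loop16_seq_none_cpu is different: it comes from the OnnxInference half of the combined backend, whose _var_as_dict helper does not handle outputs typed as an optional sequence of tensors. The snippet below only illustrates what such a type proto looks like when built with onnx.helper; the names are made up for the example.

<<<

from onnx import TensorProto, helper

# An optional sequence of float tensors, similar to the output type
# shown in the error message above.
tensor_type = helper.make_tensor_type_proto(TensorProto.FLOAT, shape=[])
seq_type = helper.make_sequence_type_proto(tensor_type)
opt_type = helper.make_optional_type_proto(seq_type)
print(helper.make_value_info('opt_seq_output', opt_type))

>>>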
    
    ======================================================================
    ERROR: test_lrn_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LRN' domain='', looking for 'shape_lrn' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_lrn_default_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LRN' domain='', looking for 'shape_lrn' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
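
Both LRN tests fail only because no shape_lrn handler exists, yet the operator does not change the tensor shape: it normalizes across the channel axis and returns a tensor of the same shape as its input. The toy graph below (not taken from the test suite) uses onnx's own shape inference to show that the LRN output keeps the input shape.

<<<

import onnx
from onnx import TensorProto, helper

# A small LRN graph followed by Identity, so that the LRN output 'Y'
# shows up in value_info once shapes are inferred.
nodes = [helper.make_node('LRN', ['X'], ['Y'], size=3),
         helper.make_node('Identity', ['Y'], ['Z'])]
graph = helper.make_graph(
    nodes, 'lrn_shape',
    [helper.make_tensor_value_info('X', TensorProto.FLOAT, [1, 3, 5, 5])],
    [helper.make_tensor_value_info('Z', TensorProto.FLOAT, None)])
model = helper.make_model(graph, opset_imports=[helper.make_opsetid('', 17)])
inferred = onnx.shape_inference.infer_shapes(model)
print(inferred.graph.value_info)  # Y is inferred as float32 with shape [1, 3, 5, 5]

>>>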
    
    ======================================================================
    ERROR: test_lstm_batchwise_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LSTM' domain='', looking for 'shape_lstm' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_lstm_defaults_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LSTM' domain='', looking for 'shape_lstm' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_lstm_with_initial_bias_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LSTM' domain='', looking for 'shape_lstm' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_lstm_with_peepholes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'LSTM' domain='', looking for 'shape_lstm' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_matmul_2d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MatMul' domain='', looking for 'shape_matmul' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_matmul_3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MatMul' domain='', looking for 'shape_matmul' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_matmul_4d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MatMul' domain='', looking for 'shape_matmul' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_matmulinteger_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MatMulInteger' domain='', looking for 'shape_matmulinteger' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
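
MatMul and MatMulInteger fail for the same missing-handler reason, and both follow numpy's matmul shape rule: the last two axes behave like a matrix product and any leading axes broadcast. A few shapes worked out with numpy show what a shape_matmul handler would have to compute.

<<<

import numpy

# numpy.matmul uses the same shape rule as ONNX MatMul: contract the last axis
# of the first input with the second-to-last axis of the second input and
# broadcast the remaining (batch) dimensions.
print(numpy.matmul(numpy.zeros((3, 4)), numpy.zeros((4, 3))).shape)              # (3, 3)
print(numpy.matmul(numpy.zeros((2, 3, 4)), numpy.zeros((2, 4, 3))).shape)        # (2, 3, 3)
print(numpy.matmul(numpy.zeros((1, 2, 3, 4)), numpy.zeros((5, 1, 4, 3))).shape)  # (5, 2, 3, 3)

>>>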
    
    ======================================================================
    ERROR: test_maxpool_1d_default_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
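
The MaxPool tests all fail with the same missing shape_maxpool handler. Unlike the element-wise operators already covered by the backend, the pooled spatial dimensions depend on kernel size, strides, pads and dilations. The helper below is only a sketch of that arithmetic, following the formula from the ONNX MaxPool specification with auto_pad left at NOTSET; the function name is made up for the example.

<<<

import math

def pooled_dim(size, kernel, stride=1, pad_begin=0, pad_end=0,
               dilation=1, ceil_mode=False):
    # floor or ceil of (size + pads - dilation * (kernel - 1) - 1) / stride, plus 1
    effective_kernel = dilation * (kernel - 1) + 1
    numerator = size + pad_begin + pad_end - effective_kernel
    rounding = math.ceil if ceil_mode else math.floor
    return rounding(numerator / stride) + 1

# A 32-long axis with a 2-wide kernel: stride 1 gives 31, stride 2 gives 16.
print(pooled_dim(32, 2))            # 31
print(pooled_dim(32, 2, stride=2))  # 16

>>>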
    
    ======================================================================
    ERROR: test_maxpool_2d_ceil_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_maxpool_2d_default_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_maxpool_2d_dilations_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_maxpool_2d_pads_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_maxpool_2d_precomputed_pads_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_maxpool_2d_precomputed_same_upper_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_maxpool_2d_precomputed_strides_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_maxpool_2d_same_lower_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_maxpool_2d_same_upper_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_maxpool_2d_strides_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_maxpool_2d_uint8_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_maxpool_3d_default_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_maxpool_with_argmax_2d_precomputed_pads_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_maxpool_with_argmax_2d_precomputed_strides_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_maxunpool_export_with_output_shape_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxUnpool' domain='', looking for 'shape_maxunpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_maxunpool_export_without_output_shape_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxUnpool' domain='', looking for 'shape_maxunpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_mean_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Mean' domain='', looking for 'shape_mean' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_mean_one_input_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Mean' domain='', looking for 'shape_mean' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_mean_two_inputs_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Mean' domain='', looking for 'shape_mean' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_melweightmatrix_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MelWeightMatrix' domain='', looking for 'shape_melweightmatrix' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_momentum_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Momentum' domain='ai.onnx.preview.training', looking for 'shape_momentum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_momentum_multiple_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Momentum' domain='ai.onnx.preview.training', looking for 'shape_momentum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_mvn_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 258, in _init
        node.setup_runtime(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 240, in setup_runtime
        self.ops_ = load_op(self.onnx_node, desc=self.desc,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
        return lo(onnx_node, desc=desc, options=options)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 232, in load_op
        return cl(onnx_node, {'log': None})
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 172, in <lambda>
        new_cls = lambda *args, sess=sess: OpFunction(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/_op.py", line 814, in __init__
        self.attributes_ = {
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/_op.py", line 815, in <dictcomp>
        name: getattr(self, name)
    AttributeError: 'OpFunction' object has no attribute 'axes'
    
    ======================================================================
    ERROR: test_mvn_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_mvn_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nesterov_momentum_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Momentum' domain='ai.onnx.preview.training', looking for 'shape_momentum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NC_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NC_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1_ii_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1_mean_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1_mean_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1_weight_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1_weight_ii_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1_weight_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2_no_weight_reduction_mean_ii_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2_no_weight_reduction_mean_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2_reduction_mean_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2_reduction_mean_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2_reduction_sum_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2_reduction_sum_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2_with_weight_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2_with_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2_with_weight_reduction_mean_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2_with_weight_reduction_mean_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2_with_weight_reduction_sum_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2_with_weight_reduction_sum_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2_with_weight_reduction_sum_ii_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2_with_weight_reduction_sum_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2d3_none_no_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2d3_none_no_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2d3_sum_weight_high_ii_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2d3_sum_weight_high_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2d3d4d5_mean_weight_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2d3d4d5_mean_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2d3d4d5_none_no_weight_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NegativeLogLikelihoodLoss' domain='', looking for 'shape_negativeloglikelihoodloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nllloss_NCd1d2d3d4d5_none_no_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
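The many 'Constant' failures are triggered before any test data is fed to the model: OnnxShapeInference runs shape inference at construction time (the _run_empty call in the traceback) and raises on the first node it cannot handle. The sketch below reproduces that failure outside the backend test runner. It only assumes what the traceback shows, namely that OnnxShapeInference accepts an ONNX model and that ShapeInferenceMissing lives in mlprodict.onnxrt.ops_shape.shape_excs; the tiny two-node graph is a throwaway example.

<<<

import numpy
from onnx import TensorProto, numpy_helper
from onnx.helper import (
    make_graph, make_model, make_node, make_tensor_value_info)
from mlprodict.onnxrt.onnx_shape_inference import OnnxShapeInference
from mlprodict.onnxrt.ops_shape.shape_excs import ShapeInferenceMissing

# Throwaway graph: Y = X + Constant([[1, 2]]).
cst = numpy_helper.from_array(
    numpy.array([[1., 2.]], dtype=numpy.float32), name='cst')
node1 = make_node('Constant', [], ['C'], value=cst)
node2 = make_node('Add', ['X', 'C'], ['Y'])
X = make_tensor_value_info('X', TensorProto.FLOAT, [None, 2])
Y = make_tensor_value_info('Y', TensorProto.FLOAT, [None, 2])
model = make_model(make_graph([node1, node2], 'g', [X], [Y]))

try:
    # Assumed to raise because no 'shape_constant' function is registered.
    OnnxShapeInference(model)
except ShapeInferenceMissing as e:
    print(e)

>>>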
    
    ======================================================================
    ERROR: test_nonmaxsuppression_center_point_box_format_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NonMaxSuppression' domain='', looking for 'shape_nonmaxsuppression' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
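NonMaxSuppression is a harder case than Constant: its output selected_indices has shape [num_selected_indices, 3] and the first dimension depends on the input values, so a static shape function could at best return a partially known shape. As a point of comparison, the sketch below runs the shape inference shipped with onnx itself on a minimal NonMaxSuppression graph built with public onnx helpers; it prints whatever onnx can deduce statically for the intermediate result.

<<<

from onnx import TensorProto
from onnx.helper import (
    make_graph, make_model, make_node, make_tensor_value_info)
from onnx.shape_inference import infer_shapes

# Minimal graph: NonMaxSuppression with only its two mandatory inputs,
# followed by Identity so that 'selected' becomes an intermediate result
# whose shape has to be inferred.
boxes = make_tensor_value_info('boxes', TensorProto.FLOAT, [1, None, 4])
scores = make_tensor_value_info('scores', TensorProto.FLOAT, [1, 1, None])
nms = make_node('NonMaxSuppression', ['boxes', 'scores'], ['selected'])
final = make_node('Identity', ['selected'], ['Y'])
Y = make_tensor_value_info('Y', TensorProto.INT64, None)
model = make_model(make_graph([nms, final], 'nms', [boxes, scores], [Y]))

# Prints the value_info entries filled in by onnx's own shape inference.
print(infer_shapes(model).graph.value_info)

>>>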
    
    ======================================================================
    ERROR: test_nonmaxsuppression_flipped_coordinates_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NonMaxSuppression' domain='', looking for 'shape_nonmaxsuppression' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nonmaxsuppression_identical_boxes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NonMaxSuppression' domain='', looking for 'shape_nonmaxsuppression' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nonmaxsuppression_limit_output_size_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NonMaxSuppression' domain='', looking for 'shape_nonmaxsuppression' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nonmaxsuppression_single_box_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NonMaxSuppression' domain='', looking for 'shape_nonmaxsuppression' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nonmaxsuppression_suppress_by_IOU_and_scores_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NonMaxSuppression' domain='', looking for 'shape_nonmaxsuppression' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nonmaxsuppression_suppress_by_IOU_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NonMaxSuppression' domain='', looking for 'shape_nonmaxsuppression' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nonmaxsuppression_two_batches_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NonMaxSuppression' domain='', looking for 'shape_nonmaxsuppression' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nonmaxsuppression_two_classes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NonMaxSuppression' domain='', looking for 'shape_nonmaxsuppression' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_nonzero_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'NonZero' domain='', looking for 'shape_nonzero' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
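
NonZero is a harder case for static shape inference because its output shape depends on the data: the result has shape (rank(X), N) where N is the number of nonzero entries, so only the first dimension can be known ahead of time. A small numpy check of that rule (an illustration only, not backend code):

    import numpy as np

    x = np.array([[1, 0, 2],
                  [0, 3, 0]])
    # ONNX NonZero returns the indices of the nonzero values as a (rank, N) tensor.
    out = np.array(np.nonzero(x))
    print(out.shape)  # (2, 3): x has rank 2 and three nonzero entries
    print(out)        # [[0 0 1]
                      #  [0 2 1]]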
    
    ======================================================================
    ERROR: test_onehot_negative_indices_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'OneHot' domain='', looking for 'shape_onehot' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_onehot_with_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'OneHot' domain='', looking for 'shape_onehot' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_onehot_with_negative_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'OneHot' domain='', looking for 'shape_onehot' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_onehot_without_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'OneHot' domain='', looking for 'shape_onehot' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
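
The OneHot failures are all of the same kind, yet the output-shape rule itself is simple: the output has the rank of indices plus one, with a new dimension of size depth inserted at position axis (a negative axis counts from the end of the output shape). The helper below only illustrates that rule; it is not part of the backend:

    def onehot_output_shape(indices_shape, depth, axis=-1):
        # The output rank is len(indices_shape) + 1 and the `depth`
        # dimension is inserted at `axis` (negative values wrap around).
        rank = len(indices_shape) + 1
        if axis < 0:
            axis += rank
        return tuple(indices_shape[:axis]) + (depth,) + tuple(indices_shape[axis:])

    print(onehot_output_shape((2, 3), depth=10))          # (2, 3, 10)
    print(onehot_output_shape((2, 3), depth=10, axis=1))  # (2, 10, 3)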
    
    ======================================================================
    ERROR: test_optional_get_element_optional_sequence_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 185, in _init
        self.graph_ = self.to_sequence(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 619, in to_sequence
        variables[obj.name] = _var_as_dict(obj)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 459, in _var_as_dict
        dtype['optional'] = _var_as_dict(optional)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 558, in _var_as_dict
        return dict(optional=True, elem_type=_var_as_dict(var.elem_type))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 553, in _var_as_dict
        d[n] = _var_as_dict(at)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnx_tools/onnx2py_helper.py", line 560, in _var_as_dict
        raise NotImplementedError(  # pragma: no cover
    NotImplementedError: Unable to guess which object it is type is <class 'onnx.onnx_ml_pb2.Sequence'> value is 'elem_type {\n  tensor_type {\n    elem_type: 6\n    shape {\n      dim {\n        dim_value: 4\n      }\n    }\n  }\n}\n' (hasattr(var,'type')=False, var.type=None
    ByteSize
    Clear
    ClearExtension
    ClearField
    CopyFrom
    DESCRIPTOR
    DiscardUnknownFields
    Extensions
    FindInitializationErrors
    FromString
    HasExtension
    HasField
    IsInitialized
    ListFields
    MergeFrom
    MergeFromString
    ParseFromString
    RegisterExtension
    SerializePartialToString
    SerializeToString
    SetInParent
    UnknownFields
    WhichOneof
    _CheckCalledFromGeneratedFile
    _SetListener
    __class__
    __deepcopy__
    __delattr__
    __dir__
    __doc__
    __eq__
    __format__
    __ge__
    __getattribute__
    __getstate__
    __gt__
    __hash__
    __init__
    __init_subclass__
    __le__
    __lt__
    __module__
    __ne__
    __new__
    __reduce__
    __reduce_ex__
    __repr__
    __setattr__
    __setstate__
    __sizeof__
    __slots__
    __str__
    __subclasshook__
    __unicode__
    _extensions_by_name
    _extensions_by_number
    elem_type
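
This failure is different from the ShapeInferenceMissing ones: the helper that converts ONNX value infos into dictionaries stops on a Sequence type nested inside an Optional, which is exactly the declared input type of this test. The snippet below only rebuilds that type with onnx.helper (assuming the make_sequence_type_proto and make_optional_type_proto helpers available in recent onnx releases) to show the nesting reported in the error message; elem_type 6 is INT32.

    from onnx import TensorProto, helper

    # optional(sequence(tensor(int32, [4]))), the type the error message prints
    tensor_type = helper.make_tensor_type_proto(TensorProto.INT32, [4])
    sequence_type = helper.make_sequence_type_proto(tensor_type)
    optional_type = helper.make_optional_type_proto(sequence_type)
    print(optional_type)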
    
    ======================================================================
    ERROR: test_optional_get_element_optional_tensor_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'OptionalGetElement' domain='', looking for 'shape_optionalgetelement' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_optional_get_element_sequence_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'OptionalGetElement' domain='', looking for 'shape_optionalgetelement' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_optional_get_element_tensor_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'OptionalGetElement' domain='', looking for 'shape_optionalgetelement' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_optional_has_element_empty_no_input_name_optional_input_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'OptionalHasElement' domain='', looking for 'shape_optionalhaselement' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_optional_has_element_empty_no_input_name_tensor_input_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'OptionalHasElement' domain='', looking for 'shape_optionalhaselement' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_optional_has_element_empty_no_input_optional_input_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'OptionalHasElement' domain='', looking for 'shape_optionalhaselement' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_optional_has_element_empty_no_input_tensor_input_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'OptionalHasElement' domain='', looking for 'shape_optionalhaselement' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_optional_has_element_empty_optional_input_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'OptionalHasElement' domain='', looking for 'shape_optionalhaselement' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_optional_has_element_optional_input_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'OptionalHasElement' domain='', looking for 'shape_optionalhaselement' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_optional_has_element_tensor_input_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'OptionalHasElement' domain='', looking for 'shape_optionalhaselement' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_pow_bcast_scalar_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 105, in shape_dispatch
        return fct_shape(known_shape, node)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 122, in shape_pow
        return _element_wise(known_shapes, node, same_type=False)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/_element_wise.py", line 51, in _element_wise
        ShapeResult.broadcast(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 364, in broadcast
        raise NotImplementedShapeInferenceError(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.NotImplementedShapeInferenceError: Broadcasting is only implemented for shape of the same size, shapes are ShapeResult('x', [3], dtype('float32')) and ShapeResult('y', [], dtype('float32')).
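
The ``test_pow_bcast_scalar_cpu`` failure is of a different kind: the shape functions exist, but ``ShapeResult.broadcast`` only handles operands of equal rank, so broadcasting ``[3]`` against a scalar ``[]`` is rejected. The sketch below shows the usual numpy-style treatment (left-pad the shorter shape with 1s before merging dimensions); it is a generic illustration, not mlprodict's ``ShapeResult`` API.

<<<

def broadcast_shapes(a, b):
    # Numpy-style broadcasting: align on the right by left-padding
    # the shorter shape with 1s, then merge dimension by dimension.
    rank = max(len(a), len(b))
    a = [1] * (rank - len(a)) + list(a)
    b = [1] * (rank - len(b)) + list(b)
    out = []
    for da, db in zip(a, b):
        if da == db or db == 1:
            out.append(da)
        elif da == 1:
            out.append(db)
        else:
            raise ValueError(f"Incompatible dimensions {da} and {db}.")
    return out


# The failing case from the log: x has shape [3], y is a scalar.
print(broadcast_shapes([3], []))      # -> [3]
print(broadcast_shapes([2, 1], [3]))  # -> [2, 3]

>>>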
    
    ======================================================================
    ERROR: test_prelu_broadcast_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
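
The PRelu tests fail indirectly: there is no ``shape_prelu`` either, so the dispatcher falls back to the operator's ONNX function body (``onnx_schema.function_body`` in the trace above), and inference then stops on the ``Constant`` node inside that body. The shape of a Constant comes straight from its ``value`` attribute; the snippet below only uses public onnx helpers to show where a hypothetical ``shape_constant`` would read it from.

<<<

from onnx import TensorProto, helper, numpy_helper
import numpy

# A Constant node carries its result in the 'value' attribute, so its
# output shape is simply the shape of that embedded tensor.
tensor = numpy_helper.from_array(
    numpy.array([[1.0, 2.0], [3.0, 4.0]], dtype=numpy.float32), name="cst")
node = helper.make_node("Constant", [], ["y"], value=tensor)

value = next(att.t for att in node.attribute if att.name == "value")
print(list(value.dims), TensorProto.DataType.Name(value.data_type))

>>>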
    
    ======================================================================
    ERROR: test_prelu_broadcast_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_prelu_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_prelu_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_qlinearconv_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'QLinearConv' domain='', looking for 'shape_qlinearconv' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_qlinearmatmul_2D_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'QLinearMatMul' domain='', looking for 'shape_qlinearmatmul' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_qlinearmatmul_3D_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'QLinearMatMul' domain='', looking for 'shape_qlinearmatmul' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
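
``QLinearConv``, ``QLinearMatMul`` and ``QuantizeLinear`` fail for the same reason as the operators above: no ``shape_*`` function is registered for them. Shape-wise they behave like their float counterparts, the scales and zero points only affect the element type. As a purely illustrative sketch (the helper name is hypothetical), the output shape of QLinearMatMul can be derived from the two data inputs alone.

<<<

def qlinearmatmul_shape(a_shape, b_shape):
    # Shape rule of (QLinear)MatMul for 2-D and batched inputs:
    # the last two axes follow matrix multiplication, leading axes
    # must match (full numpy broadcasting is omitted for brevity).
    if a_shape[-1] != b_shape[-2]:
        raise ValueError("Reduction dimensions do not match.")
    if a_shape[:-2] != b_shape[:-2]:
        raise ValueError("Batch dimensions do not match.")
    return list(a_shape[:-2]) + [a_shape[-2], b_shape[-1]]


print(qlinearmatmul_shape([2, 4], [4, 3]))        # 2-D case -> [2, 3]
print(qlinearmatmul_shape([2, 2, 4], [2, 4, 3]))  # batched case -> [2, 2, 3]

>>>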
    
    ======================================================================
    ERROR: test_quantizelinear_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'QuantizeLinear' domain='', looking for 'shape_quantizelinear' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_quantizelinear_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'QuantizeLinear' domain='', looking for 'shape_quantizelinear' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_range_float_type_positive_delta_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_range_float_type_positive_delta_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_range_int32_type_negative_delta_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_range_int32_type_negative_delta_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
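
The Range tests fail the same indirect way as PRelu: ``shape_range`` does not exist, the operator is expanded through its ONNX function body, and inference stops on the ``Cast`` node it contains. Cast itself has a trivial shape rule, sketched below as standalone code (the dtype table is abbreviated and the function name is hypothetical).

<<<

import numpy

# Abbreviated mapping from ONNX TensorProto element types to numpy dtypes.
TENSOR_TYPE_TO_NP = {1: numpy.float32, 6: numpy.int32,
                     7: numpy.int64, 11: numpy.float64}


def cast_shape(input_shape, to):
    # Cast keeps the shape untouched and only changes the element type.
    return list(input_shape), TENSOR_TYPE_TO_NP[to]


print(cast_shape([3], to=1))  # -> ([3], numpy.float32)

>>>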
    
    ======================================================================
    ERROR: test_reduce_l1_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l1_default_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l1_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l1_default_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l1_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
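
The tests without the _expanded suffix reach the same exception indirectly: ReduceL1 has no
dedicated shape handler either, so shape_dispatch falls back to the operator's ONNX function
body (the frame ``sess = rt_class(onnx_schema.function_body)`` above), and that body contains
the unsupported ReduceSum node. The fragment below only inspects that function body through
the public onnx API; with an onnx release different from the one used for this run, the
definition of ReduceL1 may differ.

<<<

from onnx import defs

# ReduceL1 is defined as a function of simpler operators
# (ReduceL1(x) = ReduceSum(Abs(x))), so its body carries the ReduceSum node
# that the shape dispatcher cannot handle.
body = defs.get_schema('ReduceL1').function_body
print([n.op_type for n in body.node])

>>>
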
    
    ======================================================================
    ERROR: test_reduce_l1_do_not_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l1_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l1_do_not_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l1_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l1_keep_dims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l1_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l1_keep_dims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l1_negative_axes_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l1_negative_axes_keep_dims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l1_negative_axes_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l1_negative_axes_keep_dims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l2_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l2_default_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l2_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l2_default_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l2_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l2_do_not_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l2_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l2_do_not_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l2_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l2_keep_dims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l2_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l2_keep_dims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l2_negative_axes_keep_dims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l2_negative_axes_keep_dims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l2_negative_axes_keep_dims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_l2_negative_axes_keep_dims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_asc_axes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_asc_axes_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_default_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_default_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_desc_axes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_desc_axes_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_default_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
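
Two traceback variants show up for these ReduceLogSumExp tests. The *_expanded_* tests already contain the decomposed graph, so the failure happens directly on the Cast node. The non-expanded tests fail one level deeper: ReduceLogSumExp has no shape_reducelogsumexp handler either, so the call rt_class(onnx_schema.function_body) visible at line 79 of ops_shape/__init__.py runs shape inference on the operator's ONNX function body, which contains the same unsupported Cast. One way to look at that function body, using the standard onnx API (the attribute names below come from onnx; the printed operators depend on the installed opset):

<<<

from onnx.defs import get_schema

# Retrieve the operator schema and, when the operator is defined as a
# function, list the operators of its decomposition.
schema = get_schema("ReduceLogSumExp")
if schema.has_function:
    print([node.op_type for node in schema.function_body.node])
else:
    print("no static function body registered for this opset")

>>>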
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_default_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_do_not_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_do_not_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_negative_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_exp_negative_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Cast' domain='', looking for 'shape_cast' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_negative_axes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_log_sum_negative_axes_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_max_default_axes_keepdim_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMax' domain='', looking for 'shape_reducemax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
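
The ReduceSum and ReduceMax failures are the same missing-handler issue: neither shape_reducesum nor shape_reducemax appears in the list above. For reduce operators the output shape also depends on which axes are reduced and on keepdims; the sketch below shows that computation with a hypothetical helper written in plain Python, not mlprodict code.

<<<

def reduce_shape(shape, axes=None, keepdims=1):
    # No axes means reducing over every dimension.
    if axes is None:
        axes = list(range(len(shape)))
    # Normalize negative axes the way numpy and ONNX do.
    axes = {a % len(shape) for a in axes}
    if keepdims:
        return [1 if i in axes else d for i, d in enumerate(shape)]
    return [d for i, d in enumerate(shape) if i not in axes]


print(reduce_shape([3, 2, 2]))                        # [1, 1, 1]
print(reduce_shape([3, 2, 2], axes=[1], keepdims=0))  # [3, 2]
print(reduce_shape([3, 2, 2], axes=[-2]))             # [3, 1, 2]

>>>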
    
    ======================================================================
    ERROR: test_reduce_max_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMax' domain='', looking for 'shape_reducemax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_max_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMax' domain='', looking for 'shape_reducemax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_max_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMax' domain='', looking for 'shape_reducemax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_max_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMax' domain='', looking for 'shape_reducemax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_max_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMax' domain='', looking for 'shape_reducemax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_max_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMax' domain='', looking for 'shape_reducemax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_max_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMax' domain='', looking for 'shape_reducemax' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
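
For reference, the shape rule that a missing `shape_reducemax`, `shape_reducemean` or `shape_reducemin` function would have to encode is the standard ONNX reduction rule exercised by the test names above: reduced axes keep size 1 when `keepdims=1` and disappear when `keepdims=0`, negative axes count from the last dimension, and omitting `axes` reduces every dimension. A minimal sketch, with the hypothetical helper name `reduced_shape`:

<<<

# Sketch of the reduction shape rule shared by ReduceMax, ReduceMean, ReduceMin.
# The helper name is hypothetical; it is not mlprodict's implementation.
def reduced_shape(shape, axes=None, keepdims=1):
    if axes is None:
        # default: reduce over every axis
        axes = list(range(len(shape)))
    # negative axes count from the last dimension
    axes = {a % len(shape) for a in axes}
    if keepdims:
        return tuple(1 if i in axes else d for i, d in enumerate(shape))
    return tuple(d for i, d in enumerate(shape) if i not in axes)

print(reduced_shape((3, 2, 2)))                        # (1, 1, 1)
print(reduced_shape((3, 2, 2), axes=[1], keepdims=0))  # (3, 2)
print(reduced_shape((3, 2, 2), axes=[-2], keepdims=1)) # (3, 1, 2)

>>>
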
    
    ======================================================================
    ERROR: test_reduce_mean_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_mean_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_mean_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_mean_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_mean_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_mean_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_mean_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_mean_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_min_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMin' domain='', looking for 'shape_reducemin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_min_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMin' domain='', looking for 'shape_reducemin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_min_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMin' domain='', looking for 'shape_reducemin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_min_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMin' domain='', looking for 'shape_reducemin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_min_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMin' domain='', looking for 'shape_reducemin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_min_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMin' domain='', looking for 'shape_reducemin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_min_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMin' domain='', looking for 'shape_reducemin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_min_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMin' domain='', looking for 'shape_reducemin' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_prod_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceProd' domain='', looking for 'shape_reduceprod' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_prod_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceProd' domain='', looking for 'shape_reduceprod' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_prod_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceProd' domain='', looking for 'shape_reduceprod' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_prod_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceProd' domain='', looking for 'shape_reduceprod' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_prod_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceProd' domain='', looking for 'shape_reduceprod' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_prod_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceProd' domain='', looking for 'shape_reduceprod' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_prod_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceProd' domain='', looking for 'shape_reduceprod' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_prod_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceProd' domain='', looking for 'shape_reduceprod' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_empty_axes_input_noop_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_square_default_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_square_default_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_square_default_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_square_default_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_square_do_not_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_square_do_not_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_square_do_not_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_square_do_not_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_square_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_square_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_square_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_square_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_square_negative_axes_keepdims_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_square_negative_axes_keepdims_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_square_negative_axes_keepdims_random_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 79, in shape_dispatch
        sess = rt_class(onnx_schema.function_body)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reduce_sum_square_negative_axes_keepdims_random_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reflect_pad_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Pad' domain='', looking for 'shape_pad' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_relu_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reshape_allowzero_reordered_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reshape_extended_dims_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reshape_negative_dim_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reshape_negative_extended_dims_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reshape_one_dim_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reshape_reduced_dims_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reshape_reordered_all_dims_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reshape_reordered_last_dims_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reshape_zero_and_negative_dim_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reshape_zero_dim_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Reshape' domain='', looking for 'shape_reshape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_downsample_scales_cubic_A_n0p5_exclude_outside_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_downsample_scales_cubic_align_corners_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_downsample_scales_cubic_antialias_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_downsample_scales_cubic_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_downsample_scales_linear_align_corners_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_downsample_scales_linear_antialias_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_downsample_scales_linear_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_downsample_scales_nearest_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_downsample_sizes_cubic_antialias_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_downsample_sizes_cubic_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_downsample_sizes_linear_antialias_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_downsample_sizes_linear_pytorch_half_pixel_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_downsample_sizes_nearest_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_downsample_sizes_nearest_not_larger_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_downsample_sizes_nearest_not_smaller_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_tf_crop_and_resize_axes_2_3_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_tf_crop_and_resize_axes_3_2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_tf_crop_and_resize_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_upsample_scales_cubic_A_n0p5_exclude_outside_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_upsample_scales_cubic_align_corners_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_upsample_scales_cubic_asymmetric_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_upsample_scales_cubic_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_upsample_scales_linear_align_corners_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_upsample_scales_linear_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_upsample_scales_nearest_axes_2_3_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_upsample_scales_nearest_axes_3_2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_upsample_scales_nearest_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_upsample_sizes_cubic_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_upsample_sizes_nearest_axes_2_3_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_upsample_sizes_nearest_axes_3_2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_upsample_sizes_nearest_ceil_half_pixel_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_upsample_sizes_nearest_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_upsample_sizes_nearest_floor_align_corners_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_upsample_sizes_nearest_not_larger_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_resize_upsample_sizes_nearest_round_prefer_ceil_asymmetric_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Resize' domain='', looking for 'shape_resize' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reversesequence_batch_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReverseSequence' domain='', looking for 'shape_reversesequence' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_reversesequence_time_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReverseSequence' domain='', looking for 'shape_reversesequence' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_rnn_seq_length_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'RNN' domain='', looking for 'shape_rnn' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_roialign_aligned_false_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'RoiAlign' domain='', looking for 'shape_roialign' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_roialign_aligned_true_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'RoiAlign' domain='', looking for 'shape_roialign' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_scan9_sum_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Scan' domain='', looking for 'shape_scan' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_scan_sum_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Scan' domain='', looking for 'shape_scan' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_scatter_elements_with_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ScatterElements' domain='', looking for 'shape_scatterelements' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_scatter_elements_with_duplicate_indices_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ScatterElements' domain='', looking for 'shape_scatterelements' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_scatter_elements_with_negative_indices_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ScatterElements' domain='', looking for 'shape_scatterelements' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_scatter_elements_with_reduction_max_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ScatterElements' domain='', looking for 'shape_scatterelements' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_scatter_elements_with_reduction_min_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ScatterElements' domain='', looking for 'shape_scatterelements' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_scatter_elements_without_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ScatterElements' domain='', looking for 'shape_scatterelements' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_scatter_with_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 258, in _init
        node.setup_runtime(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 240, in setup_runtime
        self.ops_ = load_op(self.onnx_node, desc=self.desc,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
        return lo(onnx_node, desc=desc, options=options)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 91, in load_op
        cl = onnx_load_op(options.get('domain', ''),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/reference/ops/_op_list.py", line 315, in load_op
        raise RuntimeImplementationError(
    onnx.reference.op_run.RuntimeImplementationError: No registered implementation for operator 'Scatter' and domain '', schema.has_function is False, schema.has_context_dependent_function is False. You may either add one or skip the test in 'reference_evaluator_bakcend_test.py'. Available implementations:
    Abs, Acos, Acosh, Add, And, ArgMax, ArgMin, Asin, Asinh, Atan, Atanh,
    AttributeHasValue, AveragePool, BatchNormalization, Bernoulli,
    BitShift, BitwiseAnd, BitwiseNot, BitwiseOr, BitwiseXor,
    BlackmanWindow, Cast, CastLike, Ceil, Celu, CenterCropPad, Clip,
    Col2Im, Compress, Concat, ConcatFromSequence, Constant,
    ConstantOfShape, Conv, ConvInteger, ConvTranspose, Cos, Cosh, CumSum,
    DFT, DepthToSpace, DequantizeLinear, Det, Div, Dropout,
    DynamicQuantizeLinear, Einsum, Elu, Equal, Erf, Exp, Expand, EyeLike,
    Flatten, Floor, GRU, Gather, GatherElements, GatherND, Gemm,
    GlobalAveragePool, GlobalMaxPool, Greater, GreaterOrEqual, GridSample,
    HammingWindow, HannWindow, HardSigmoid, Hardmax, Identity, If,
    InstanceNormalization, IsInf, IsNaN, LRN, LSTM, LayerNormalization,
    LeakyRelu, Less, LessOrEqual, Log, LogSoftmax, Loop, LpNormalization,
    MatMul, MatMulInteger, Max, MaxPool, MaxUnpool, Mean, MelWeightMatrix,
    Min, Mod, Mul, Neg, NegativeLogLikelihoodLoss, NonMaxSuppression,
    NonZero, Not, OneHot, OpFunction, OpRun, Optional, OptionalGetElement,
    OptionalHasElement, Or, PRelu, Pad, Pow, QLinearConv, QLinearMatMul,
    QuantizeLinear, RNN, RandomNormal, RandomNormalLike, RandomUniform,
    RandomUniformLike, Range, Reciprocal, ReduceL1, ReduceL2,
    ReduceLogSum, ReduceLogSumExp, ReduceMax, ReduceMean, ReduceMin,
    ReduceProd, ReduceSum, ReduceSumSquare, Relu, Reshape, Resize,
    ReverseSequence, RoiAlign, Round, STFT, Scan, ScatterElements,
    ScatterND, Selu, SequenceAt, SequenceConstruct, SequenceEmpty,
    SequenceErase, SequenceInsert, SequenceLength, SequenceMap, Shape,
    Shrink, Sigmoid, Sign, Sin, Sinh, Size, Slice, Softmax,
    SoftmaxCrossEntropyLoss, Softplus, Softsign, SpaceToDepth, Split,
    SplitToSequence, Sqrt, Squeeze, StringNormalizer, Sub, Sum, Tan, Tanh,
    TfIdfVectorizer, ThresholdedRelu, Tile, TopK, Transpose, Trilu,
    Unique, Unsqueeze, Upsample, Where, Xor
    
    ======================================================================
    ERROR: test_scatter_without_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 258, in _init
        node.setup_runtime(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 240, in setup_runtime
        self.ops_ = load_op(self.onnx_node, desc=self.desc,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
        return lo(onnx_node, desc=desc, options=options)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 91, in load_op
        cl = onnx_load_op(options.get('domain', ''),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/reference/ops/_op_list.py", line 315, in load_op
        raise RuntimeImplementationError(
    onnx.reference.op_run.RuntimeImplementationError: No registered implementation for operator 'Scatter' and domain '', schema.has_function is False, schema.has_context_dependent_function is False. You may either add one or skip the test in 'reference_evaluator_backend_test.py'. Available implementations:
    Abs, Acos, Acosh, Add, And, ArgMax, ArgMin, Asin, Asinh, Atan, Atanh,
    AttributeHasValue, AveragePool, BatchNormalization, Bernoulli,
    BitShift, BitwiseAnd, BitwiseNot, BitwiseOr, BitwiseXor,
    BlackmanWindow, Cast, CastLike, Ceil, Celu, CenterCropPad, Clip,
    Col2Im, Compress, Concat, ConcatFromSequence, Constant,
    ConstantOfShape, Conv, ConvInteger, ConvTranspose, Cos, Cosh, CumSum,
    DFT, DepthToSpace, DequantizeLinear, Det, Div, Dropout,
    DynamicQuantizeLinear, Einsum, Elu, Equal, Erf, Exp, Expand, EyeLike,
    Flatten, Floor, GRU, Gather, GatherElements, GatherND, Gemm,
    GlobalAveragePool, GlobalMaxPool, Greater, GreaterOrEqual, GridSample,
    HammingWindow, HannWindow, HardSigmoid, Hardmax, Identity, If,
    InstanceNormalization, IsInf, IsNaN, LRN, LSTM, LayerNormalization,
    LeakyRelu, Less, LessOrEqual, Log, LogSoftmax, Loop, LpNormalization,
    MatMul, MatMulInteger, Max, MaxPool, MaxUnpool, Mean, MelWeightMatrix,
    Min, Mod, Mul, Neg, NegativeLogLikelihoodLoss, NonMaxSuppression,
    NonZero, Not, OneHot, OpFunction, OpRun, Optional, OptionalGetElement,
    OptionalHasElement, Or, PRelu, Pad, Pow, QLinearConv, QLinearMatMul,
    QuantizeLinear, RNN, RandomNormal, RandomNormalLike, RandomUniform,
    RandomUniformLike, Range, Reciprocal, ReduceL1, ReduceL2,
    ReduceLogSum, ReduceLogSumExp, ReduceMax, ReduceMean, ReduceMin,
    ReduceProd, ReduceSum, ReduceSumSquare, Relu, Reshape, Resize,
    ReverseSequence, RoiAlign, Round, STFT, Scan, ScatterElements,
    ScatterND, Selu, SequenceAt, SequenceConstruct, SequenceEmpty,
    SequenceErase, SequenceInsert, SequenceLength, SequenceMap, Shape,
    Shrink, Sigmoid, Sign, Sin, Sinh, Size, Slice, Softmax,
    SoftmaxCrossEntropyLoss, Softplus, Softsign, SpaceToDepth, Split,
    SplitToSequence, Sqrt, Squeeze, StringNormalizer, Sub, Sum, Tan, Tanh,
    TfIdfVectorizer, ThresholdedRelu, Tile, TopK, Transpose, Trilu,
    Unique, Unsqueeze, Upsample, Where, Xor
    
    ======================================================================
    ERROR: test_scatternd_add_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ScatterND' domain='', looking for 'shape_scatternd' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_scatternd_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ScatterND' domain='', looking for 'shape_scatternd' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_scatternd_max_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ScatterND' domain='', looking for 'shape_scatternd' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_scatternd_min_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ScatterND' domain='', looking for 'shape_scatternd' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_scatternd_multiply_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ScatterND' domain='', looking for 'shape_scatternd' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1_mean_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1_mean_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1_mean_weight_negative_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1_mean_weight_negative_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1d2d3_none_no_weight_negative_ii_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1d2d3_none_no_weight_negative_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1d2d3_none_no_weight_negative_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1d2d3_sum_weight_high_ii_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1d2d3_sum_weight_high_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1d2d3_sum_weight_high_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1d2d3_sum_weight_high_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1d2d3d4d5_mean_weight_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1d2d3d4d5_mean_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
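
The report fails on two operator types for which the shape backend has no handler, 'Constant' and 'SoftmaxCrossEntropyLoss'. The exception also spells out the lookup rule: the dispatcher lowercases the operator type, prefixes it with 'shape_' and searches the registered 'shape_*' functions for that name. The snippet below is a minimal, hypothetical sketch of such a name-based dispatch; the registry, helper names and node representation are invented for illustration and are not mlprodict's actual API. Registering the missing handlers, so that 'shape_constant' and 'shape_softmaxcrossentropyloss' appear in the list printed with the exception, would presumably make these tests pass.

<<<

# Hypothetical sketch of a name-based shape-function lookup, inspired by the
# error messages above.  Names and signatures are invented for illustration.
REGISTRY = {}

def register(fct):
    "Stores a shape function under its own name, e.g. 'shape_abs'."
    REGISTRY[fct.__name__] = fct
    return fct

@register
def shape_abs(known_shapes, node):
    # Abs is shape-preserving: the output keeps the input shape.
    known_shapes[node['output']] = known_shapes[node['input']]
    return True

def shape_dispatch(known_shapes, node):
    # The handler name is derived from the operator type.
    key = 'shape_%s' % node['op_type'].lower()
    if key not in REGISTRY:
        raise RuntimeError(
            "Unable to find a corresponding function for operator type %r, "
            "looking for %r among %r." % (
                node['op_type'], key, sorted(REGISTRY)))
    return REGISTRY[key](known_shapes, node)

shapes = {'X': (3, 4)}
shape_dispatch(shapes, {'op_type': 'Abs', 'input': 'X', 'output': 'Y'})
print(shapes)
# shape_dispatch(shapes, {'op_type': 'Constant', 'output': 'C'}) would raise
# the same kind of error as above: no 'shape_constant' handler is registered.

>>>

    {'X': (3, 4), 'Y': (3, 4)}
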
    
    ======================================================================
    ERROR: test_sce_NCd1d2d3d4d5_mean_weight_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1d2d3d4d5_mean_weight_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1d2d3d4d5_none_no_weight_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1d2d3d4d5_none_no_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1d2d3d4d5_none_no_weight_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_NCd1d2d3d4d5_none_no_weight_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_mean_3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_mean_3d_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_mean_3d_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_mean_3d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_mean_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_mean_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_mean_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_mean_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_mean_no_weight_ii_3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_mean_no_weight_ii_3d_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_mean_no_weight_ii_3d_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_mean_no_weight_ii_3d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_mean_no_weight_ii_4d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_mean_no_weight_ii_4d_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
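Every failure in this part of the log follows the same pattern: ``shape_dispatch`` derives a function name from the ONNX operator type (``'SoftmaxCrossEntropyLoss'`` becomes ``shape_softmaxcrossentropyloss``, ``'Constant'`` becomes ``shape_constant``) and raises ``ShapeInferenceMissing`` when that name is not registered in ``mlprodict.onnxrt.ops_shape``. The sketch below only illustrates that naming rule as it can be read from the error messages; it is not mlprodict's implementation, and the node representation and handler signature are assumptions made for the example.

<<<

# Illustrative reconstruction of the name-based dispatch suggested by the
# errors above.  Not mlprodict's code: the real shape functions work on the
# library's own shape containers, which are not shown in this log.


class ShapeInferenceMissing(RuntimeError):
    "Raised when no shape function is registered for an operator type."


def shape_identity(known_shapes, node):
    # Hypothetical handler: the output keeps the input shape.
    known_shapes[node["outputs"][0]] = known_shapes[node["inputs"][0]]
    return True


# The registry maps 'shape_<lowercase op type>' to a handler.
_SHAPE_FUNCTIONS = {
    name: obj for name, obj in globals().items()
    if name.startswith("shape_") and callable(obj)}


def shape_dispatch(known_shapes, node):
    # 'SoftmaxCrossEntropyLoss' -> 'shape_softmaxcrossentropyloss'
    key = "shape_" + node["op_type"].lower()
    if key not in _SHAPE_FUNCTIONS:
        raise ShapeInferenceMissing(
            "Unable to find a corresponding function for operator type "
            "%r, looking for %r among %s." % (
                node["op_type"], key, ", ".join(sorted(_SHAPE_FUNCTIONS))))
    return _SHAPE_FUNCTIONS[key](known_shapes, node)


shapes = {"X": (2, 3)}
shape_dispatch(shapes, {"op_type": "Identity",
                        "inputs": ["X"], "outputs": ["Y"]})
print(shapes)  # {'X': (2, 3), 'Y': (2, 3)}

try:
    shape_dispatch(shapes, {"op_type": "Constant",
                            "inputs": [], "outputs": ["cst"]})
except ShapeInferenceMissing as e:
    print(e)  # same kind of failure as reported in the log

>>>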
    ======================================================================
    ERROR: test_sce_mean_no_weight_ii_4d_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_no_weight_ii_4d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_no_weight_ii_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_no_weight_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_no_weight_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_no_weight_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    [... same list of registered shape functions as above ...]
    
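For the ``Constant`` failures, the output shape is entirely determined by the node itself: it is the ``dims`` of the tensor stored in the node's ``value`` attribute. The following sketch only shows where that shape comes from, using the public ``onnx.helper`` API; ``constant_output_shape`` is a hypothetical helper, not one of mlprodict's shape functions, and it ignores the other ``value_*`` attribute variants of ``Constant``.

<<<

from onnx import helper, TensorProto


def constant_output_shape(node):
    "Dims of the tensor stored in a Constant node's 'value' attribute."
    for att in node.attribute:
        if att.name == "value":
            return tuple(helper.get_attribute_value(att).dims)
    raise NotImplementedError(
        "only the 'value' attribute is handled in this sketch")


cst = helper.make_node(
    "Constant", [], ["cst"],
    value=helper.make_tensor("value", TensorProto.FLOAT, [2, 3], [0.0] * 6))
print(constant_output_shape(cst))  # (2, 3)

>>>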
    ======================================================================
    ERROR: test_sce_mean_weight_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_weight_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_weight_ii_3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_weight_ii_3d_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_weight_ii_3d_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_weight_ii_3d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_weight_ii_4d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_weight_ii_4d_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_weight_ii_4d_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_weight_ii_4d_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_weight_ii_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_weight_ii_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_weight_ii_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_weight_ii_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      [... same traceback as above ...]
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    [... same list of registered shape functions as above ...]
    
    ======================================================================
    ERROR: test_sce_mean_weight_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_mean_weight_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_none_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_none_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_none_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_none_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_none_weights_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_none_weights_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_none_weights_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_none_weights_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_sum_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_sum_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_sum_log_prob_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SoftmaxCrossEntropyLoss' domain='', looking for 'shape_softmaxcrossentropyloss' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sce_sum_log_prob_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_selu_default_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_selu_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_selu_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sequence_insert_at_back_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SequenceInsert' domain='', looking for 'shape_sequenceinsert' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sequence_insert_at_front_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SequenceInsert' domain='', looking for 'shape_sequenceinsert' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sequence_map_add_1_sequence_1_tensor_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SequenceMap' domain='', looking for 'shape_sequencemap' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sequence_map_add_1_sequence_1_tensor_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SequenceLength' domain='', looking for 'shape_sequencelength' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sequence_map_add_2_sequences_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SequenceMap' domain='', looking for 'shape_sequencemap' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sequence_map_add_2_sequences_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SequenceLength' domain='', looking for 'shape_sequencelength' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sequence_map_extract_shapes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SequenceMap' domain='', looking for 'shape_sequencemap' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sequence_map_extract_shapes_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SequenceLength' domain='', looking for 'shape_sequencelength' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sequence_map_identity_1_sequence_1_tensor_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SequenceMap' domain='', looking for 'shape_sequencemap' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sequence_map_identity_1_sequence_1_tensor_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SequenceLength' domain='', looking for 'shape_sequencelength' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sequence_map_identity_1_sequence_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SequenceMap' domain='', looking for 'shape_sequencemap' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sequence_map_identity_1_sequence_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SequenceLength' domain='', looking for 'shape_sequencelength' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sequence_map_identity_2_sequences_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SequenceMap' domain='', looking for 'shape_sequencemap' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sequence_map_identity_2_sequences_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SequenceLength' domain='', looking for 'shape_sequencelength' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_shape_clip_end_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_shape_clip_start_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_shape_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_shape_end_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_shape_end_negative_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_shape_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_shape_start_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_shape_start_1_end_2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_shape_start_1_end_negative_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_shape_start_negative_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Shape' domain='', looking for 'shape_shape' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_shrink_hard_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_shrink_soft_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_simple_rnn_batchwise_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'RNN' domain='', looking for 'shape_rnn' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_simple_rnn_defaults_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'RNN' domain='', looking for 'shape_rnn' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_simple_rnn_with_initial_bias_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'RNN' domain='', looking for 'shape_rnn' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_size_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Size' domain='', looking for 'shape_size' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_size_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Size' domain='', looking for 'shape_size' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_slice_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Slice' domain='', looking for 'shape_slice' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_slice_default_axes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Slice' domain='', looking for 'shape_slice' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_slice_default_steps_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Slice' domain='', looking for 'shape_slice' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_slice_end_out_of_bounds_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Slice' domain='', looking for 'shape_slice' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_slice_neg_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Slice' domain='', looking for 'shape_slice' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_slice_neg_steps_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Slice' domain='', looking for 'shape_slice' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_slice_negative_axes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Slice' domain='', looking for 'shape_slice' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_slice_start_out_of_bounds_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Slice' domain='', looking for 'shape_slice' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softmax_axis_0_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softmax_axis_0_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softmax_axis_1_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softmax_axis_1_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softmax_axis_2_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softmax_axis_2_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softmax_default_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softmax_default_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softmax_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softmax_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softmax_large_number_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softmax_large_number_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softmax_negative_axis_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softmax_negative_axis_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softplus_example_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softplus_expanded_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softsign_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_softsign_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_spacetodepth_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SpaceToDepth' domain='', looking for 'shape_spacetodepth' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_spacetodepth_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SpaceToDepth' domain='', looking for 'shape_spacetodepth' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_split_1d_uneven_split_opset18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_split_2d_uneven_split_opset18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_split_equal_parts_1d_opset13_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
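Each of these Split failures is raised by shape_dispatch, which looks for a function named shape_<operator type> (here shape_split) in mlprodict.onnxrt.ops_shape and does not find one. The snippet below is only an illustrative sketch, independent of mlprodict's internals: it builds a small model with a Split node (the model, tensor names and opset are assumptions made for the example) and lets onnx's own shape inference compute the output shapes such a shape_split function would have to produce.

<<<

# Illustrative only: this is not mlprodict's API. It shows, with onnx's
# generic shape inference, the output shapes a hypothetical 'shape_split'
# function would have to compute for an equal split.
from onnx import TensorProto, helper, shape_inference

# Split a (6, 4) tensor into two equal parts along axis 0
# (no 'split' input, so opset 13 splits the axis equally).
node = helper.make_node('Split', inputs=['X'], outputs=['Y1', 'Y2'], axis=0)
graph = helper.make_graph(
    [node], 'split_example',
    [helper.make_tensor_value_info('X', TensorProto.FLOAT, [6, 4])],
    [helper.make_tensor_value_info('Y1', TensorProto.FLOAT, None),
     helper.make_tensor_value_info('Y2', TensorProto.FLOAT, None)])
model = helper.make_model(
    graph, opset_imports=[helper.make_opsetid('', 13)])

inferred = shape_inference.infer_shapes(model)
for vi in list(inferred.graph.value_info) + list(inferred.graph.output):
    dims = [d.dim_value for d in vi.type.tensor_type.shape.dim]
    print(vi.name, dims)  # Y1 and Y2 should both come out as [3, 4]

>>>

The failures that follow repeat the same pattern for the remaining Split variants and for other operators such as Squeeze, STFT and StringNormalizer.
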
    ======================================================================
    ERROR: test_split_equal_parts_1d_opset18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_split_equal_parts_2d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_split_equal_parts_2d_opset13_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_split_equal_parts_default_axis_opset13_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_split_equal_parts_default_axis_opset18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_split_variable_parts_1d_opset13_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_split_variable_parts_1d_opset18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_split_variable_parts_2d_opset13_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_split_variable_parts_2d_opset18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_split_variable_parts_default_axis_opset13_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_split_variable_parts_default_axis_opset18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_split_zero_size_splits_opset13_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_split_zero_size_splits_opset18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_squeeze_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Squeeze' domain='', looking for 'shape_squeeze' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_squeeze_negative_axes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Squeeze' domain='', looking for 'shape_squeeze' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_stft_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'STFT' domain='', looking for 'shape_stft' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_stft_with_window_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'STFT' domain='', looking for 'shape_stft' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_strnormalizer_export_monday_casesensintive_lower_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_strnormalizer_export_monday_casesensintive_nochangecase_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_strnormalizer_export_monday_casesensintive_upper_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_strnormalizer_export_monday_empty_output_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_strnormalizer_export_monday_insensintive_upper_twodim_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_strnormalizer_nostopwords_nochangecase_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
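
All ``test_strnormalizer_*`` failures above come from the same missing handler: the dispatcher turns the operator type ``'StringNormalizer'`` into the name ``shape_stringnormalizer`` and finds nothing registered under that name. The output length of this operator is data dependent (stopwords are removed at runtime), so a handler could at best preserve the rank. A minimal standalone sketch of that rule, independent of the mlprodict helpers and only illustrative (the function name and the ``None`` convention for unknown dimensions are assumptions):

<<<

# Illustrative sketch: StringNormalizer drops stopwords, so the number of
# surviving strings is unknown statically; only the rank can be preserved.
# The ONNX specification accepts a 1-D [C] input or a 2-D [1, C] input.
def stringnormalizer_shape(input_shape):
    if len(input_shape) == 1:
        return (None,)                      # unknown number of kept strings
    if len(input_shape) == 2:
        return (input_shape[0], None)       # leading dimension is preserved
    raise ValueError("StringNormalizer expects a 1-D or 2-D input")

print(stringnormalizer_shape((6,)))     # (None,)
print(stringnormalizer_shape((1, 6)))   # (1, None)

>>>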
    
    ======================================================================
    ERROR: test_sum_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Sum' domain='', looking for 'shape_sum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sum_one_input_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Sum' domain='', looking for 'shape_sum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sum_two_inputs_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Sum' domain='', looking for 'shape_sum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
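
The ``test_sum_*`` failures follow the same pattern: the dispatcher looks for ``shape_sum`` and does not find it. Sum is a variadic operator with numpy-style multidirectional broadcasting, so a handler would essentially have to fold the broadcasting rule over all inputs. A standalone, purely illustrative sketch of that rule (``None`` marks an unknown dimension; none of this is the mlprodict API):

<<<

# Illustrative sketch: numpy-style multidirectional broadcasting over an
# arbitrary number of input shapes, as a variadic Sum would require.
def broadcast_shape(*shapes):
    rank = max(len(s) for s in shapes)
    # Right-align every shape by padding with 1 on the left.
    padded = [(1,) * (rank - len(s)) + tuple(s) for s in shapes]
    result = []
    for dims in zip(*padded):
        known = {d for d in dims if d is not None and d != 1}
        if len(known) > 1:
            raise ValueError(f"incompatible dimensions {dims}")
        if known:
            result.append(known.pop())
        elif None in dims:
            result.append(None)      # cannot be resolved statically
        else:
            result.append(1)
    return tuple(result)

print(broadcast_shape((3, 1), (1, 4), (3, 4)))   # (3, 4)
print(broadcast_shape((2, None, 5), (5,)))       # (2, None, 5)

>>>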
    
    ======================================================================
    ERROR: test_tfidfvectorizer_tf_batch_onlybigrams_skip0_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'TfIdfVectorizer' domain='', looking for 'shape_tfidfvectorizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_tfidfvectorizer_tf_batch_onlybigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'TfIdfVectorizer' domain='', looking for 'shape_tfidfvectorizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_tfidfvectorizer_tf_batch_uniandbigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'TfIdfVectorizer' domain='', looking for 'shape_tfidfvectorizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_tfidfvectorizer_tf_only_bigrams_skip0_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'TfIdfVectorizer' domain='', looking for 'shape_tfidfvectorizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_tfidfvectorizer_tf_onlybigrams_levelempty_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'TfIdfVectorizer' domain='', looking for 'shape_tfidfvectorizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_tfidfvectorizer_tf_onlybigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'TfIdfVectorizer' domain='', looking for 'shape_tfidfvectorizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_tfidfvectorizer_tf_uniandbigrams_skip5_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'TfIdfVectorizer' domain='', looking for 'shape_tfidfvectorizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_thresholdedrelu_default_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_thresholdedrelu_example_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_thresholdedrelu_expanded_ver18_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
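
The expanded ``test_thresholdedrelu_*_expanded_ver18`` graphs inline the operator as a small subgraph containing a ``Constant`` node, and no ``shape_constant`` handler is registered either. For the dense ``value`` attribute the output shape is fully determined by the stored tensor; a short illustrative sketch using the standard onnx helpers (the node built here is only an example, not one of the failing test graphs):

<<<

# Illustrative sketch: the output shape of a Constant node carrying a dense
# 'value' attribute can be read directly from the embedded TensorProto.
import numpy
from onnx import helper, numpy_helper

node = helper.make_node(
    'Constant', inputs=[], outputs=['cst'],
    value=numpy_helper.from_array(numpy.zeros((2, 3), dtype=numpy.float32)))

value = next(att for att in node.attribute if att.name == 'value')
print(tuple(value.t.dims))   # (2, 3)

>>>

The other attribute forms of Constant (``value_float``, ``value_ints``, ...) would need their own branches.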
    
    ======================================================================
    ERROR: test_tile_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Tile' domain='', looking for 'shape_tile' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_tile_precomputed_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Tile' domain='', looking for 'shape_tile' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
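
``Tile`` is a case where a handler cannot always succeed: the output shape depends on the ``repeats`` input, so it is only computable statically when ``repeats`` is a constant. The rule itself is simple; a sketch assuming the repeats values are known (again purely illustrative):

<<<

# Illustrative sketch: output shape of Tile once the 'repeats' input is known,
# one repeat value per input dimension (ONNX Tile specification).
def tile_shape(input_shape, repeats):
    if len(input_shape) != len(repeats):
        raise ValueError("repeats must hold one value per input dimension")
    return tuple(None if d is None else d * r
                 for d, r in zip(input_shape, repeats))

print(tile_shape((2, 3, 4), (1, 2, 3)))   # (2, 6, 12)

>>>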
    
    ======================================================================
    ERROR: test_top_k_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'TopK' domain='', looking for 'shape_topk' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_top_k_negative_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'TopK' domain='', looking for 'shape_topk' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_top_k_smallest_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'TopK' domain='', looking for 'shape_topk' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
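
The TopK failures above all stop at the same place: `shape_dispatch` derives a function name from the operator type (`'TopK'` becomes `shape_topk`) and looks it up in `mlprodict.onnxrt.ops_shape`; since no such function is registered, the backend cannot even prepare the model. The sketch below only illustrates the rule such a function would have to encode, written with plain tuples instead of mlprodict's `ShapeResult` objects, so the names and signatures are assumptions rather than the library's API.

<<<

# Hypothetical sketch only, not mlprodict's ShapeResult-based API.
# It shows the rule a registered 'shape_topk' would encode: every dimension
# of the input is kept except the one along `axis`, which becomes `k`
# for both outputs (values and indices).
from typing import Optional, Tuple

def shape_topk(input_shape: Tuple[Optional[int], ...], axis: int, k: int
               ) -> Tuple[Optional[int], ...]:
    dims = list(input_shape)
    dims[axis] = k
    return tuple(dims)

# (3, 4, 5) with k=2 on the last axis -> both TopK outputs are (3, 4, 2).
print(shape_topk((3, 4, 5), axis=-1, k=2))

>>>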
    
    ======================================================================
    ERROR: test_training_dropout_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_training_dropout_default_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_training_dropout_default_mask_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_training_dropout_mask_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_training_dropout_zero_ratio_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_training_dropout_zero_ratio_mask_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Dropout' domain='', looking for 'shape_dropout' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
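
The training-mode Dropout tests fail at the same dispatch step because no `shape_dropout` is registered, although the shape rule itself is simple: the output keeps the input's shape and type, and the optional mask keeps the shape with a boolean type. The following is a hypothetical standalone sketch of that rule, not mlprodict's signature.

<<<

import numpy

# Hypothetical sketch of the rule a 'shape_dropout' function would implement:
# both outputs keep the input shape, only the optional mask switches to bool.
def shape_dropout(input_shape, input_dtype):
    output = (input_shape, input_dtype)
    mask = (input_shape, numpy.dtype('bool'))
    return output, mask

print(shape_dropout((3, 4, 5), numpy.dtype('float32')))

>>>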
    
    ======================================================================
    ERROR: test_transpose_all_permutations_0_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Transpose' domain='', looking for 'shape_transpose' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_transpose_all_permutations_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Transpose' domain='', looking for 'shape_transpose' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_transpose_all_permutations_2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Transpose' domain='', looking for 'shape_transpose' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_transpose_all_permutations_3_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Transpose' domain='', looking for 'shape_transpose' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_transpose_all_permutations_4_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Transpose' domain='', looking for 'shape_transpose' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_transpose_all_permutations_5_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Transpose' domain='', looking for 'shape_transpose' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_transpose_default_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Transpose' domain='', looking for 'shape_transpose' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
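
Every Transpose test fails for the same reason, a missing `shape_transpose`, even though the rule is a pure permutation of the input dimensions (reversed order when no `perm` attribute is given). Below is a hypothetical sketch of that rule, independent of mlprodict's internal shape classes.

<<<

# Hypothetical sketch of a 'shape_transpose' rule: permute the input
# dimensions according to `perm`; ONNX reverses them when `perm` is absent.
def shape_transpose(input_shape, perm=None):
    if perm is None:
        perm = range(len(input_shape) - 1, -1, -1)
    return tuple(input_shape[p] for p in perm)

print(shape_transpose((2, 3, 4)))                  # (4, 3, 2)
print(shape_transpose((2, 3, 4), perm=[0, 2, 1]))  # (2, 4, 3)

>>>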
    
    ======================================================================
    ERROR: test_tril_zero_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 46, in run
        if not shapes[k].is_compatible(v):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 186, in is_compatible
        raise NotImplementedError(f"{self!r} ? {shape!r}")
    NotImplementedError: ShapeResult('y', [3, '_1', 5], dtype('int64'), sparse=False, mtype=<OnnxKind.Tensor: 0>, constraints=ShapeConstraintList([ShapeConstraint('_1', {'_0'})])) ? (3, 0, 5)
    
    ======================================================================
    ERROR: test_triu_zero_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 359, in run
        outputs = list(prepared_model.run(inputs))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 83, in run
        outs = self._session.run(feeds)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 46, in run
        if not shapes[k].is_compatible(v):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/shape_result.py", line 186, in is_compatible
        raise NotImplementedError(f"{self!r} ? {shape!r}")
    NotImplementedError: ShapeResult('y', ['_1', 5], dtype('int64'), sparse=False, mtype=<OnnxKind.Tensor: 0>, constraints=ShapeConstraintList([ShapeConstraint('_1', {'_0'})])) ? (0, 5)
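
The two Trilu failures above are of a different kind: `shape_trilu` exists (it appears in the dispatch list), but the runtime check `ShapeResult.is_compatible` raises `NotImplementedError` when it has to reconcile a symbolic dimension carrying a constraint (here `'_1'`, tied to `'_0'`) with the concrete size 0 produced by the zero-sized test tensors. A permissive comparison that lets a symbolic dimension match any concrete value would accept these shapes; the sketch below is a hypothetical standalone check, not a patch to mlprodict.

<<<

# Hypothetical, permissive dimension comparison, sketched as a standalone
# function rather than a change to ShapeResult.is_compatible: a symbolic
# dimension (a string such as '_1') matches any concrete size, including 0.
def dims_compatible(inferred, concrete):
    if len(inferred) != len(concrete):
        return False
    return all(isinstance(d, str) or d == c
               for d, c in zip(inferred, concrete))

print(dims_compatible([3, '_1', 5], (3, 0, 5)))  # True
print(dims_compatible(['_1', 5], (0, 5)))        # True
print(dims_compatible([3, '_1', 5], (4, 0, 5)))  # False

>>>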
    
    ======================================================================
    ERROR: test_unique_not_sorted_without_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Unique' domain='', looking for 'shape_unique' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_unique_sorted_with_axis_3d_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Unique' domain='', looking for 'shape_unique' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_unique_sorted_with_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Unique' domain='', looking for 'shape_unique' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_unique_sorted_with_negative_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Unique' domain='', looking for 'shape_unique' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_unique_sorted_without_axis_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Unique' domain='', looking for 'shape_unique' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_unsqueeze_axis_0_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_unsqueeze_axis_1_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_unsqueeze_axis_2_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_unsqueeze_negative_axes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_unsqueeze_three_axes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_unsqueeze_two_axes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_unsqueeze_unsorted_axes_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_upsample_nearest_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Upsample' domain='', looking for 'shape_upsample' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_where_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Where' domain='', looking for 'shape_where' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_where_long_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Where' domain='', looking for 'shape_where' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_AvgPool1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_AvgPool1d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Unsqueeze' domain='', looking for 'shape_unsqueeze' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_AvgPool2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_AvgPool2d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_AvgPool3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_AvgPool3d_stride1_pad0_gpu_input_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_AvgPool3d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'AveragePool' domain='', looking for 'shape_averagepool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_BatchNorm1d_3d_input_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_BatchNorm2d_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_BatchNorm2d_momentum_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_BatchNorm3d_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_BatchNorm3d_momentum_eval_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_ConstantPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Pad' domain='', looking for 'shape_pad' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
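
The ShapeInferenceMissing failures (Pad here, Split in the GLU tests further below) make the dispatch rule explicit: for a node of type 'Pad' in the default domain, shape_dispatch looks for a function named shape_pad among the names listed in the message, and only the element-wise operators currently provide one. The snippet below is a rough illustration of that name-based dispatch pattern, not mlprodict's actual code; the registry dictionary and the (known_shapes, node) signature are assumptions.

<<<

# Sketch of a name-based shape dispatcher following the pattern the
# error message describes ('Pad' -> 'shape_pad').  The registry and the
# (known_shapes, node) signature are assumptions, not mlprodict's API.

class ShapeInferenceMissing(RuntimeError):
    pass


def shape_identity(known_shapes, node):
    # Toy rule: the output shape equals the first input's shape.
    known_shapes[node.output[0]] = known_shapes[node.input[0]]
    return True


_SHAPE_FUNCTIONS = {'shape_identity': shape_identity}


def shape_dispatch(known_shapes, node):
    key = 'shape_%s' % node.op_type.lower()
    fct = _SHAPE_FUNCTIONS.get(key)
    if fct is None:
        raise ShapeInferenceMissing(
            "Unable to find a corresponding function for operator type "
            "%r, looking for %r among %s" % (
                node.op_type, key, " ".join(sorted(_SHAPE_FUNCTIONS))))
    return fct(known_shapes, node)

>>>

Supporting 'Pad' would then amount to registering a shape_pad entry implementing that operator's output-shape rule.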
    
    ======================================================================
    ERROR: test_Conv1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv1d_dilated_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv1d_groups_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv1d_pad1_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv1d_pad1size1_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv1d_pad2_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv1d_pad2size1_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv1d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv2d_depthwise_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv2d_depthwise_padded_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv2d_depthwise_strided_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv2d_depthwise_with_multiplier_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv2d_dilated_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv2d_groups_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv2d_groups_thnn_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv2d_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv2d_padding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv2d_strided_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv3d_dilated_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv3d_dilated_strided_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv3d_groups_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv3d_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv3d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Conv3d_stride_padding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_ConvTranspose2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_ConvTranspose2d_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Embedding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Embedding_sparse_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_GLU_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
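
The message spells out the dispatch rule: the operator type is lowercased, prefixed with ``shape_`` and looked up among the implemented shape functions, so ``Split`` fails because no ``shape_split`` exists yet. An illustrative sketch of that lookup, with an assumed ``IMPLEMENTED`` set standing in for mlprodict's real registry (the real list is the one printed above):

<<<

# Assumed registry for illustration only.
IMPLEMENTED = {'shape_abs', 'shape_add', 'shape_relu', 'shape_identity'}

def dispatch_name(op_type):
    # shape_dispatch looks for 'shape_' + the lowercased operator type.
    return 'shape_%s' % op_type.lower()

for op in ('Relu', 'Split', 'MaxPool', 'Pad', 'Constant'):
    name = dispatch_name(op)
    print(op, '->', name, 'ok' if name in IMPLEMENTED else 'missing')

>>>
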
    
    ======================================================================
    ERROR: test_GLU_dim_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_Linear_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_Linear_no_bias_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_MaxPool1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
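
A ``shape_maxpool`` is missing as well. The arithmetic a hypothetical implementation would apply on every spatial axis is the standard ONNX pooling formula; the sketch below is only that formula, not code taken from mlprodict:

<<<

def maxpool_dim(dim, kernel, stride=1, pad_begin=0, pad_end=0, dilation=1):
    # out = floor((dim + pads - dilation * (kernel - 1) - 1) / stride) + 1
    return (dim + pad_begin + pad_end
            - dilation * (kernel - 1) - 1) // stride + 1

# MaxPool1d with kernel=4, stride=1, no padding, over an axis of length 50
print(maxpool_dim(50, 4))  # 47

>>>
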
    
    ======================================================================
    ERROR: test_MaxPool1d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_MaxPool1d_stride_padding_dilation_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_MaxPool2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_MaxPool2d_stride_padding_dilation_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_MaxPool3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_MaxPool3d_stride_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_MaxPool3d_stride_padding_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_PReLU_1d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_PReLU_1d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_PReLU_2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_PReLU_2d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_PReLU_3d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_PReLU_3d_multiparam_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_PixelShuffle_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
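
``shape_constant`` is also missing. A ``Constant`` node has no inputs, so its output shape is just the dims of the tensor stored in its ``value`` attribute. A minimal sketch, handling only that attribute and ignoring the other ``value_*`` variants a ``Constant`` may carry:

<<<

from onnx import numpy_helper

def constant_output_shape(node):
    # Only the 'value' (TensorProto) attribute is handled in this sketch.
    for att in node.attribute:
        if att.name == 'value':
            tensor = numpy_helper.to_array(att.t)
            return tuple(tensor.shape), tensor.dtype
    raise NotImplementedError("attribute 'value' not found")

>>>
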
    
    ======================================================================
    ERROR: test_PoissonNLLLLoss_no_reduce_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_ReflectionPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Pad' domain='', looking for 'shape_pad' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
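
The ``Pad`` failures follow the same pattern: no ``shape_pad`` is registered. The rule itself is simple, ``out[i] = in[i] + pads[i] + pads[i + rank]``, but since opset 11 the pads arrive as a second input tensor, so a purely static rule needs that value to be known. A small sketch of the arithmetic only:

<<<

def pad_output_shape(input_shape, pads):
    # out[i] = in[i] + pads[i] + pads[i + rank]
    rank = len(input_shape)
    assert len(pads) == 2 * rank
    return tuple(d + pads[i] + pads[i + rank]
                 for i, d in enumerate(input_shape))

print(pad_output_shape((1, 3, 4, 4), [0, 0, 1, 1, 0, 0, 1, 1]))  # (1, 3, 6, 6)

>>>
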
    
    ======================================================================
    ERROR: test_ReplicationPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Pad' domain='', looking for 'shape_pad' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_Softsign_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_ZeroPad2d_cpu (__main__.OnnxBackendPyTorchConvertedModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Pad' domain='', looking for 'shape_pad' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_operator_addconstant_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_operator_addmm_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Gemm' domain='', looking for 'shape_gemm' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_operator_chunk_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Split' domain='', looking for 'shape_split' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_operator_concat2_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Concat' domain='', looking for 'shape_concat' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_operator_conv_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_operator_convtranspose_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_operator_flatten_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_operator_index_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Slice' domain='', looking for 'shape_slice' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_operator_maxpool_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'MaxPool' domain='', looking for 'shape_maxpool' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_operator_mm_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_operator_non_float_params_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_operator_pad_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Pad' domain='', looking for 'shape_pad' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_operator_params_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_operator_permute2_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Transpose' domain='', looking for 'shape_transpose' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_operator_reduced_mean_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_operator_reduced_mean_keepdim_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceMean' domain='', looking for 'shape_reducemean' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_operator_reduced_sum_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_operator_reduced_sum_keepdim_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'ReduceSum' domain='', looking for 'shape_reducesum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_operator_repeat_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Constant' domain='', looking for 'shape_constant' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_operator_repeat_dim_overflow_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 283, in _init
        self.inplaces_ = self._guess_inplace(self.input_inplace)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1628, in _guess_inplace
        node.enable_inplace_compute(n)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 542, in enable_inplace_compute
        (self.ops_ or self.function_).enable_inplace_compute(
    AttributeError: 'Tile_9' object has no attribute 'enable_inplace_compute'
    
    ======================================================================
    ERROR: test_operator_symbolic_override_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='1')
    
    ======================================================================
    ERROR: test_operator_symbolic_override_nested_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Sum' domain='', looking for 'shape_sum' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
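
Every ShapeInferenceMissing error follows the rule spelled out in the message: the dispatcher derives the expected function name from the operator type ('Sum' becomes 'shape_sum') and fails when that name is not among the implemented shape functions. The lookup can be reproduced with a small sketch; the registry argument is an assumption used only for illustration:

<<<

def dispatch_shape_function(op_type, registry):
    """Illustrates the naming rule behind ShapeInferenceMissing:
    'Sum' -> 'shape_sum', 'Expand' -> 'shape_expand', ..."""
    key = 'shape_' + op_type.lower()
    if key not in registry:
        raise RuntimeError(
            f"Unable to find a corresponding function for operator "
            f"type {op_type!r}, looking for {key!r}.")
    return registry[key]

>>>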
    
    ======================================================================
    ERROR: test_operator_view_cpu (__main__.OnnxBackendPyTorchOperatorModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Flatten' domain='', looking for 'shape_flatten' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
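
Flatten falls in the same category but its shape rule is easy to state: with attribute axis, the output is a 2-D tensor whose first dimension is the product of the first axis input dimensions and whose second dimension is the product of the remaining ones. A hypothetical illustration of that rule, independent from the signature the library expects:

<<<

from functools import reduce
from operator import mul

def flatten_output_shape(input_shape, axis=1):
    """Hypothetical rule for Flatten:
    (prod(shape[:axis]), prod(shape[axis:]))."""
    prod = lambda dims: reduce(mul, dims, 1)
    return prod(input_shape[:axis]), prod(input_shape[axis:])

>>>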
    
    ======================================================================
    ERROR: test_bvlc_alexnet_cpu (__main__.OnnxBackendRealModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='conv1_w_0')
    
    ======================================================================
    ERROR: test_expand_shape_model1_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Expand' domain='', looking for 'shape_expand' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
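
Expand is also missing. Implementing it mostly amounts to numpy broadcasting between the input shape and the requested shape, with the extra difficulty that the second input is a runtime tensor, so the target shape is not always known. The sketch below only illustrates the broadcasting rule and assumes the target shape is available:

<<<

def broadcast_expand(input_shape, target_shape):
    """Hypothetical helper: broadcast input_shape against target_shape
    the way Expand does (dimensions equal to 1 are stretched)."""
    res = []
    for a, b in zip(reversed(input_shape), reversed(target_shape)):
        if a == 1:
            res.append(b)
        elif b == 1 or a == b:
            res.append(a)
        else:
            raise ValueError(
                f"Shapes {input_shape} and {target_shape} cannot be "
                f"broadcast together.")
    # the longer shape keeps its leading dimensions
    longer = (input_shape if len(input_shape) > len(target_shape)
              else target_shape)
    res.extend(reversed(longer[:abs(len(input_shape) - len(target_shape))]))
    return tuple(reversed(res))

>>>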
    
    ======================================================================
    ERROR: test_expand_shape_model2_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Expand' domain='', looking for 'shape_expand' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_expand_shape_model3_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Expand' domain='', looking for 'shape_expand' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_expand_shape_model4_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'Expand' domain='', looking for 'shape_expand' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_gradient_of_add_and_mul_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 91, in load_op
        cl = onnx_load_op(options.get('domain', ''),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/reference/ops/_op_list.py", line 274, in load_op
        raise ValueError(f"Domain must be '' not {domain!r}.")
    ValueError: Domain must be '' not 'ai.onnx.preview.training'.
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 258, in _init
        node.setup_runtime(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 259, in setup_runtime
        raise e
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 240, in setup_runtime
        self.ops_ = load_op(self.onnx_node, desc=self.desc,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
        return lo(onnx_node, desc=desc, options=options)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 94, in load_op
        raise MissingOperatorError(
    mlprodict.onnxrt.excs.MissingOperatorError: Unable to load class for operator name=Gradient, opset=1, options={'domain': 'ai.onnx.preview.training', 'target_opset': 1, 'ir_version': 7}, _additional_ops={}.
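
The two Gradient failures have a different cause: the node belongs to the 'ai.onnx.preview.training' domain, the runtime has no kernel for it, and the fallback loader from onnx.reference only accepts the default domain. These tests could simply be excluded from the report with an additional exclusion pattern; alternatively, a guard such as this hypothetical check makes the limitation explicit before any loader is called:

<<<

def has_default_domain(node):
    """Hypothetical check: only nodes from the default ONNX domain can be
    handed to onnx.reference's operator loader, which rejects anything
    else (here 'ai.onnx.preview.training')."""
    return node.domain in ('', 'ai.onnx')

>>>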
    
    ======================================================================
    ERROR: test_gradient_of_add_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 91, in load_op
        cl = onnx_load_op(options.get('domain', ''),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/reference/ops/_op_list.py", line 274, in load_op
        raise ValueError(f"Domain must be '' not {domain!r}.")
    ValueError: Domain must be '' not 'ai.onnx.preview.training'.
    
    The above exception was the direct cause of the following exception:
    
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 258, in _init
        node.setup_runtime(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 259, in setup_runtime
        raise e
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 240, in setup_runtime
        self.ops_ = load_op(self.onnx_node, desc=self.desc,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops.py", line 36, in load_op
        return lo(onnx_node, desc=desc, options=options)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_cpu/__init__.py", line 94, in load_op
        raise MissingOperatorError(
    mlprodict.onnxrt.excs.MissingOperatorError: Unable to load class for operator name=Gradient, opset=1, options={'domain': 'ai.onnx.preview.training', 'target_opset': 1, 'ir_version': 7}, _additional_ops={}.
    
    ======================================================================
    ERROR: test_sequence_model1_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='pos')
    
    ======================================================================
    ERROR: test_sequence_model2_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 283, in _init
        self.inplaces_ = self._guess_inplace(self.input_inplace)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1628, in _guess_inplace
        node.enable_inplace_compute(n)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 542, in enable_inplace_compute
        (self.ops_ or self.function_).enable_inplace_compute(
    AttributeError: 'SequenceErase_12' object has no attribute 'enable_inplace_compute'
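
The AttributeError above comes from sequence operators whose runtime class does not implement enable_inplace_compute. A defensive variant of the call, shown only as a hypothetical sketch, skips operators that cannot compute in place instead of failing:

<<<

def enable_inplace_if_possible(op, index):
    """Hypothetical guard: only ask the operator to reuse its input buffer
    when it actually implements enable_inplace_compute."""
    method = getattr(op, 'enable_inplace_compute', None)
    if method is not None:
        method(index)

>>>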
    
    ======================================================================
    ERROR: test_sequence_model3_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 283, in _init
        self.inplaces_ = self._guess_inplace(self.input_inplace)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1628, in _guess_inplace
        node.enable_inplace_compute(n)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 542, in enable_inplace_compute
        (self.ops_ or self.function_).enable_inplace_compute(
    AttributeError: 'SequenceErase_12' object has no attribute 'enable_inplace_compute'
    
    ======================================================================
    ERROR: test_sequence_model4_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SequenceConstruct' domain='', looking for 'shape_sequenceconstruct' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sequence_model5_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'SequenceConstruct' domain='', looking for 'shape_sequenceconstruct' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_sequence_model6_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 283, in _init
        self.inplaces_ = self._guess_inplace(self.input_inplace)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1628, in _guess_inplace
        node.enable_inplace_compute(n)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 542, in enable_inplace_compute
        (self.ops_ or self.function_).enable_inplace_compute(
    AttributeError: 'SequenceLength_12' object has no attribute 'enable_inplace_compute'
    
    ======================================================================
    ERROR: test_sequence_model7_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 136, in _run_empty
        raise NotImplementedError(
    NotImplementedError: Optional inputs are not implemented yet. (name='pos_at')
    
    ======================================================================
    ERROR: test_sequence_model8_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 304, in create_inference_session
        return _CombineModels(OnnxInference(content),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 140, in __init__
        self._init(existing_functions)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 283, in _init
        self.inplaces_ = self._guess_inplace(self.input_inplace)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference.py", line 1628, in _guess_inplace
        node.enable_inplace_compute(n)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_inference_node.py", line 542, in enable_inplace_compute
        (self.ops_ or self.function_).enable_inplace_compute(
    AttributeError: 'SequenceLength_12' object has no attribute 'enable_inplace_compute'
    
    ======================================================================
    ERROR: test_strnorm_model_monday_casesensintive_lower_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
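
StringNormalizer is another operator without a shape function, and its output shape is only partially predictable: the operator may drop stopwords, so the number of strings is unknown while the rank is preserved. A hypothetical shape rule would therefore leave the last dimension undefined:

<<<

def stringnormalizer_output_shape(input_shape):
    """Hypothetical rule: StringNormalizer keeps the rank of its input but
    may remove elements (stopwords), so the last dimension is unknown
    (None)."""
    if len(input_shape) == 0:
        return (None, )
    return tuple(input_shape[:-1]) + (None, )

>>>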
    
    ======================================================================
    ERROR: test_strnorm_model_monday_casesensintive_nochangecase_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_strnorm_model_monday_casesensintive_upper_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_strnorm_model_monday_empty_output_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_strnorm_model_monday_insensintive_upper_twodim_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    ERROR: test_strnorm_model_nostopwords_nochangecase_cpu (__main__.OnnxBackendSimpleModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 332, in run
        prepared_model = self.backend.prepare(model, device)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 221, in prepare
        return cls.prepare(binm, device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 202, in prepare
        inf = cls.create_inference_session(model)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/backend.py", line 305, in create_inference_session
        OnnxShapeInference(content))
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 67, in __init__
        self.known_shapes_ = self._run_empty()
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/onnx_shape_inference.py", line 163, in _run_empty
        cont = cont or shape_dispatch(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_doc/sphinxdoc/source/mlprodict/onnxrt/ops_shape/__init__.py", line 107, in shape_dispatch
        raise ShapeInferenceMissing(  # pragma: no cover
    mlprodict.onnxrt.ops_shape.shape_excs.ShapeInferenceMissing: Unable to find a corresponding function for operator type 'StringNormalizer' domain='', looking for 'shape_stringnormalizer' among
    shape_abs shape_acos shape_acosh shape_add shape_and shape_asin
    shape_asinh shape_atan shape_atanh shape_castlike shape_ceil
    shape_celu shape_clip shape_cos shape_cosh shape_det shape_div
    shape_elu shape_equal shape_erf shape_excs shape_exp shape_floor
    shape_greater shape_greaterorequal shape_hardmax shape_hardsigmoid
    shape_identity shape_isinf shape_isnan shape_leakyrelu shape_less
    shape_lessorequal shape_log shape_logsoftmax shape_max shape_min
    shape_mod shape_mul shape_neg shape_not shape_or shape_pow
    shape_reciprocal shape_relu shape_result shape_round shape_selu
    shape_shrink shape_sigmoid shape_sign shape_sin shape_sinh
    shape_softmax shape_softplus shape_softsign shape_sqrt shape_sub
    shape_tan shape_tanh shape_thresholdedrelu shape_trilu shape_xor
    
    ======================================================================
    FAIL: test_castlike_FLOAT_to_STRING_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 208, in assert_similar_outputs
        np.testing.assert_array_equal(outputs[i], ref_outputs[i])
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 934, in assert_array_equal
        assert_array_compare(operator.__eq__, x, y, err_msg=err_msg,
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Arrays are not equal
    
    Mismatched elements: 12 / 12 (100%)
     x: array([[0.9767611026763916, 0.6048455238342285, 0.7392635941505432,
            0.03918779268860817],
           [0.28280696272850037, 0.12019655853509903, 0.296140193939209,...
     y: array([['0.9767611', '0.6048455', '0.7392636', '0.039187793'],
           ['0.28280696', '0.12019656', '0.2961402', '0.11872772'],
           ['0.31798318', '0.41426298', '0.064147495', '0.6924721']],
          dtype=object)
    
    ======================================================================
    FAIL: test_mod_int64_fmod_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 2 / 6 (33.3%)
    Max absolute difference: 3
    Max relative difference: 3.
     x: array([ 0, -2,  5,  0,  2,  3])
     y: array([ 0,  1,  5,  0, -1,  3])
    
    ======================================================================
    FAIL: test_thresholdedrelu_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 58 / 60 (96.7%)
    Max absolute difference: 2.
    Max relative difference: 0.
     x: array([[[2.      , 2.      , 2.      , 2.240893, 2.      ],
            [2.      , 2.      , 2.      , 2.      , 2.      ],
            [2.      , 2.      , 2.      , 2.      , 2.      ],...
     y: array([[[0.      , 0.      , 0.      , 2.240893, 0.      ],
            [0.      , 0.      , 0.      , 0.      , 0.      ],
            [0.      , 0.      , 0.      , 0.      , 0.      ],...
    
    ======================================================================
    FAIL: test_thresholdedrelu_default_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 49 / 60 (81.7%)
    Max absolute difference: 1.
    Max relative difference: 0.
     x: array([[[1.764052, 1.      , 1.      , 2.240893, 1.867558],
            [1.      , 1.      , 1.      , 1.      , 1.      ],
            [1.      , 1.454273, 1.      , 1.      , 1.      ],...
     y: array([[[1.764052, 0.      , 0.      , 2.240893, 1.867558],
            [0.      , 0.      , 0.      , 0.      , 0.      ],
            [0.      , 1.454273, 0.      , 0.      , 0.      ],...
    
    ======================================================================
    FAIL: test_thresholdedrelu_example_cpu (__main__.OnnxBackendNodeModelTest)
    ----------------------------------------------------------------------
    Traceback (most recent call last):
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 296, in device_test_func
        return test_func(*args, device=device, **kwargs)
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 360, in run
        self.assert_similar_outputs(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/onnx/backend/test/runner/__init__.py", line 210, in assert_similar_outputs
        np.testing.assert_allclose(
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 1527, in assert_allclose
        assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
      File "somewhere/workspace/mlprodict/mlprodict_UT_39_std/_venv/lib/python3.9/site-packages/numpy/testing/_private/utils.py", line 844, in assert_array_compare
        raise AssertionError(msg)
    AssertionError: 
    Not equal to tolerance rtol=0.001, atol=1e-07
    
    Mismatched elements: 4 / 5 (80%)
    Max absolute difference: 2.
    Max relative difference: 0.
     x: array([2. , 2. , 2. , 2. , 2.2], dtype=float32)
     y: array([0. , 0. , 0. , 0. , 2.2], dtype=float32)
    
    ----------------------------------------------------------------------
    Ran 2492 tests in 10.848s
    
    FAILED (failures=5, errors=965, skipped=1254)
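
The ShapeInferenceMissing errors above all have the same cause: the shape dispatcher finds no shape_stringnormalizer function for the operator StringNormalizer, so any model containing that operator fails before any value is computed. A minimal sketch, assuming only the package path and the shape_<lowercased operator name> convention visible in the tracebacks, to check which shape functions the dispatcher can see:

<<<

# Hypothetical check, relying only on the module path shown in the tracebacks above.
import mlprodict.onnxrt.ops_shape as ops_shape

names = sorted(n for n in dir(ops_shape) if n.startswith("shape_"))
print(names)                                  # roughly the list quoted in the error message
print("shape_stringnormalizer" in names)      # expected: False, matching the errors above

>>>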
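
test_castlike_FLOAT_to_STRING_cpu is a type mismatch rather than a numerical one: the backend returns float32 values while the reference output contains their string representations, since CastLike takes the element type of its second input (a string tensor here). A small numpy sketch of the reference behaviour, reusing the first row of values from the mismatch report above (exact formatting may vary slightly across numpy versions):

<<<

import numpy as np

# Values copied from the first row of the mismatch report above (illustrative only).
x = np.array([0.9767611, 0.6048455, 0.7392636, 0.039187793], dtype=np.float32)

# CastLike(FLOAT -> STRING): cast every element to the string type of the 'like' input.
expected = np.array([str(v) for v in x], dtype=object)
print(expected)   # ['0.9767611' '0.6048455' '0.7392636' '0.039187793']

>>>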
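
test_mod_int64_fmod_cpu fails because of a sign convention: with fmod=1, ONNX Mod behaves like C's fmod and the result takes the sign of the dividend, whereas the backend output matches Python/numpy modulo, where the result takes the sign of the divisor. The sketch below reproduces both arrays from the report; the inputs are illustrative values chosen to be consistent with them:

<<<

import numpy as np

# Illustrative int64 inputs consistent with the two arrays shown in the report above.
x = np.array([-4, 7, 5, 4, -7, 8], dtype=np.int64)
y = np.array([2, -3, 8, -2, 3, 5], dtype=np.int64)

print(np.fmod(x, y))   # [ 0  1  5  0 -1  3]  -> reference output (fmod=1, sign of dividend)
print(np.mod(x, y))    # [ 0 -2  5  0  2  3]  -> backend output (sign of divisor)

>>>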
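
The three ThresholdedRelu failures share the same pattern: wherever the input is at or below the threshold alpha, the reference output is 0, while the backend output appears clamped to alpha instead (2.0 in the first and last case, 1.0 for the default one). ThresholdedRelu is specified as y = x if x > alpha else 0, with alpha defaulting to 1.0. A minimal numpy sketch with illustrative inputs:

<<<

import numpy as np

def thresholded_relu(x, alpha=1.0):
    # ONNX ThresholdedRelu: keep x where x > alpha, output 0 elsewhere
    # (the failing runs above appear to output alpha instead of 0).
    return np.where(x > alpha, x, 0).astype(x.dtype)

x = np.array([-1.5, 0.0, 1.2, 2.0, 2.2], dtype=np.float32)
print(thresholded_relu(x, alpha=2.0))   # [0.  0.  0.  0.  2.2]
print(thresholded_relu(x))              # [0.  0.  1.2 2.  2.2]  (default alpha=1.0)

>>>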