From 95f293df19c9a38806868fe12a64a4f9b457f9c1 Mon Sep 17 00:00:00 2001 From: Adrian Lundell <36153706+AdrianLundell@users.noreply.github.com> Date: Wed, 28 Aug 2024 10:07:48 +0200 Subject: [PATCH] Refactor pooling-tests + update documentation (#144) - Adds support for pooling in RefactoredTestGen - Regenerates data - Aligns variable names in related tests - Adds missing int16 LSTM support to README --- README.md | 4 +- Tests/UnitTest/README.md | 6 +- .../RefactoredTestGen/Lib/op_pooling.py | 80 +++ Tests/UnitTest/RefactoredTestGen/Lib/test.py | 15 +- .../UnitTest/RefactoredTestGen/test_plan.json | 330 ++++++++++ .../TestData/avgpooling/biases_data.h | 25 - .../TestData/avgpooling/config_data.h | 30 +- .../TestData/avgpooling/input_data.h | 562 ------------------ .../TestData/avgpooling/input_tensor.h | 284 +++++++++ .../TestCases/TestData/avgpooling/output.h | 14 + .../TestData/avgpooling/output_mult_data.h | 40 -- .../TestData/avgpooling/output_ref_data.h | 22 - .../TestData/avgpooling/output_shift_data.h | 25 - .../TestCases/TestData/avgpooling/test_data.h | 6 +- .../TestData/avgpooling_1/config_data.h | 29 +- .../TestData/avgpooling_1/input_data.h | 12 - .../TestData/avgpooling_1/input_tensor.h | 13 + .../TestCases/TestData/avgpooling_1/output.h | 6 + .../TestData/avgpooling_1/output_ref_data.h | 5 - .../TestData/avgpooling_1/test_data.h | 5 +- .../TestData/avgpooling_2/config_data.h | 29 +- .../TestData/avgpooling_2/input_data.h | 8 - .../TestData/avgpooling_2/input_tensor.h | 9 + .../TestCases/TestData/avgpooling_2/output.h | 9 + .../TestData/avgpooling_2/output_ref_data.h | 8 - .../TestData/avgpooling_2/test_data.h | 5 +- .../TestData/avgpooling_3/config_data.h | 30 +- .../TestData/avgpooling_3/input_data.h | 8 - .../TestData/avgpooling_3/input_tensor.h | 7 + .../TestCases/TestData/avgpooling_3/output.h | 6 + .../TestData/avgpooling_3/output_ref_data.h | 7 - .../TestData/avgpooling_3/test_data.h | 6 +- .../TestData/avgpooling_4/config_data.h | 30 +- 
.../TestData/avgpooling_4/input_data.h | 12 - .../TestData/avgpooling_4/input_tensor.h | 12 + .../TestCases/TestData/avgpooling_4/output.h | 8 + .../TestData/avgpooling_4/output_ref_data.h | 8 - .../TestData/avgpooling_4/test_data.h | 6 +- .../TestData/avgpooling_5/config_data.h | 29 +- .../TestData/avgpooling_5/input_data.h | 5 - .../TestData/avgpooling_5/input_tensor.h | 6 + .../TestCases/TestData/avgpooling_5/output.h | 6 + .../TestData/avgpooling_5/output_ref_data.h | 5 - .../TestData/avgpooling_5/test_data.h | 5 +- .../TestData/avgpooling_int16/config_data.h | 29 +- .../TestData/avgpooling_int16/input_data.h | 35 -- .../TestData/avgpooling_int16/input_tensor.h | 36 ++ .../TestData/avgpooling_int16/output.h | 21 + .../avgpooling_int16/output_ref_data.h | 20 - .../TestData/avgpooling_int16/test_data.h | 5 +- .../TestData/avgpooling_int16_1/config_data.h | 30 +- .../TestData/avgpooling_int16_1/input_data.h | 10 - .../avgpooling_int16_1/input_tensor.h | 10 + .../TestData/avgpooling_int16_1/output.h | 8 + .../avgpooling_int16_1/output_ref_data.h | 8 - .../TestData/avgpooling_int16_1/test_data.h | 6 +- .../TestData/avgpooling_int16_2/config_data.h | 29 +- .../TestData/avgpooling_int16_2/input_data.h | 18 - .../avgpooling_int16_2/input_tensor.h | 19 + .../TestData/avgpooling_int16_2/output.h | 14 + .../avgpooling_int16_2/output_ref_data.h | 13 - .../TestData/avgpooling_int16_2/test_data.h | 5 +- .../TestData/avgpooling_int16_3/config_data.h | 37 +- .../TestData/avgpooling_int16_3/input_data.h | 35 -- .../avgpooling_int16_3/input_tensor.h | 308 ++++++++++ .../TestData/avgpooling_int16_3/output.h | 15 + .../avgpooling_int16_3/output_ref_data.h | 16 - .../TestData/avgpooling_int16_3/test_data.h | 5 +- .../TestData/maxpool_int16/config_data.h | 30 +- .../TestData/maxpool_int16/input_data.h | 12 - .../TestData/maxpool_int16/input_tensor.h | 11 + .../TestCases/TestData/maxpool_int16/output.h | 7 + .../TestData/maxpool_int16/output_ref_data.h | 7 - 
.../TestData/maxpool_int16/test_data.h | 6 +- .../TestData/maxpool_int16_1/config_data.h | 38 +- .../TestData/maxpool_int16_1/input_data.h | 12 - .../TestData/maxpool_int16_1/input_tensor.h | 8 + .../TestData/maxpool_int16_1/output.h | 7 + .../maxpool_int16_1/output_ref_data.h | 9 - .../TestData/maxpool_int16_1/test_data.h | 6 +- .../TestData/maxpool_int16_2/config_data.h | 29 +- .../TestData/maxpool_int16_2/input_data.h | 16 - .../TestData/maxpool_int16_2/input_tensor.h | 17 + .../TestData/maxpool_int16_2/output.h | 11 + .../maxpool_int16_2/output_ref_data.h | 10 - .../TestData/maxpool_int16_2/test_data.h | 5 +- .../TestData/maxpooling/config_data.h | 30 +- .../TestData/maxpooling/input_data.h | 229 ------- .../TestData/maxpooling/input_tensor.h | 229 +++++++ .../TestCases/TestData/maxpooling/output.h | 13 + .../TestData/maxpooling/output_ref_data.h | 13 - .../TestCases/TestData/maxpooling/test_data.h | 6 +- .../TestData/maxpooling_1/config_data.h | 29 +- .../TestData/maxpooling_1/input_data.h | 12 - .../TestData/maxpooling_1/input_tensor.h | 13 + .../TestCases/TestData/maxpooling_1/output.h | 6 + .../TestData/maxpooling_1/output_ref_data.h | 5 - .../TestData/maxpooling_1/test_data.h | 5 +- .../TestData/maxpooling_2/config_data.h | 29 +- .../TestData/maxpooling_2/input_data.h | 8 - .../TestData/maxpooling_2/input_tensor.h | 9 + .../TestCases/TestData/maxpooling_2/output.h | 9 + .../TestData/maxpooling_2/output_ref_data.h | 8 - .../TestData/maxpooling_2/test_data.h | 5 +- .../TestData/maxpooling_3/config_data.h | 30 +- .../TestData/maxpooling_3/input_data.h | 9 - .../TestData/maxpooling_3/input_tensor.h | 7 + .../TestCases/TestData/maxpooling_3/output.h | 6 + .../TestData/maxpooling_3/output_ref_data.h | 8 - .../TestData/maxpooling_3/test_data.h | 6 +- .../TestData/maxpooling_4/config_data.h | 29 +- .../TestData/maxpooling_4/input_data.h | 7 - .../TestData/maxpooling_4/input_tensor.h | 8 + .../TestCases/TestData/maxpooling_4/output.h | 6 + 
.../TestData/maxpooling_4/output_ref_data.h | 5 - .../TestData/maxpooling_4/test_data.h | 5 +- .../TestData/maxpooling_5/config_data.h | 37 +- .../TestData/maxpooling_5/input_data.h | 6 - .../TestData/maxpooling_5/input_tensor.h | 15 + .../TestCases/TestData/maxpooling_5/output.h | 15 + .../TestData/maxpooling_5/output_ref_data.h | 6 - .../TestData/maxpooling_5/test_data.h | 5 +- .../TestData/maxpooling_6/config_data.h | 29 +- .../TestData/maxpooling_6/input_data.h | 9 - .../TestData/maxpooling_6/input_tensor.h | 10 + .../TestCases/TestData/maxpooling_6/output.h | 7 + .../TestData/maxpooling_6/output_ref_data.h | 6 - .../TestData/maxpooling_6/test_data.h | 5 +- .../TestData/maxpooling_7/config_data.h | 29 +- .../TestData/maxpooling_7/input_data.h | 5 - .../TestData/maxpooling_7/input_tensor.h | 6 + .../TestCases/TestData/maxpooling_7/output.h | 6 + .../TestData/maxpooling_7/output_ref_data.h | 5 - .../TestData/maxpooling_7/test_data.h | 5 +- .../test_arm_avgpool_s16.c | 173 +++--- .../test_arm_avgpool_s8/test_arm_avgpool_s8.c | 241 ++++---- .../test_arm_max_pool_s16.c | 125 ++-- .../test_arm_max_pool_s8.c | 274 +++++---- 138 files changed, 2480 insertions(+), 2073 deletions(-) create mode 100644 Tests/UnitTest/RefactoredTestGen/Lib/op_pooling.py delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling/biases_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling/output_mult_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling/output_shift_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_1/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_1/input_tensor.h create mode 
100644 Tests/UnitTest/TestCases/TestData/avgpooling_1/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_1/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_2/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_2/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_2/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_2/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_3/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_3/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_3/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_3/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_4/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_4/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_4/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_4/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_5/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_5/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_5/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_5/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_int16/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_int16/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_int16/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_int16/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/input_tensor.h create mode 100644 
Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpool_int16/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpool_int16/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpool_int16/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpool_int16/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpool_int16_1/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpool_int16_1/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpool_int16_1/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpool_int16_1/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpool_int16_2/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpool_int16_2/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpool_int16_2/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpool_int16_2/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling/input_tensor.h create mode 100644 
Tests/UnitTest/TestCases/TestData/maxpooling/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_1/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_1/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_1/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_1/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_2/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_2/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_2/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_2/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_3/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_3/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_3/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_3/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_4/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_4/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_4/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_4/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_5/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_5/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_5/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_5/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_6/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_6/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_6/output.h delete mode 100644 
Tests/UnitTest/TestCases/TestData/maxpooling_6/output_ref_data.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_7/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_7/input_tensor.h create mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_7/output.h delete mode 100644 Tests/UnitTest/TestCases/TestData/maxpooling_7/output_ref_data.h diff --git a/README.md b/README.md index a1deaa3f..790c9d84 100644 --- a/README.md +++ b/README.md @@ -36,7 +36,7 @@ Examples are Cortex-M55 or Cortex-M85 configured with MVE. | MaxPooling | Yes | Yes | N/A | Yes | Yes | N/A | Yes | Yes | N/A | | AvgPooling | Yes | Yes | N/A | Yes | Yes | N/A | Yes | Yes | N/A | | Softmax | Yes | Yes | N/A | Yes | Yes | N/A | Yes | No | N/A | -| LSTM | Yes | NA | No | Yes | NA | No | Yes | NA | No | +| LSTM | Yes | Yes | No | Yes | Yes | No | Yes | Yes | No | | SVDF | Yes | No | No | Yes | No | No | Yes | No | No | * int4 weights + int8 activations @@ -110,4 +110,4 @@ This product confirms to Arm’s inclusive language policy and, to the best of o ## Support / Contact -For any questions or to reach the CMSIS-NN team, please create a new issue in https://github.com/ARM-software/CMSIS-NN/issues \ No newline at end of file +For any questions or to reach the CMSIS-NN team, please create a new issue in https://github.com/ARM-software/CMSIS-NN/issues diff --git a/Tests/UnitTest/README.md b/Tests/UnitTest/README.md index c54a09f9..71fdaa55 100644 --- a/Tests/UnitTest/README.md +++ b/Tests/UnitTest/README.md @@ -168,11 +168,11 @@ Current progress: | convolution | x | x | New version only supports 16x8 and int4 packed weights | depthwise conv | x | | | fully_connected | x | x | New version supports int4 packed weights. Only new version supports per channels quantization for int8. 
-| lstm | x | x | Only new version supporting 16x8 +| lstm | | x | Only new version supporting 16x8 | svdf | x | | | softmax | x | | -| avgpool | x | | -| maxpool | x | | +| avgpool | | x | +| maxpool | | x | | add | x | | | mul | x | | | batch matmul | | x | diff --git a/Tests/UnitTest/RefactoredTestGen/Lib/op_pooling.py b/Tests/UnitTest/RefactoredTestGen/Lib/op_pooling.py new file mode 100644 index 00000000..16025553 --- /dev/null +++ b/Tests/UnitTest/RefactoredTestGen/Lib/op_pooling.py @@ -0,0 +1,80 @@ +# SPDX-FileCopyrightText: Copyright 2024 Arm Limited and/or its affiliates +# +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the License); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an AS IS BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import Lib.op_utils +import tensorflow as tf +import math +import numpy as np + +from tensorflow.lite.python.interpreter import Interpreter +from tensorflow.lite.python.interpreter import OpResolverType +import tf_keras as keras + +class Op_pooling(Lib.op_utils.Op_type): + + def get_shapes(params): + shapes = {} + shapes["input_tensor"] = (params["batch_size"], params["input_h"], params["input_w"], params["input_c"]) + shapes["representational_dataset"] = shapes["input_tensor"] + + return shapes + + def generate_keras_model(shapes, params): + model = keras.models.Sequential() + model.add(keras.layers.InputLayer(input_shape=shapes["input_tensor"][1:], batch_size=shapes["input_tensor"][0])) + if params["op_type"] == 'avgpool': + model.add( + keras.layers.AveragePooling2D(pool_size=(params["filter_h"], params["filter_w"]), + strides=(params["stride_h"], params["stride_w"]), + padding=params["pad"], + input_shape=shapes["input_tensor"][1:])) + elif params["op_type"] == 'maxpool': + model.add( + keras.layers.MaxPooling2D(pool_size=(params["filter_h"], params["filter_w"]), + strides=(params["stride_h"], params["stride_w"]), + padding=params["pad"], + input_shape=shapes["input_tensor"][1:])) + else: + raise RuntimeError("Wrong test type") + + + return model + + def generate_data_tflite(tflite_fname, params): + tensors = {} + effective_scales = {} + scales = {} + generated_params = {} + + interpreter = Interpreter(str(tflite_fname), experimental_op_resolver_type=OpResolverType.BUILTIN_REF) + interpreter.allocate_tensors() + output_details = interpreter.get_output_details() + + generated_params["output_c"] = output_details[0]['shape'][3] + generated_params["output_w"] = output_details[0]['shape'][2] + generated_params["output_h"] = output_details[0]['shape'][1] + + if params["pad"] == "SAME": + pad_along_width = max((generated_params["output_w"] - 1) * params["stride_w"] + params["filter_w"] - params["input_w"], 0) + pad_along_height = max((generated_params["output_h"] 
- 1) * params["stride_h"] + params["filter_h"] - params["input_h"], 0) + + generated_params["padding_h"] = pad_along_height // 2 + generated_params["padding_w"] = pad_along_width // 2 + else: + generated_params["padding_h"] = 0 + generated_params["padding_w"] = 0 + + return Lib.op_utils.Generated_data(generated_params, tensors, scales, effective_scales) diff --git a/Tests/UnitTest/RefactoredTestGen/Lib/test.py b/Tests/UnitTest/RefactoredTestGen/Lib/test.py index 16375765..0a8b7082 100644 --- a/Tests/UnitTest/RefactoredTestGen/Lib/test.py +++ b/Tests/UnitTest/RefactoredTestGen/Lib/test.py @@ -19,6 +19,7 @@ import Lib.op_conv import Lib.op_batch_matmul import Lib.op_fully_connected +import Lib.op_pooling import tensorflow as tf import numpy as np from tensorflow.lite.python.interpreter import Interpreter @@ -77,11 +78,16 @@ def generate(params, args, fpaths): except KeyError: pass + if "bias_data_type" in params: + bias_dtype = params["bias_data_type"] + else: + bias_dtype = None + convert_keras_to_tflite(fpaths["tflite"], keras_model, quantize=True, dtype=params["input_data_type"], - bias_dtype=params["bias_data_type"], + bias_dtype=bias_dtype, shape=shapes, per_tensor_quant_for_dense=per_tensor_quant_for_dense) @@ -138,6 +144,11 @@ def generate(params, args, fpaths): else: raise ValueError(f"Invalid interpreter in {params['name']}") + if "activation_min" in params: + data.tensors["output"] = np.maximum(data.tensors["output"], params["activation_min"]) + if "activation_max" in params: + data.tensors["output"] = np.minimum(data.tensors["output"], params["activation_max"]) + # Write data header = get_header(params["tflite_generator"], params["interpreter"]) @@ -172,6 +183,8 @@ def get_op_type(op_type_string): return Lib.op_batch_matmul.Op_batch_matmul elif op_type_string == "fully_connected": return Lib.op_fully_connected.Op_fully_connected + if op_type_string == "avgpool" or op_type_string == "maxpool": + return Lib.op_pooling.Op_pooling else: raise 
ValueError(f"Unknown op type '{op_type_string}'") diff --git a/Tests/UnitTest/RefactoredTestGen/test_plan.json b/Tests/UnitTest/RefactoredTestGen/test_plan.json index 61e283fb..d3d8359d 100644 --- a/Tests/UnitTest/RefactoredTestGen/test_plan.json +++ b/Tests/UnitTest/RefactoredTestGen/test_plan.json @@ -820,6 +820,336 @@ "out_ch" : 22, "generate_bias": true, "per_channel_quant": true + } + ] +}, +{ + "suite_name" : "test_arm_avgpool_s8", + "op_type" : "avgpool", + "input_data_type": "int8_t", + "interpreter": "tensorflow", + "tflite_generator": "keras", + "tests" : [ + {"name" : "avgpooling", + "batch_size" : 1, + "input_n" : 1, + "input_w" : 22, + "input_h" : 12, + "input_c" : 20, + "filter_w": 6, + "filter_h": 5, + "stride_w": 9, + "stride_h": 5, + "pad" : "SAME", + "activation_max": 127, + "activation_min": -128 + }, + {"name" : "avgpooling_1", + "batch_size" : 1, + "input_n" : 1, + "input_w" : 9, + "input_h" : 5, + "input_c" : 3, + "filter_w": 9, + "filter_h": 5, + "stride_w": 1, + "stride_h": 2, + "pad" : "VALID", + "activation_max": 127, + "activation_min": -128 + }, + {"name" : "avgpooling_2", + "batch_size" : 1, + "input_n" : 1, + "input_w" : 12, + "input_h" : 1, + "input_c" : 5, + "filter_w": 3, + "filter_h": 1, + "stride_w": 1, + "stride_h": 2, + "pad" : "SAME", + "activation_max": 127, + "activation_min": -128 + }, + {"name" : "avgpooling_3", + "batch_size" : 1, + "input_n" : 1, + "input_w" : 9, + "input_h" : 1, + "input_c" : 2, + "filter_w": 1, + "filter_h": 1, + "stride_w": 2, + "stride_h": 1, + "pad" : "VALID", + "activation_max": 127, + "activation_min": -128 + }, + {"name" : "avgpooling_4", + "batch_size" : 3, + "input_n" : 1, + "input_w" : 1, + "input_h" : 20, + "input_c" : 2, + "filter_w": 1, + "filter_h": 3, + "stride_w": 1, + "stride_h": 3, + "pad" : "SAME", + "activation_max": 127, + "activation_min": -128 + }, + {"name" : "avgpooling_5", + "batch_size" : 1, + "input_n" : 1, + "input_w" : 3, + "input_h" : 3, + "input_c" : 1, + "filter_w": 1, + 
"filter_h": 3, + "stride_w": 1, + "stride_h": 1, + "pad" : "SAME", + "activation_max": 6, + "activation_min": 0 + } + ] +}, +{ + "suite_name" : "test_arm_avgpool_s16", + "op_type" : "avgpool", + "input_data_type": "int16_t", + "interpreter": "tensorflow", + "tflite_generator": "keras", + "tests" : [ + {"name" : "avgpooling_int16", + "batch_size" : 1, + "input_n" : 1, + "input_w" : 6, + "input_h" : 4, + "input_c" : 17, + "filter_w": 2, + "filter_h": 3, + "stride_w": 2, + "stride_h": 1, + "pad" : "SAME", + "activation_max": 32767, + "activation_min": -32768 + }, + {"name" : "avgpooling_int16_1", + "batch_size" : 3, + "input_n" : 3, + "input_w" : 9, + "input_h" : 1, + "input_c" : 2, + "filter_w": 1, + "filter_h": 1, + "stride_w": 2, + "stride_h": 1, + "pad" : "VALID", + "activation_max": 32767, + "activation_min": -32768 + }, + {"name" : "avgpooling_int16_2", + "batch_size" : 1, + "input_n" : 1, + "input_w" : 9, + "input_h" : 1, + "input_c" : 20, + "filter_w": 1, + "filter_h": 1, + "stride_w": 2, + "stride_h": 1, + "pad" : "VALID", + "activation_max": 32767, + "activation_min": -32768 + }, + {"name" : "avgpooling_int16_3", + "batch_size" : 2, + "input_n" : 2, + "input_w" : 22, + "input_h" : 12, + "input_c" : 8, + "filter_w": 6, + "filter_h": 5, + "stride_w": 9, + "stride_h": 5, + "pad" : "SAME", + "activation_max": 32767, + "activation_min": -32768 + } + ] +}, +{ + "suite_name" : "test_arm_maxpool_s8", + "op_type" : "maxpool", + "input_data_type": "int8_t", + "interpreter": "tensorflow", + "tflite_generator": "keras", + "tests" : [ + {"name" : "maxpooling", + "batch_size" : 2, + "input_n" : 2, + "input_w" : 22, + "input_h" : 12, + "input_c" : 8, + "filter_w": 6, + "filter_h": 5, + "stride_w": 9, + "stride_h": 5, + "pad" : "SAME", + "activation_max": 127, + "activation_min": -128 + }, + {"name" : "maxpooling_1", + "batch_size" : 1, + "input_n" : 1, + "input_w" : 9, + "input_h" : 5, + "input_c" : 3, + "filter_w": 9, + "filter_h": 5, + "stride_w": 1, + "stride_h": 2, + 
"pad" : "VALID", + "activation_max": 127, + "activation_min": -128 + }, + {"name" : "maxpooling_2", + "batch_size" : 1, + "input_n" : 1, + "input_w" : 12, + "input_h" : 1, + "input_c" : 5, + "filter_w": 3, + "filter_h": 1, + "stride_w": 1, + "stride_h": 2, + "pad" : "SAME", + "activation_max": 127, + "activation_min": -128 + }, + {"name" : "maxpooling_3", + "batch_size" : 1, + "input_n" : 1, + "input_w" : 9, + "input_h" : 1, + "input_c" : 2, + "filter_w": 1, + "filter_h": 1, + "stride_w": 2, + "stride_h": 1, + "pad" : "VALID", + "activation_max": 127, + "activation_min": -128 + }, + {"name" : "maxpooling_4", + "batch_size" : 1, + "input_n" : 1, + "input_w" : 1, + "input_h" : 20, + "input_c" : 2, + "filter_w": 1, + "filter_h": 3, + "stride_w": 1, + "stride_h": 3, + "pad" : "SAME", + "activation_max": 127, + "activation_min": -128 + }, + {"name" : "maxpooling_5", + "batch_size" : 1, + "input_n" : 1, + "input_w" : 3, + "input_h" : 3, + "input_c" : 20, + "filter_w": 1, + "filter_h": 1, + "stride_w": 1, + "stride_h": 1, + "pad" : "SAME", + "activation_max": 127, + "activation_min": -128 + }, + {"name" : "maxpooling_6", + "batch_size" : 1, + "input_n" : 1, + "input_w" : 1, + "input_h" : 5, + "input_c" : 17, + "filter_w": 3, + "filter_h": 4, + "stride_w": 1, + "stride_h": 3, + "pad" : "SAME", + "activation_max": 127, + "activation_min": -128 + }, + {"name" : "maxpooling_7", + "batch_size" : 1, + "input_n" : 1, + "input_w" : 4, + "input_h" : 2, + "input_c" : 1, + "filter_w": 2, + "filter_h": 2, + "stride_w": 2, + "stride_h": 2, + "pad" : "VALID", + "activation_max": 6, + "activation_min": 0 + } + ] +}, +{ + "suite_name" : "test_arm_maxpool_s16", + "op_type" : "maxpool", + "input_data_type": "int16_t", + "interpreter": "tensorflow", + "tflite_generator": "keras", + "tests" : [ + {"name" : "maxpool_int16", + "batch_size" : 3, + "input_n" : 3, + "input_w" : 4, + "input_h" : 3, + "input_c" : 2, + "filter_w": 2, + "filter_h": 2, + "stride_w": 2, + "stride_h": 2, + "pad" : 
"VALID", + "activation_max": 32767, + "activation_min": -32768 + }, + {"name" : "maxpool_int16_1", + "batch_size" : 2, + "input_n" : 4, + "input_w" : 5, + "input_h" : 1, + "input_c" : 2, + "filter_w": 3, + "filter_h": 3, + "stride_w": 2, + "stride_h": 1, + "pad" : "SAME", + "activation_max": 30000, + "activation_min": -30000 + }, + {"name" : "maxpool_int16_2", + "batch_size" : 1, + "input_n" : 1, + "input_w" : 7, + "input_h" : 7, + "input_c" : 3, + "filter_w": 3, + "filter_h": 3, + "stride_w": 1, + "stride_h": 1, + "pad" : "VALID", + "activation_max": 30000, + "activation_min": -30000 } ] } diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling/biases_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling/biases_data.h deleted file mode 100644 index 237b2c37..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling/biases_data.h +++ /dev/null @@ -1,25 +0,0 @@ - -/* - * Copyright (C) 2010-2020 Arm Limited or its affiliates. All rights reserved. - * - * SPDX-License-Identifier: Apache-2.0 - * - * Licensed under the Apache License, Version 2.0 (the License); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#pragma once -// Generated by generate_test_data.py -#include - -const int32_t avgpooling_biases[17] = - {2159, 6477, 9716, 4318, 2159, 4318, 4318, 6477, 6477, 4318, 4318, 6477, 6477, 6477, 6477, 9716, 19431}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling/config_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling/config_data.h index 45f37cbd..14c2ab41 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling/config_data.h @@ -1,20 +1,20 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. #pragma once -#define AVGPOOLING_OUT_CH 20 -#define AVGPOOLING_IN_CH 20 +#define AVGPOOLING_BATCH_SIZE 1 +#define AVGPOOLING_INPUT_N 1 #define AVGPOOLING_INPUT_W 22 #define AVGPOOLING_INPUT_H 12 -#define AVGPOOLING_DST_SIZE 360 -#define AVGPOOLING_INPUT_SIZE 5280 -#define AVGPOOLING_OUT_ACTIVATION_MIN -128 -#define AVGPOOLING_OUT_ACTIVATION_MAX 127 -#define AVGPOOLING_INPUT_BATCHES 2 -#define AVGPOOLING_FILTER_X 6 -#define AVGPOOLING_FILTER_Y 5 -#define AVGPOOLING_STRIDE_X 9 -#define AVGPOOLING_STRIDE_Y 5 -#define AVGPOOLING_PAD_X 1 -#define AVGPOOLING_PAD_Y 1 +#define AVGPOOLING_INPUT_C 20 +#define AVGPOOLING_FILTER_W 6 +#define AVGPOOLING_FILTER_H 5 +#define AVGPOOLING_STRIDE_W 9 +#define AVGPOOLING_STRIDE_H 5 +#define AVGPOOLING_PAD SAME +#define AVGPOOLING_ACTIVATION_MAX 127 +#define AVGPOOLING_ACTIVATION_MIN -128 +#define AVGPOOLING_OUTPUT_C 20 #define AVGPOOLING_OUTPUT_W 3 #define AVGPOOLING_OUTPUT_H 3 +#define AVGPOOLING_PADDING_H 1 +#define AVGPOOLING_PADDING_W 1 diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling/input_data.h 
b/Tests/UnitTest/TestCases/TestData/avgpooling/input_data.h deleted file mode 100644 index ba5f6d8b..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling/input_data.h +++ /dev/null @@ -1,562 +0,0 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. -#pragma once -#include - -const int8_t avgpooling_input[10560] = { - -50, -84, 24, 30, 13, -120, -88, 19, 42, -51, -40, 115, -83, -92, 41, 106, 37, -50, 98, - -59, -11, 80, -106, 20, -1, -90, 100, 126, 24, 24, 98, -67, 17, 71, 126, 118, 107, -25, - 1, 11, -116, 51, 18, -117, 118, -67, -86, 57, 8, -47, -57, -7, 60, 82, 119, -85, -75, - -81, -84, -98, -8, 12, 12, -26, -89, -93, 100, -2, 57, 6, -30, 43, -79, -72, -78, -12, - 44, 106, -13, 103, 114, 26, 6, -35, 37, 25, -10, -20, 68, 53, 102, 3, 97, -71, 18, - -101, 64, 23, -60, -70, -65, -44, -82, 115, 87, 3, -78, 53, -81, 16, -92, 87, -41, -90, - -56, -120, -27, 77, -12, 13, 48, 89, -5, 54, -13, -94, 37, -77, 47, 51, -66, 85, 121, - -61, 66, -16, -3, 45, -70, -60, -40, 46, -89, -96, -57, -88, 123, 0, -66, -91, 81, 0, - 33, -126, 107, -74, 14, 53, 81, 79, -61, -111, -107, -89, -112, -39, 100, 93, 65, -64, 119, - 123, 81, 125, -82, 13, 38, -84, 81, -127, -128, -71, 55, -85, 21, 77, 16, 94, 103, 32, - -97, -83, 99, 90, -4, -10, -14, -71, -31, 0, 92, -76, -115, 109, -94, 41, -26, -8, 30, - -64, 54, -50, 49, 59, 13, -113, 79, 12, -1, -36, 10, 104, 43, 59, -102, 57, -126, 61, - 79, -78, 54, -119, -72, 3, -124, -86, -82, 91, 121, 24, -68, 112, 102, 53, -60, -126, -3, - 107, -48, -109, -44, -122, -54, 98, -119, 76, 111, -2, -127, 94, -4, -71, 41, 52, -8, -113, - -54, 12, 90, -113, 60, 69, -57, 37, 108, -90, 17, 80, -77, -105, 31, 48, 105, 44, -111, - 71, -28, -74, -80, -38, -58, 52, -8, 76, 75, 88, -105, 66, -46, -128, 105, -76, -20, 18, - 3, -16, -56, -75, -3, -40, 86, -111, 122, -26, -94, -125, 61, -20, -98, 126, 70, 71, -7, - -84, -23, 75, 
-126, 95, -27, 56, -74, -96, 27, -100, -15, 95, 12, -103, 13, -17, -24, -50, - -55, 103, 45, -110, -95, -26, 58, -24, 113, 113, 10, 11, -109, -114, 15, -3, -21, 9, -87, - -106, 91, 80, -3, 34, 84, 45, 75, 47, -55, 10, -84, 79, -97, 5, -66, -20, -29, -104, - -120, -100, -77, 75, 82, 72, 94, 102, 66, -78, 122, -36, -81, 95, 41, 86, 66, 126, -121, - 9, 99, -117, -97, 115, -49, 11, -45, 80, 117, 12, -116, -110, -105, 39, 114, 17, 29, 10, - -7, 56, 48, 103, -85, -122, -22, -103, -128, -63, -18, -28, 105, 74, 27, 11, 58, -33, 12, - 3, -53, 34, 106, 115, -121, 97, 17, 46, 71, 111, -33, -18, -106, -25, -125, -32, -51, -78, - -110, 29, -65, -16, -10, -21, -107, -42, -119, -85, 19, -93, -75, -47, 21, -28, -125, 63, -93, - -74, 33, 98, -61, -23, 20, -60, 84, -96, -124, 34, -34, 96, 120, -66, 51, 7, 124, -124, - -4, 86, -36, 29, -126, 3, 49, -99, -33, 77, -92, 42, 9, -49, 24, 99, 4, 67, -42, - -127, 48, -109, -15, 28, 33, -66, -118, 86, 112, -12, 53, -2, 45, -105, 69, -18, -46, 90, - 9, -26, 63, 101, -118, 44, 13, -127, -127, -127, -98, 44, 45, 96, 38, -125, -71, -63, 8, - 7, 118, 29, -14, -11, 125, -127, -36, -50, 22, -52, -74, 72, -25, -83, 44, -120, -39, 64, - 35, 53, 31, -110, 61, -110, -58, -18, -6, -16, 41, 9, -47, 19, -59, -25, 58, -69, -45, - 72, -41, -76, -30, 4, -63, 20, -99, -120, -108, 37, -59, 30, -24, 106, 11, 82, 24, 87, - -46, 69, 76, 114, -39, 75, 57, 88, -120, 43, -7, 86, -72, -120, -35, 55, -61, -93, 57, - 81, -118, 98, -52, 108, -56, -69, 21, -35, 7, 116, 42, 44, -47, 73, 71, -106, -5, -66, - 100, 75, -107, -12, 61, 24, -60, -97, -51, 107, 60, -78, 40, -14, 22, 37, -25, 23, 68, - -87, -31, 86, -25, -3, 19, 49, 27, -40, -58, 77, 14, 85, 16, -15, 2, -117, 18, 9, - 61, -86, 116, 29, -93, 79, 90, -29, 35, 52, -7, -23, -43, -128, -1, -26, -24, -94, -55, - -128, -26, 62, -53, 46, -88, -124, 14, -68, 3, 96, 61, 68, -107, 50, 117, -121, 87, 31, - 112, -46, -118, -105, 122, -118, -80, -40, -79, -85, 121, -115, -41, 80, 26, -5, -31, 99, -1, - -20, -119, -128, 118, 
-62, 12, 113, -82, -104, 46, 21, -53, -30, -24, 17, -10, -73, 71, 94, - -41, -75, -66, -29, -123, 104, -49, -40, 54, -69, 53, 72, 97, -118, 51, -74, -52, 66, 49, - 96, 118, 52, 22, 3, -10, 75, -74, 50, -93, 120, -25, 17, 96, -12, -128, -82, 85, 57, - 56, 58, 113, 81, -68, 31, 49, -96, 82, -40, -112, 22, -3, -44, 82, 66, 33, -116, -14, - 84, 9, -19, -114, 10, 16, 99, -123, 11, 37, -45, 32, -30, -30, 59, 58, -79, -14, -71, - 6, -105, -48, -30, -77, 29, 31, 89, 43, 56, 65, -34, 58, -106, 51, 39, -60, -64, -15, - 73, 37, -6, 30, 57, 65, -12, 1, -45, 46, 6, 81, 72, 3, -49, -117, -7, 10, 22, - -57, 37, -2, 58, 28, 113, 21, 96, 5, -89, -26, 71, 13, -29, -119, -39, -117, -32, 76, - -80, 104, 94, -38, -67, 123, -124, -22, -36, -29, -117, 110, 31, -5, -45, 0, -56, -74, 22, - -104, -90, 59, -81, 6, 116, 64, -50, 101, -68, 125, -83, -65, -77, 40, -70, 28, 25, -2, - 38, -115, 51, 72, 119, 72, -22, -109, 16, -6, 94, 24, 101, -65, 31, 34, -64, -4, -103, - 10, 126, -116, 14, -126, -90, 76, 119, 69, -47, 118, -114, -101, 80, -17, 123, -120, 45, -124, - -71, 75, -26, 58, 19, 31, -109, -10, 81, 21, 55, -68, 3, 1, 23, -22, -24, 37, -60, - -81, 97, -89, -106, -29, 16, -103, -116, 64, 15, 126, 107, 29, -5, 93, 119, 90, 34, -108, - 73, -4, 49, 100, -31, 35, 61, -33, -109, 96, -89, -9, -1, -2, -75, -83, 3, -10, -30, - 118, -55, -90, -45, -126, -76, -19, -101, -30, 17, 16, 111, 43, 46, 105, 117, -58, 107, 14, - -106, 50, 4, 105, -14, -49, 68, 28, -25, -94, -14, 24, 25, 15, -76, 91, 21, 2, 3, - -82, -74, -100, -123, 6, 30, -117, 34, -14, -126, -96, -65, -124, -40, -103, 39, -19, 42, -63, - 12, 36, 57, 122, -76, 75, 105, -105, -106, 48, 39, -118, 29, -66, -112, 61, -10, -18, 106, - 5, 60, 87, -66, -14, 102, -36, 63, -79, -112, -124, -44, 23, 70, 84, 48, 95, -122, -14, - 0, -96, 106, -52, -86, 101, -23, -109, 81, 42, -124, -28, -104, -122, -104, 48, -54, -55, -69, - 82, 73, 66, 34, 122, 119, 97, 92, -49, -7, 21, 89, 17, -66, -27, -58, -37, -38, 79, - -84, 62, 34, -67, -87, -5, 122, -11, -84, 
96, 91, -97, -4, 83, 22, 5, 90, -84, 72, - 63, 97, -51, -126, -97, 0, -91, -63, 28, -82, 22, 73, -98, 30, -78, -29, -73, 103, -120, - 82, -27, -57, 71, -123, 116, -85, -119, 116, -108, 49, 80, 114, -54, 105, -112, 52, -55, -56, - -60, -77, 110, -95, 81, 71, -80, 113, -95, 67, -7, 13, 40, -8, 114, 26, -121, 111, -99, - 63, -12, 30, 6, -126, 33, -2, -73, 26, 26, -125, 50, 43, -88, 74, -92, 83, -28, -112, - -88, 13, -112, 27, -116, 109, 50, 99, 102, -73, -112, -28, 100, -30, 54, -1, -92, 119, -59, - 112, 68, -105, -6, 20, 106, -105, -34, -50, 5, 56, -20, 40, 64, 15, -113, 5, -33, -41, - 51, -54, -116, -125, 31, 66, 62, -123, 124, 91, 0, 30, -91, -95, 28, 23, 86, 51, 75, - 117, -122, -4, -126, -73, -52, -44, -25, -34, -62, -4, -62, -112, -8, -79, -66, 64, 83, -71, - 92, 74, 89, 91, -69, -23, 126, 65, -12, -77, -18, -36, 98, -20, 25, 112, 14, -29, -57, - 49, 97, 11, -53, -9, 125, -53, 44, 45, -105, 55, -123, 116, 5, 85, 46, 59, 34, 92, - 36, 76, -53, 102, 91, -74, -65, 22, 96, -9, 116, 31, -62, 22, -39, -106, -1, -54, 16, - -49, 36, 42, -55, -40, 15, 64, -10, -117, -87, -19, -48, -34, 51, -122, -45, 35, -70, -1, - -4, -52, -49, 12, 68, 46, -82, 52, -4, 88, -100, -59, 121, 91, -112, 96, -25, 87, 19, - 116, -90, -91, -55, 28, 31, -89, -100, -44, 115, -107, -63, -92, 43, -58, 123, 52, 100, -51, - 30, 96, 88, -68, 5, 27, -22, -7, 60, -70, 30, 96, 82, -106, 121, -119, 5, -71, 83, - -40, 118, -58, -22, 90, 3, 91, 109, 74, -32, -39, 2, -121, -43, -50, 108, 77, -33, 68, - -52, 29, -85, 86, 29, -26, -9, -45, 2, 40, 15, -43, -113, 123, 68, 99, 103, -2, -10, - 14, -115, -25, -25, -21, -56, 101, 58, -65, 100, -123, -29, 47, 57, 82, -17, -76, 120, -76, - 5, 37, 98, 2, 105, -122, -15, -61, -81, -44, -69, -42, 86, 80, -29, -2, -85, -21, -123, - 95, -57, -31, -23, -22, 112, 108, 40, -38, -61, 69, -49, 109, 80, -101, 6, -8, 13, 48, - -46, -19, -73, -59, -103, 100, -44, -69, -1, 5, 88, 62, 62, -14, -11, -38, 64, 112, -56, - -41, 6, 17, 47, -45, 75, 117, -67, 63, 33, -122, -14, 91, -75, 
10, 54, -31, -20, 25, - 43, 25, -64, -39, 81, 95, 12, -50, -102, 77, 108, -99, 72, 106, -9, 28, 76, 62, -58, - 70, 5, -94, -41, -89, 32, -51, -26, -95, -114, 55, 122, 46, -53, 65, -9, -23, -47, -24, - 80, -65, -64, -84, -68, 44, 3, 31, 124, -86, 28, -124, 75, -1, 43, 101, 8, 76, -96, - 18, -13, -123, -104, -30, 46, 56, -6, -46, 13, -86, 122, -73, 46, 0, -95, -43, 80, 34, - -83, -91, -50, 72, -34, 108, 0, 126, 25, 47, 40, 47, -47, -75, 14, -47, -90, -35, -87, - -9, -113, 126, -121, 80, 34, -1, 26, 122, 32, 23, 73, 108, 41, -128, -44, -32, -8, -2, - 10, 88, 24, -10, 68, 92, -58, -37, 58, -95, 123, 117, 97, 83, 77, 68, 48, -103, 65, - 73, -34, -98, -3, 88, -7, 36, -43, -116, -103, 94, 65, 72, 26, -113, 71, 20, 5, 117, - -119, -10, 13, -105, 60, 113, -127, -100, 52, 106, 99, 119, 65, 55, -53, 0, 100, 50, -57, - -52, 85, 32, 80, -115, -113, 22, 118, -74, 22, -55, 89, 81, -127, -23, -114, -104, -63, 83, - 78, -54, 94, 101, 122, -48, 6, 52, 124, -108, 86, -44, -120, -71, 126, 101, 112, -88, 64, - -50, 60, 47, 28, -126, 83, 57, -44, -127, 5, 36, 76, -4, -88, -29, 28, 122, -23, 92, - -50, -90, -45, 125, -121, -68, 34, 15, -114, 25, -84, -11, -108, -84, 5, -45, 24, 37, 96, - 12, 19, -45, 114, 77, 1, 101, -114, -3, -108, -59, 104, -24, 82, 61, 39, 115, 43, 75, - -20, 39, 55, -116, 57, -88, 48, -51, 117, 45, 8, -123, 30, 65, 5, 23, 52, 90, -120, - 37, -13, -44, -88, 26, -81, 45, -9, 26, 122, -127, -126, -29, 67, -102, 68, 100, -22, -97, - 55, -39, -120, 56, 91, -94, -8, -28, 76, -117, 29, -45, 43, -78, 45, 43, 95, -108, 41, - 52, 115, 64, -111, -121, 87, 17, -24, 92, -73, 104, -84, -31, -73, 117, -68, -67, 107, -122, - -22, -19, 30, 124, 103, 105, -35, -37, 85, 91, 55, 42, -11, 87, 120, -72, 103, -80, -66, - -86, -81, -6, 66, -54, 69, -58, -1, 98, 39, -127, 53, 60, -111, 46, 69, 12, -123, 27, - -12, 117, 69, 78, 125, 126, 111, -85, 93, 41, 58, 68, -74, -117, -23, -108, 89, -60, -110, - 100, 58, -25, -95, -31, 51, -73, 55, 68, 85, -77, 106, -109, -71, 117, -81, 3, -29, 100, - 
-33, -83, 10, -19, 31, 77, 98, 2, -1, -115, 35, 124, -100, -6, -3, 20, 49, 44, 111, - 125, 116, 109, 30, -36, -69, -83, -64, 108, -100, -79, -101, -16, -98, 80, 78, 71, -96, -34, - 21, -105, -56, 111, -28, -60, 75, 71, -33, 110, 24, -41, 46, -5, -30, 95, -86, 32, 102, - -119, 103, -84, -82, 7, -29, -21, 26, 90, -80, -19, 44, 19, -99, -90, -6, 116, 105, -46, - 17, 71, -50, 19, 105, -19, 116, 52, -69, -57, -26, -42, 65, -85, -88, -116, 88, 101, 94, - 87, 73, -29, -40, -74, -91, 74, 46, 99, -57, -123, -17, 32, 88, -102, -62, 52, 85, -22, - -89, 118, -91, 119, -101, -70, 92, -103, 58, 16, -43, -56, 24, 78, -75, -102, 125, -110, -23, - -55, 33, -91, 123, -127, 54, 33, -102, -71, 29, -34, -58, 44, -84, 37, 72, 55, -81, 95, - 88, -19, 11, -80, -125, 11, 116, 120, -106, -30, 100, -87, 34, 34, 6, -45, 71, -124, 41, - 59, -107, 6, 109, -118, 86, 62, 30, 55, 6, 75, 19, 89, -98, -78, 66, -46, 46, -64, - 120, -64, -101, 51, 66, 91, 79, -5, 46, 50, 30, -20, -9, 4, 111, 76, 32, -47, -58, - 92, -22, 68, -3, -119, -18, 95, 71, 35, 92, 60, 107, -66, -112, 99, 103, 90, -119, -98, - -102, 68, 120, 76, -58, -74, -28, -61, -127, 98, -3, 123, -54, 25, -119, 53, -42, 10, -121, - -77, -12, 100, 13, -12, 8, 121, -12, -22, -47, 8, 95, -19, 58, 116, 99, 74, 33, -23, - 62, 22, 65, 95, 23, 84, 15, -54, -2, -88, 35, 77, -56, 74, 3, -34, -103, -24, -54, - 18, -85, 3, 106, -120, -76, -114, 61, -63, -36, 4, 110, -11, -97, -79, 24, -113, 96, 115, - -128, -106, 29, -106, -48, -48, 58, 4, 60, 43, -57, 41, 114, -6, 45, 48, 24, 48, 46, - -12, 99, 26, -112, 126, -121, 51, -80, 32, -72, 32, -107, 39, 16, 109, -13, -62, -25, 35, - -19, 74, 27, -106, 78, -29, 15, -14, -111, -45, 11, 51, -33, -121, -81, -103, 66, -22, 23, - 9, -65, -49, 34, 36, 44, -91, -13, -127, -49, -87, 31, 102, 126, -83, -75, -119, 110, -57, - -100, 80, -87, 104, -86, -47, -75, 16, -4, 52, 59, 26, 12, 124, 24, -120, -9, -18, -117, - 64, 111, 47, -102, -53, 119, 80, 62, 92, 31, 85, -32, -50, -39, 37, -29, -60, 9, -116, - -102, 0, 117, 
-18, -60, 83, -98, -56, 94, -61, 18, 112, -103, -118, 38, 16, -113, 115, 27, - 88, -37, 40, 62, -112, -89, 63, 79, 27, 78, -29, -36, -104, 50, -122, 103, 17, -98, 102, - 14, -2, 126, -99, -111, 41, -67, -25, -106, 107, 62, -107, 69, -22, -74, -89, 65, -123, -66, - -37, -29, -61, -43, 14, -45, -14, -61, 123, 97, -87, -98, 9, 89, -90, 38, -68, -14, -20, - 88, 37, 25, 65, -34, 47, 84, 76, 116, 32, 0, 83, 105, -60, 85, 21, -44, -9, 36, - 120, -68, 6, -95, -55, 22, -68, -128, 104, -96, -75, -63, -47, -87, 121, 125, 97, 37, -123, - 70, -105, 92, -47, 71, -80, -32, 108, 12, 93, 77, -89, 54, 34, -77, -31, -82, -41, -101, - 79, 92, -52, 87, 8, 25, 77, -23, -60, 105, 65, 78, 51, 93, -88, 79, -56, 7, -128, - 14, 56, 123, -84, -112, -19, 92, 23, 27, -8, 72, 75, 2, 67, -119, -111, -70, -118, -36, - -99, -122, -7, -11, 84, 125, 112, 34, 42, 18, -61, 46, 62, -46, 104, 50, 57, 94, 79, - 58, -54, 11, 77, 108, -63, 34, -87, -84, -11, -72, -20, 17, 82, -22, -15, -39, -19, 96, - 126, -90, -42, 40, -89, 73, 21, 14, 92, 74, -89, -124, -86, 102, 47, 53, 100, 44, -93, - 69, -121, -83, -17, -31, 97, -122, 79, -37, 65, -54, -36, -7, 55, -99, -12, 88, 68, -18, - -72, -33, -123, 44, 98, -109, 62, -65, 48, 97, -112, 16, 11, -11, -22, 92, 126, 97, -121, - 23, -40, -26, -107, -70, 43, -37, 31, -75, 23, 21, 119, 55, 19, -54, 29, -39, -5, -56, - 29, 42, -53, 83, 26, 1, -111, -78, 105, -73, 27, 66, -126, 121, 0, 37, -23, -14, 88, - 82, 36, -1, -4, -53, -19, 66, -95, 51, -40, 94, -48, 27, 29, -105, -48, -108, -76, -9, - -72, 119, -94, 119, -60, 82, -105, -53, -124, -12, 86, 98, 82, 120, -77, 120, 72, -90, 68, - 10, -31, -4, -116, -110, 96, 91, 57, 4, 1, -121, 20, -72, 84, 32, -7, 56, -92, -119, - 10, 16, -64, 52, -79, -120, -36, -57, 95, 112, -97, 121, 4, -24, 117, 85, 28, -27, -60, - -25, -92, 2, 76, -58, -64, -60, -91, 49, -61, -63, 26, -45, -127, -18, -51, 37, 26, 83, - 116, -16, 69, 117, -25, -74, 8, -36, 79, -34, 52, -94, 51, 30, 63, 18, 38, 21, 11, - -98, -126, 50, 39, 3, 95, 7, -26, -25, 54, 
1, 41, 108, 66, 58, 25, -98, 33, -85, - 125, -30, 16, -99, 65, -102, 85, 53, -124, 100, 28, 114, -122, -82, 45, 118, 51, 64, -102, - -21, -43, 51, 41, -28, -91, 92, -52, -66, -29, 100, -41, 64, 5, -104, -81, 49, -15, -53, - 105, -14, -75, -97, -14, 13, 53, 48, 7, -23, -125, 9, 30, -28, 75, 33, 74, -126, 33, - -13, -54, -4, -25, 116, 64, -93, 46, -21, -110, 74, -45, -12, 49, -10, 91, 108, 23, -1, - 4, 66, -15, 114, 119, 69, 65, 64, 6, -17, 21, 13, -17, -7, -36, 48, 66, 104, -17, - -76, -84, 94, -56, 107, 80, 62, -46, 27, -52, 12, -1, 3, 25, 109, 73, 96, 69, -73, - -12, -103, 53, -21, -101, 20, 88, 1, 13, 89, 94, -31, 119, -74, -91, 52, -83, -40, 110, - -52, 27, 5, 107, 10, 46, -110, 28, 97, -91, 14, -10, 92, 117, 10, 4, -28, -116, -38, - 12, 26, -46, 54, 82, -120, -117, 78, -67, 112, -57, -108, 93, -62, -18, -17, 10, -116, -14, - 12, 71, 34, 81, 28, 70, 35, -51, -89, -41, 76, 109, -11, 20, -27, -97, 118, 50, -9, - -34, -23, -41, -72, 48, 114, -16, -26, 42, -82, 86, -39, -44, -82, -111, 34, 1, 106, 52, - 51, 69, 69, -103, 51, 126, 17, -100, -87, -18, 22, 104, 53, -65, -4, 91, -54, -118, -34, - 38, 84, -43, -39, 3, -25, -91, -62, 60, 119, -22, 43, 19, 103, 88, -91, 125, 109, 21, - -31, -108, -83, -7, -44, 63, -28, -79, -115, -15, -19, 64, 16, 122, -49, -15, -122, -40, 103, - 22, 112, -111, -68, 66, -13, -56, 111, 67, 126, 29, -124, 0, 41, 102, -28, -52, 124, -67, - -12, -23, -127, -36, -12, -31, -35, -26, -61, 77, 84, -10, -38, -50, 57, 12, -57, -14, 110, - -20, 81, -48, -96, -88, -65, 103, 95, -117, -81, 44, 45, 120, -36, 1, -88, -37, -62, 4, - -62, 108, 43, 67, -109, -46, 38, 89, 5, 33, 16, 115, 20, -33, -19, -89, 3, 125, 8, - -112, 2, 5, 104, 68, 89, 25, -66, 114, 107, 88, -16, 105, 101, -103, -93, -122, -66, 55, - -26, 87, 77, -49, -9, -110, 89, 106, -56, 37, 3, 63, 41, 46, -20, -76, 12, -55, 40, - 87, -59, -52, 42, 51, -80, -115, 39, -36, -59, 18, -9, 114, -2, -2, -49, 58, -100, 120, - 14, 36, 17, -33, -40, 105, 24, 1, -71, -77, 69, -47, -100, -79, 120, -109, 11, 
58, 41, - 89, 14, -26, -90, -69, -2, 68, -59, 88, 108, -69, 80, -75, -108, 71, 101, 18, -34, -119, - 121, -99, -128, 120, -88, -9, -82, 38, -74, -3, -62, 91, -61, 87, -113, 32, -93, 82, -43, - -117, -127, 85, 51, -52, -51, 18, -95, 15, -81, -16, -56, 55, -120, -41, -19, 12, 101, -42, - -119, 74, 19, -78, 28, 101, 85, 93, 16, -2, 31, 70, 40, -110, 22, -109, 83, -95, -119, - 60, 30, -73, -24, 62, 81, -59, 45, 49, -14, 47, -54, -54, 73, 7, -30, -115, -74, 52, - 39, 65, -64, -54, -121, -74, -128, -109, -125, 126, -16, -39, 110, 60, 95, -128, -52, -52, -17, - 18, 115, 20, -40, -16, -104, 45, -111, 46, -85, 87, 119, -97, 95, -55, 123, -12, -22, -73, - -6, 68, 107, -98, 0, -67, 47, -1, -30, -19, -107, 3, 119, -118, 75, -51, -63, 102, -15, - 120, 85, -108, -95, 43, -106, 86, 62, -123, -81, 34, -55, -118, -51, -49, -73, -46, 54, 19, - 53, -122, 26, 45, 39, -12, -3, -71, 60, -7, 29, 26, -72, 48, -66, 81, 50, -79, 12, - 26, 0, 38, 24, 115, -72, 81, -23, -115, -96, -9, 120, -28, 12, -6, 105, 116, -67, 89, - -113, 87, -94, -69, 95, -127, 65, 90, -93, -39, 65, -106, -71, 31, 105, -89, -19, 29, 120, - -20, 47, -127, 104, -75, -121, -42, 21, 124, -93, -113, -120, 36, -5, -23, 28, -110, 62, -107, - 17, 93, -26, 109, 4, -4, 91, 115, -84, -114, -111, 106, -122, -127, 90, -3, -55, -99, -31, - -93, -79, 57, -115, -112, 0, -76, -84, -8, 70, 31, -12, -19, -19, -5, -19, 70, -15, 50, - 78, -41, -65, 112, 39, -120, 42, -76, 11, 114, -52, -15, 57, 104, -84, 118, -118, -82, 70, - -23, 96, 68, -7, 94, -20, -99, 87, -80, -23, -73, 34, -112, 113, -54, -39, 41, -42, 42, - 48, 4, -70, -35, 111, 117, -29, 94, -83, 22, 84, 75, 10, -52, 52, 42, 7, -121, 103, - -77, 64, 51, 118, -7, -105, 16, -31, -70, -85, -9, -116, 75, -77, 31, 100, -119, 105, -7, - -54, -85, -102, -8, -77, -63, -40, 111, -86, -123, 119, -43, 21, 29, -53, -19, -27, -86, 83, - -125, -25, 87, -102, -125, 46, -23, -112, 91, -18, -106, 104, 26, 32, -5, -118, -119, 108, 88, - -46, 41, 76, 44, -45, 60, -21, 105, 90, 18, -124, 76, -15, 
-87, -74, 90, -94, -14, -28, - -122, -42, 94, -16, -122, 66, 19, -27, 48, 6, 31, 110, -40, 11, 119, 84, -90, -95, -4, - 15, -27, 41, -128, -101, 10, 16, -26, 69, -71, 71, -25, 5, 38, 0, 109, 110, 78, -80, - -93, 60, -21, -25, 27, -53, 118, -96, 59, -7, -68, 61, -92, -53, -122, 49, 117, -6, -103, - 73, -62, 63, 10, 85, 93, -121, 72, 116, 116, -53, 80, 10, 41, -57, -103, -83, 42, 13, - 101, 66, 110, 11, 86, -76, -97, -99, 75, -64, 32, -21, 7, 3, 49, 54, 12, 5, -51, - 41, -70, -120, 50, -124, -11, -86, -84, 107, -66, -112, -4, 119, 24, -62, -60, -97, 31, -102, - -55, -105, 18, 18, 5, -123, -125, 93, -75, -70, -84, 56, -5, 34, 39, 76, -70, -4, -77, - 78, 72, -105, 89, -96, 112, 70, 100, -43, 41, 5, -40, 121, 27, 58, 81, 99, -43, 126, - 24, 43, 54, 38, -78, -22, 34, 36, 101, -36, -45, 96, -48, -81, -41, -77, 41, 21, 109, - 66, -15, -8, 64, 118, -30, 63, 14, -71, 104, -120, -82, -82, 49, 26, 25, -118, 2, -53, - 101, -60, 21, -62, -51, 1, -27, 64, -71, 18, 67, -116, -94, -64, -3, 41, -11, -103, -109, - -99, -7, -18, -8, 48, -5, 117, 44, -72, -98, -62, -57, -54, -64, -69, -53, -12, -48, -6, - 36, 122, -20, -78, 10, 99, -54, -100, -88, -127, 31, 15, 72, 40, -106, 111, 121, 2, -94, - -114, -103, -23, 91, 20, -120, -27, -94, -25, -126, -64, -19, 115, -32, -10, 95, -64, -123, 57, - 4, 63, 110, 84, 16, -114, 4, -32, 86, 27, -5, 84, 75, 37, -32, 100, -118, 118, 103, - -96, -14, -36, -106, -104, 78, 99, 119, -12, 54, -53, 102, 25, 64, -102, 51, -88, -99, -49, - 102, 90, 97, 5, -1, 28, 96, 24, 7, -89, 54, -88, 2, -96, -94, 54, 90, 83, 39, - 42, -77, -73, -124, 58, 59, 83, 92, -112, 54, 111, -42, 114, 93, -16, 46, -39, -77, 21, - 67, 80, -59, 119, -38, 119, -54, -99, -52, -101, -112, -25, -7, -20, -99, 28, 118, 1, -85, - 44, -50, -42, -44, 122, 108, -48, 54, 9, -58, 110, 30, 22, 121, 65, 0, -25, -82, 113, - 3, -63, 103, 54, 113, 26, -25, -123, -73, 81, -121, -85, 20, 120, -127, -18, 28, -115, 24, - -23, 59, -128, -48, 118, -61, 118, 63, 107, 65, -75, 48, -65, 45, -56, 15, -101, 56, 
11, - -42, 83, 0, -54, -37, 9, -35, -28, -41, -61, -96, 90, 28, -3, 71, 115, -112, -61, 60, - 34, -73, -17, -50, 85, 10, 51, 92, 113, 62, 63, 118, 9, 44, -124, 0, -116, 37, 58, - 67, 108, -63, -66, -3, -7, -127, 51, -60, -112, 40, 2, -112, -45, -105, 94, -110, -45, -75, - -42, 76, -99, -4, -26, -28, 123, 12, 38, 114, -88, 31, 113, -118, -36, 21, -18, -119, 31, - 100, 2, -120, -57, -64, -99, 71, -73, -23, 120, -114, 33, 78, 53, 14, -44, -8, -56, -83, - -9, -34, -114, 125, -125, -22, -40, -121, 63, 11, -118, -111, 121, -3, -54, -73, 11, -5, 13, - 7, -71, -34, 124, -85, -109, 119, -104, 24, 120, -88, 78, 65, 95, 95, 82, 13, -76, 119, - 106, 123, 104, 95, -11, 92, -37, 42, 32, -124, 57, 109, -30, 98, 39, -116, 80, -125, 120, - 4, 105, 119, 71, -91, 122, -50, -82, 103, -48, 65, -124, -101, 5, 36, 44, -75, 63, -76, - -74, 34, 86, -30, 8, 83, 105, 38, -55, -35, -7, -72, 67, -84, -85, 28, 17, -118, 100, - 16, -32, 1, 56, -34, -37, -58, 17, -90, 8, -120, 40, 105, -67, -107, 125, -75, 23, -96, - 15, -14, -67, 51, -74, -83, 43, -126, -10, 5, 88, 65, 60, 61, 94, -128, -48, -107, 42, - -47, 79, -97, 89, -66, 3, -25, -41, 68, 54, -41, -85, 68, 58, -99, -115, -61, -15, -22, - 0, -111, 64, 121, -71, 101, -109, -128, -2, -94, 30, -118, 84, 126, 23, -83, -43, -40, 115, - 11, 40, -22, -57, 26, -82, 12, 34, -100, 82, 62, -85, -28, -24, 98, -25, -73, -110, -123, - 59, 114, 72, 80, 96, -93, -109, 63, 38, 90, 64, 113, 85, 30, -92, 95, 2, 30, 51, - -112, 121, -93, 80, -107, 94, 3, 96, -106, -54, -122, 63, -27, -51, -37, -32, -128, 32, -29, - 121, -117, 30, 7, -37, -115, 48, 119, -14, 48, -70, -70, -68, -74, 82, 91, 14, 10, 126, - -100, -55, -36, 98, 79, -56, -21, -63, 2, 63, 31, 37, -74, -103, 55, -53, -82, -40, -102, - 3, 108, -119, -114, 105, -113, -69, -110, 67, 37, -117, -33, -49, 58, -114, 79, -21, 86, 125, - -111, -25, -8, 24, 53, 60, -91, 113, -26, -83, -114, -87, 99, 38, 104, -34, 13, -104, 18, - 61, -47, -114, 47, -29, -102, -27, 10, 88, -117, -127, -70, -117, 41, 41, 16, 74, 
-85, -107, - -2, 114, -102, -99, -91, 32, -97, 15, -106, 83, 97, -105, 120, 80, -122, -21, -58, 5, -55, - 66, -25, 108, -39, 98, 73, 105, -122, -114, -77, 121, 87, -7, 108, 62, -71, -108, -2, 77, - -14, -17, 56, -61, 95, -60, 37, -15, 124, -34, 108, 114, -118, 71, -110, 8, -29, 95, 10, - -33, -125, -117, 1, -55, 15, -72, 2, 96, -114, -51, -15, -91, 62, 1, -6, 85, -70, 47, - 63, -49, 118, -49, -112, -16, -75, -18, 19, -29, -56, -43, 92, -15, 48, -99, -76, -13, -16, - 98, 124, -18, 83, 24, -23, -18, 70, -90, 27, 73, -70, -18, -97, 41, 23, 49, -50, -75, - -104, 36, -80, 111, 22, 51, -117, 28, -49, 92, -128, -57, -96, 22, 109, 40, -88, 70, 121, - -47, 87, -8, -58, -89, 0, -95, 117, -64, -103, -60, -59, 52, 85, -103, -35, 0, 79, 12, - 36, -80, 75, 41, -71, 17, 79, -26, -36, 78, 7, 91, -128, -87, -10, -5, 125, -111, -98, - -116, 67, -7, 36, -99, 22, 42, 109, -48, 123, 119, -52, -17, 20, -71, 55, -13, -43, 59, - -111, 95, -19, 122, -12, -91, -112, 11, -33, -62, -4, -1, -34, 70, -8, 14, 31, 75, -65, - -105, 37, -123, -55, 79, 6, 53, 0, -99, -101, 96, 70, 44, -28, -120, -6, -75, -100, -100, - 64, -113, -39, 122, -12, -56, -121, 12, -110, -90, -75, 69, 78, 73, 27, -29, -1, -66, 117, - 55, -103, -22, -83, -103, -58, -89, 11, -81, -119, 53, 66, -41, 71, -9, 17, 66, -65, 60, - -94, 30, 52, -109, -109, 38, 111, -34, -36, 34, -18, -88, 51, 103, 36, -85, 52, -83, -65, - -120, 28, -122, -61, 4, 41, -9, -36, 85, 2, 95, 75, 29, -91, -72, -13, -69, 41, -117, - 108, -103, -3, -48, 45, -60, -50, 77, 5, 13, 121, 74, -68, 83, -45, -124, 20, 35, -32, - -125, 10, -116, -91, 59, -81, -14, 86, -1, 40, -52, -80, -83, -91, -82, 54, -83, 120, 44, - 37, -68, -90, -36, 31, 53, 84, 81, -6, -114, -72, 97, -68, -99, -89, 60, 20, 81, 28, - -91, 69, -1, -124, 24, 81, 19, 45, -48, 6, 67, -16, -61, -66, -90, -122, -45, 24, -26, - 36, -117, 52, -56, 1, 74, -19, 88, -34, 72, -68, 33, 20, 105, 75, -102, 49, -29, -20, - -37, -76, -109, -1, 23, -1, -99, -86, 40, 118, -9, -125, 54, -4, 91, -112, 83, -96, -92, 
- 96, 100, 66, -94, 114, -119, 65, -43, 85, -115, 36, 42, -54, 11, 88, -85, 94, -3, -107, - 22, -66, -52, -74, -8, 3, -6, -72, 38, -124, 1, 15, -119, 116, 108, -79, -121, -92, 80, - 13, -2, -34, 96, -35, -38, 89, -84, 106, 104, -13, -79, 71, 10, -97, -54, 115, -3, 108, - 57, 73, 63, -112, 93, -44, -128, -48, -75, -34, 76, 33, 117, 51, -27, 108, 8, -9, 8, - -27, 38, -18, -54, 8, 118, -19, 87, 63, 12, -73, -56, -50, -52, -24, -80, -76, -91, -32, - 57, -65, -53, 119, 11, 124, -104, -59, 104, 107, 120, -63, -34, 65, -96, -23, -33, -55, 41, - -61, -28, -66, -39, 79, 112, -9, 44, 82, 20, 4, 69, -107, 79, -51, 41, 95, -13, -101, - -112, 74, 16, -14, 119, 40, -44, 36, -113, 55, 98, 117, -12, -51, 114, -125, -76, -2, 24, - -124, -58, 74, 120, 112, -88, 106, -57, 75, 70, 68, -101, 13, 44, 44, -11, -59, 90, 125, - 12, 11, -34, 85, 82, 57, -41, 65, -36, 120, -96, 117, -45, -94, -82, -1, 120, -9, 85, - -68, -117, 69, 51, -14, -39, -72, 114, 72, -44, -48, 31, -109, 26, -4, -96, -34, -56, -81, - 10, -38, -61, 94, 70, 80, -18, -44, -75, 121, -10, 88, 104, -110, 8, 86, -104, -86, 100, - -27, -118, -12, -81, 17, -127, -16, 57, -76, -44, -59, -32, -120, 99, -32, 70, -41, 43, 115, - 14, 12, -49, -11, 37, -78, 8, 84, -26, -34, 123, 96, -25, 126, -117, -100, -7, 30, 3, - -76, -98, -87, -37, 80, -68, 60, -73, 100, -68, 13, 44, -93, 9, 103, -16, -31, -95, -25, - 38, -28, 114, -88, -112, -67, 53, 64, 104, -44, 37, 60, -123, 116, -103, -79, 89, 121, -107, - 114, 12, -102, -57, 77, 100, 99, -23, -119, -59, 47, -83, -31, 18, -76, 108, -18, -106, 19, - -66, 16, -50, -94, -34, -43, 102, 115, -79, -18, -79, 39, 100, 89, -97, 65, -18, 6, 85, - 58, -109, 10, -116, -13, 82, 114, -51, 116, -1, 112, 116, -7, -48, -33, 72, -102, -106, -10, - -67, -10, -104, 102, 108, -90, -90, 87, -80, 45, -38, -98, 89, -126, -40, 96, 74, 29, -101, - -59, 61, 81, -92, -73, 62, 21, 89, -65, -52, -8, -22, 96, -3, -16, -40, -72, -53, -43, - 42, 24, -39, -56, -127, -32, 10, 111, -76, 99, 79, -4, 26, 111, -11, -100, -63, 
105, -25, - 95, -88, 61, 106, -30, -85, 101, 60, 103, 90, 37, 104, 44, -31, 92, -92, -20, 93, 10, - -19, 1, 39, 91, -105, 15, 26, 23, -109, 28, 96, -80, 118, -61, -39, -37, 112, -43, 32, - -31, 32, 112, -95, -55, -20, -113, -116, 39, 82, -40, 46, -127, -87, -67, 30, -17, 71, -126, - -117, -46, -28, -74, -83, 44, -11, -50, 31, -38, -13, 5, -33, -42, 30, 81, -33, 93, 26, - 46, 53, -123, -35, -10, 97, 117, -68, -6, -50, 35, 74, 52, -62, -81, -62, 42, -48, 19, - -37, -86, -71, -96, -60, -79, -37, 61, -120, 57, -1, 98, -13, 86, 0, 66, 67, -6, 39, - -86, 125, -96, -77, -24, -111, -70, -29, -18, 88, -16, 33, 92, 46, -35, -16, -93, -23, 63, - 88, 24, -56, 118, -64, 51, -64, 50, -48, 48, 7, -25, -13, 69, -84, 109, 43, -84, 74, - -64, -23, 86, 93, 65, 71, -27, -122, 22, 4, 74, 3, 103, -7, 67, 59, -59, -30, -60, - -45, 102, 63, 94, 24, -111, 19, -15, -13, -58, 6, -54, -115, -57, 19, -71, 124, 101, -38, - -127, -40, -112, 21, -40, 17, -12, -1, 116, -89, -31, 54, -72, -60, -89, 81, 12, 99, 45, - 42, -91, 121, -109, -53, 126, 13, 120, 43, -87, 83, 116, -128, 107, 84, -113, 24, -30, 109, - -13, -28, 107, 74, -32, -34, 108, 24, -46, 15, 125, -53, 114, -114, -79, 112, -92, -93, -124, - 126, 17, 83, 36, 97, 64, -36, 17, -47, 3, 60, 11, -118, -67, -74, -59, 57, -40, -87, - -64, -83, 104, -98, -49, 15, 30, -3, -118, -74, 69, -88, 51, 106, -38, -127, 59, 65, 5, - 87, 18, -73, 120, 112, 16, -35, 118, -99, -113, 118, -112, 117, 39, 89, 1, 82, 107, 67, - -91, 112, -91, -120, 36, -87, 15, -33, -80, 110, 65, 50, -99, -123, -80, -11, 27, 47, -67, - -33, -116, -47, 107, -124, -5, 117, -18, -12, 119, 105, -90, 20, -107, -74, -66, -92, 73, 92, - 94, -128, -14, -19, 115, -114, 105, -96, 19, 123, -81, 28, 125, 84, -14, 126, -103, -111, 17, - -122, 62, -95, -87, -80, -114, -42, -17, 12, -68, 37, 40, -73, 104, 27, -59, -113, -116, 111, - -51, 53, 39, 107, 3, 19, -3, -85, -102, -17, 125, 69, 77, 27, -60, -104, -86, -41, 91, - 57, 85, -113, -115, -46, -93, 105, -8, 18, -118, -71, -91, -48, -46, -14, 
7, 20, -66, 1, - 48, 33, -6, -29, 98, -14, 13, -88, -106, 103, 2, 118, 67, 28, -58, -81, -61, 32, 92, - 83, -106, -77, 7, -81, 84, -8, 86, 3, -68, -126, 61, 60, 11, 126, -104, 122, -60, -61, - -81, -28, -77, 103, -120, 0, -20, -82, 120, 102, 105, 56, -72, -34, 40, -42, 104, -109, -7, - 87, -87, -93, 76, -51, -125, 75, 9, -122, -85, -111, 121, 20, 4, -125, 7, 117, 63, -11, - -68, 22, -83, -3, 84, -109, 23, -112, -87, -17, 55, -60, 64, -37, -16, -40, 96, -77, -16, - -80, 12, -101, 83, 7, -59, 74, -22, 41, 16, -42, -124, -97, -76, -15, 45, 94, -103, 72, - 124, -110, 71, -64, -92, -80, -113, -45, 97, -98, 103, -120, 118, -87, -19, -52, 105, 12, -63, - 47, -14, 15, -51, -75, -12, -14, -83, 64, -18, 7, -68, -21, -26, 55, -125, -52, 83, -12, - 74, 33, 79, -10, 100, 85, 86, 32, -15, -110, 0, -84, -119, 102, 30, -50, -83, -5, 102, - 54, 69, -55, -94, 103, -118, -121, -3, 112, 90, 10, 116, -94, -51, -57, 85, 24, -61, -107, - -111, 39, -91, 49, 99, -40, 89, 117, -29, -39, 30, 26, 112, -127, 118, -61, 44, 88, 33, - -127, 54, 124, 27, 4, -89, 107, -13, -86, 73, 25, 61, 88, 79, -12, -102, 90, 32, 95, - -26, -1, -121, -116, 28, 78, -115, -112, -47, 32, -57, 92, 5, -24, 15, -8, 111, 110, 71, - 44, -61, -4, -85, 43, 10, 8, -101, 50, 56, 62, 72, 112, -1, 11, 111, 40, 106, -42, - 4, -41, 85, 54, -87, -15, -109, 74, -72, -109, -70, 115, -4, -97, -27, -2, 21, 17, 74, - -41, 35, -32, 89, 70, 26, -111, 122, -45, -107, -76, -110, -14, -45, -11, -27, 53, 4, -25, - -52, -62, -39, 53, -62, 117, 51, 27, 77, 91, -23, 28, -81, -70, -81, 71, 39, 98, 31, - 99, 27, -69, -82, -97, 53, -21, 41, 30, -96, -16, -83, 62, -3, -68, 113, 49, 70, 54, - -98, 59, -101, -104, 42, -61, 9, -39, 75, -126, 51, -67, -112, -50, 62, -51, 49, -45, 10, - -102, 62, 75, 80, 120, 17, 9, -93, 76, -8, 110, -126, 110, 11, 39, 58, -81, 71, 72, - -44, -35, 98, 105, -92, -92, -101, -28, -116, -70, 98, -110, -15, -23, -78, -110, 106, 13, 108, - 75, 36, 116, -17, -79, 54, -14, -125, -56, -7, -3, 113, -55, -83, -105, -18, -7, 54, 
-52, - -83, 84, 124, -83, 124, 50, -33, -46, -69, 17, -55, -31, 111, -120, -100, 64, -78, -127, -73, - -1, 29, 119, -24, 105, -55, 120, -59, -35, -125, -60, -29, -120, 44, -60, -87, 64, 58, -119, - 117, 36, 28, 47, -104, 84, -15, -105, 23, 107, -39, -63, -119, 53, -99, 25, -27, 52, 64, - 49, -4, -31, -2, 118, 5, -23, 20, -60, 113, 17, 32, -103, -13, 117, 65, 98, 58, 7, - -47, 45, -31, 91, 98, -91, 111, 82, 99, 7, -48, -45, -43, -40, 61, -98, 66, 92, -37, - -40, -23, -43, -33, 26, -53, 118, 73, 119, 114, -94, 54, 58, -37, -112, 18, 4, -100, -62, - 72, -32, 16, 102, 17, 57, -28, -4, -111, 19, 73, 39, 53, -13, 93, -60, -13, 123, 54, - 45, -66, -83, -15, 98, 119, 76, 98, 37, 45, 83, 48, -82, -113, -96, 68, 51, 33, -30, - 81, 74, 121, 119, 48, -76, -7, -3, 100, -95, -37, -72, -28, 35, 84, 55, -83, -98, -89, - -74, -13, -63, 112, 33, 29, -19, -85, 82, -70, -61, -91, 23, 39, -77, -59, 12, 73, 4, - -100, -62, -40, -13, 67, -86, -78, -59, -83, 124, -118, -117, -110, -47, 51, 58, -30, 113, -69, - 28, 98, -81, -66, 104, -79, 113, -105, -98, 100, 100, -66, 110, -7, 120, -28, 13, -57, -112, - 30, -29, -84, -18, 97, -114, 6, -72, -8, 58, 115, 8, 53, 107, -92, 15, 12, 88, 57, - 13, 69, -6, -38, 44, -7, -48, -119, -69, -66, -63, 22, 54, -11, 121, 51, 109, -125, -43, - 91, 70, -101, 62, 116, 6, 102, -67, -88, 84, -28, -118, -86, -101, 6, 58, 115, -113, -82, - 100, 63, 17, 55, -84, 111, -29, 103, 14, -116, 11, 106, -83, 80, -27, 22, -123, 14, 19, - -24, 85, -30, 87, 125, -117, -127, -85, -32, 5, 53, -20, 74, 25, -74, -86, 37, -92, 12, - -127, 107, 50, -30, 18, 37, 29, 60, 107, 10, -69, 90, -92, -58, 108, 107, 105, 87, 44, - -87, 26, -118, 87, -111, 58, 67, -37, -53, -56, -78, -81, -125, 80, 99, -128, -52, -67, -35, - -81, 42, -119, -75, 13, 45, -62, 7, -127, 123, -60, -10, -127, 33, 39, 16, -59, -92, -59, - -38, 14, 69, -70, 55, -8, 58, -23, 19, -70, -18, -31, -113, 109, 27, 109, -118, 62, -86, - 92, 73, 46, -95, -60, 8, -81, 70, 56, 92, -22, 114, -116, 98, 41, 6, -42, -110, 55, - 
-110, 37, 81, 59, 126, -46, -86, -74, -27, -70, 106, -51, -5, -55, -10, -31, 79, 1, 115, - 54, 6, -124, -116, 92, 9, 39, 12, -38, 124, 105, -13, 118, 126, -78, 5, -104, 118, 3, - -109, 98, 121, -93, 57, -71, 90, 111, 86, -76, 88, 31, -112, -120, -34, -105, 78, -65, 23, - 114, -72, 33, -41, -25, -36, -94, -42, -97, -109, -11, 47, -76, 23, -63, -62, -21, -45, 3, - 38, 84, 10, -2, -10, -111, 24, 20, 68, -121, 103, 51, 25, -54, -128, -95, -28, -114, 48, - -18, 119, -34, 15, -126, 121, 48, 8, -82, -65, -77, 75, 124, -123, 7, 22, 37, 72, -82, - -79, -60, -103, -85, -39, -72, 52, 109, 69, 85, 57, 14, 77, -74, 52, -104, -26, -121, -64, - -15, -20, -24, 9, -18, -73, 73, -70, 24, -34, -123, -43, -77, 114, -42, 85, -111, -55, 60, - 62, 13, -56, -116, -37, -85, 114, 96, 93, -21, -67, 83, 60, 119, 20, 47, 25, -101, 35, - -67, -37, -16, -57, 116, -128, 49, -22, 68, -40, 53, -93, 59, -24, -95, -29, 109, 51, 5, - 64, -37, -78, -21, -67, -1, -25, -83, 57, 76, 18, -24, -97, 68, -3, 44, 53, -82, -47, - 44, 95, -96, -52, -67, 18, -72, 61, -101, 97, 6, 9, -23, -77, 91, -74, 9, -48, -85, - 77, 64, 65, 39, -115, -94, -15, -102, -28, -83, 28, 108, -19, 119, 113, -74, 95, -93, -123, - 7, -24, 37, 50, 59, 39, -94, -78, 29, -32, -65, 118, 118, 120, 25, -103, 66, -84, 50, - 75, 103, -42, 4, -76, -14, 90, 38, -77, -19, -111, 18, 64, 8, -73, -78, 84, -28, 64, - -114, 17, 39, 94, 55, 32, 98, -32, 94, 3, 80, -102, -41, -48, -4, 23, -72, 18, -90, - 90, 91, 35, 2, 0, 51, 6, -101, -60, 28, -63, -87, 85, 47, -72, -86, 107, 10, -91, - -41, -1, 62, -32, 88, -92, -14, -98, -86, 45, -24, -6, -61, 90, 105, 43, 96, -55, 49, - 113, 5, 63, -113, -34, 76, 67, 87, 2, 85, 36, -2, -41, -36, -2, -6, 0, -76, -67, - -46, 61, -53, 94, -78, 23, -61, -76, 12, -84, 36, -28, 60, -74, -91, -94, -128, 66, 68, - 120, 83, -58, -52, 89, -59, 125, -72, 102, -113, -29, -30, 126, 107, -118, -72, 49, -64, 47, - -113, -70, 53, 118, -13, -105, 94, -128, 110, 103, -105, 51, 111, 109, -37, 84, 59, 83, -3, - -61, 61, 36, -21, 81, 
-105, 13, -81, 21, 25, -102, 8, -104, -61, -4, 104, 119, -49, 28, - -127, 37, -16, 101, -97, 21, 22, 53, -103, 47, -113, -64, 122, -60, -71, -30, 50, -80, -52, - -78, 77, 88, -52, 124, 49, -47, 5, 61, 7, -127, 6, 76, 85, 96, -126, 36, -87, -79, - -94, 29, -110, -72, -94, -61, 121, -91, -109, -73, 68, 92, 44, -107, -73, -58, -67, 66, 77, - 84, -126, 79, -3, 61, 99, -9, -106, -13, 27, 98, 0, 103, -92, -101, 52, -44, -115, 109, - 105, 115, -77, -112, 119, 3, -84, 18, -62, 69, -67, 76, 53, 31, -123, -89, -123, -125, -125, - 97, 8, -69, -93, 59, 20, -65, -122, 90, -65, 44, -76, -93, 125, -109, -57, 126, -87, -80, - 101, -29, 125, 0, -54, 122, -122, 78, 98, 93, 116, 0, -72, -12, 28, 37, -34, 120, 84, - -83, 71, -123, 71, -31, -47, 107, 15, -81, -14, 67, 78, -111, -4, -1, -2, 101, 6, -48, - -18, 15, -36, -114, -82, -9, -94, 100, -10, -11, -92, -3, 74, -108, -50, 38, -37, -76, 64, - -23, 83, -10, -112, 111, -72, -115, -97, 104, 3, 41, 4, 8, -24, -10, -14, 46, -91, 85, - 83, 78, -24, -39, 56, 111, -120, 19, -56, -101, -68, 62, 10, 68, 97, -102, -23, 39, -116, - 65, -117, 28, 73, 120, -89, -53, 20, 80, -50, -128, 48, 80, 4, -11, 9, 123, 15, 112, - -76, -75, -41, -7, -120, -97, -16, -54, 106, -88, -117, 22, -39, 104, -103, 23, -3, 21, -38, - -3, -97, -6, -14, -78, 36, -6, 86, 92, 11, 88, -79, 30, -86, -32, -85, 56, -124, -38, - -23, -19, 68, 116, 123, -45, -56, -7, 92, 122, 83, 6, -51, -41, -101, -27, 77, 37, -72, - -124, 101, 86, -6, 97, 65, 113, 44, -115, 47, -2, 38, 36, -90, -111, 19, 120, 57, -71, - -64, 125, 121, 40, -114, -45, 116, 43, -25, 18, 77, 108, -72, 44, 43, -105, 17, 99, -55, - 116, -85, -60, -106, 102, -39, -38, 112, 74, 34, 2, -108, -63, -109, 22, 112, 124, -4, 66, - -7, -95, -82, 42, 113, 6, -8, -115, -23, 71, -17, -101, -22, -12, -107, 58, -1, 93, -58, - -88, -107, -12, 104, -126, -26, 114, -27, -114, -50, -101, 17, -110, -12, 69, -84, -86, -99, 57, - 113, -44, -2, 47, 126, -121, -45, 14, 50, -68, 78, 35, 114, -55, -88, -83, -37, 96, -61, - -93, 40, -87, 
26, 44, 21, 102, -86, 27, -66, 80, 8, -59, -9, -84, -41, 91, 25, -116, - 25, 75, -31, 20, 75, 30, -45, 87, 83, -17, -9, 90, -31, -106, 11, 104, -4, -85, -55, - -59, 97, -30, 79, 112, -127, 82, 65, -57, -99, -108, -51, -34, -92, 66, -37, 98, 21, 60, - -63, 107, -52, -87, 33, -60, 43, 15, 62, -109, -26, -91, -11, 42, -88, 35, -17, 7, -72, - -52, -30, -65, -12, -2, 70, -60, 14, 118, -124, -3, 23, -126, -86, 25, 73, -43, -99, -47, - -47, -20, 37, -32, -2, 125, -96, -106, -96, 55, -10, 90, 68, 105, -7, -113, -10, -120, -29, - -41, 93, 30, 90, -19, -61, -91, 47, -116, 35, 7, -42, -76, -72, -91, 81, -103, -73, 108, - -12, -76, 85, 112, 18, -109, 89, -73, 101, 67, 32, 48, 37, -108, 55, 90, -97, -2, -56, - -86, -110, -83, -54, 23, -80, 96, -100, -78, 78, 43, -39, 95, -122, 104, 21, 30, 43, 34, - -128, -127, 30, 80, 58, 50, 59, -124, -92, 84, -118, -76, 125, 42, 97, 89, -9, -74, 38, - -12, -69, 126, 38, 10, 7, -57, -113, 92, 20, -41, -24, -62, -34, 28, -31, -103, -19, -99, - 36, -4, -9, 23, -59, 94, 75, 1, 39, -110, -111, -120, -15, 96, 10, 106, -93, 114, -106, - -100, -77, -111, -70, 46, 40, 35, 76, 87, -106, -34, 29, -76, 122, -82, -123, 102, -71, 5, - -60, -113, -77, 99, -10, 112, 84, 5, 98, 114, -20, -10, 16, -128, 110, -49, -15, 84, 31, - 57, -60, -65, -35, -115, -105, -125, -12, 95, 14, -53, -113, -94, -18, -12, -76, -12, 72, -113, - -26, 114, 96, -29, 58, 116, 65, 18, 52, 73, 4, 83, -9, 18, -110, 54, 61, 111, -62, - 14, -5, 82, 26, 30, 60, -13, -9, -13, -103, 42, -113, 96, 61, -41, -42, -35, -85, -67, - -100, 28, 91, -119, 3, 28, -20, -82, -49, 63, 111, 47, -11, 99, -121, -113, -73, -60, -33, - -3, 123, -11, -31, -71, 119, 11, -101, 44, -53, -71, 49, -105, 42, -76, -113, -128, 72, -14, - -89, -100, 11, 119, 30, 123, 114, -94, -90, -55, -46, -97, 102, 62, -98, -6, 68, -127, 39, - -52, -35, 75, -93, -67, -12, 27, -9, 118, -83, 120, -13, -15, 111, -91, 53, -118, 90, 126, - 8, 124, -45, 74, -83, -63, -77, -90, -79, 102, 114, -46, 2, 75, -91, 20, 34, 96, -127, - -43, -11, 
-60, 15, 34, 63, 114, -73, 60, 75, -94, -27, -113, -103, 19, 20, -41, 75, -5, - 20, 56, 35, 16, -98, -35, -71, -108, -43, 25, 91, -66, 0, -55, -39, 66, 114, -117, -114, - 14, -110, -20, -75, 65, 69, 4, -83, -49, -74, -93, -88, 38, 43, 46, -75, 6, 34, 108, - -54, 60, -98, 87, 61, 15, -30, 113, 62, 45, -43, 17, -126, -22, 19, 118, 18, 51, -114, - 47, 74, -74, -111, 105, 32, -92, 70, -90, -5, -5, -90, 6, -118, 37, 39, 14, 37, -95, - 123, 99, 22, 44, 89, 24, -20, -31, 56, -83, 41, 34, 116, 49, -110, -31, -37, -99, 78, - -92, -62, 15, 28, 45, -46, 56, -2, -48, 56, 123, 33, -95, 66, 28, -93, -7, -69, -66, - -70, 19, -6, 65, 29, 11, 106, 78, -39, 60, -56, -115, 28, 42, -13, -37, -76, -111, -3, - 45, 90, -80, -110, -16, -91, 100, 8, 77, 109, 60, -17, 34, -109, -110, -29, -108, 36, 22, - 107, -56, -125, -48, -97, -125, 25, 39, -75, 110, 76, 70, -98, -80, -44, -17, -105, -41, -114, - -72, 11, -22, 34, 34, -104, -104, 76, 53, -62, -66, -44, -12, 90, 74, 122, -46, -18, 51, - -35, 99, 77, 95, 56, -32, 24, 32, -40, 106, -53, -83, -65, 40, 105, -62, -72, 94, -1, - 52, 107, 58, 61, 104, -10, -14, -109, 0, 11, -69, -60, 125, -67, -31, -14, -43, 61, 36, - 69, 19, 18, -5, -15, 34, -113, 18, -4, -28, 96, 119, 118, -96, -22, 13, -127, 110, 122, - 48, -111, -38, 76, 32, 93, -72, -36, -23, -124, -25, -122, -11, -38, 97, 19, -126, -15, 59, - 2, -69, 94, 5, -102, -23, 75, -98, -29, -77, -99, 91, -117, 58, 103, -74, -108, 30, 23, - -89, -95, 119, 98, 34, 50, 44, 63, 77, -53, -49, -7, -85, -57, 73, -94, 83, -91, -66, - -17, -28, 26, -71, -20, 104, 91, 8, -48, -70, -31, -106, 54, -97, 52, -39, -109, 82, 106, - -90, -108, 69, 18, 40, -32, -37, 106, 109, -87, -82, 20, -109, -30, 26, 16, -10, -100, 104, - -72, 86, -118, 24, 97, 115, 21, 31, 93, -120, 52, 2, 38, -128, -11, -83, -90, -31, -20, - -28, 50, -111, -69, 121, -12, 30, -86, 1, 13, 122, -48, 51, -13, 42, -109, -105, 80, -127, - 29, 97, -78, -111, -108, 32, -28, -28, 87, -18, 4, 80, -26, -16, -12, -92, -46, 93, -112, - 106, -66, -23, 116, 
-29, -24, 14, 48, 124, 120, -89, 9, -104, 59, 80, 11, 19, -109, -109, - 26, -31, 40, 63, -46, -27, -62, 78, -49, -90, -126, 109, -57, -109, -123, -30, -65, -53, -25, - 48, 12, -118, -34, 3, 82, -85, -96, -4, 49, 78, -1, -56, -18, -105, -102, 125, 19, -125, - 32, -54, -108, -6, -72, -16, -106, 93, 27, 74, -25, 80, 68, -119, 84, 5, -84, 119, 11, - 119, -36, 57, -25, -87, 78, 7, -61, -20, -1, -92, 36, -109, -37, -23, 126, -54, -62, -33, - 120, -110, 97, 35, 71, 122, -91, -18, 47, 95, 92, 44, -56, 101, -97, 121, 14, 45, 10, - 114, 17, 32, 100, 22, 10, -102, -49, -47, -69, -49, 49, -115, -118, -50, 76, -87, -93, -92, - -38, 3, 81, -55, 32, -87, -90, 54, 112, -94, 101, 4, 7, 35, 93, -71, -14, 7, -45, - 36, 92, 9, -92, -126, 50, -40, -42, -51, -127, 115, -8, 69, -114, -48, -89, 99, 69, 4, - 82, -124, 24, 7, 6, 107, 30, 123, -42, 29, 122, 65, -91, -77, -31, -21, 76, -101, 59, - 89, 0, 43, -9, 28, -17, 29, 61, 42, -91, -75, -69, 81, 75, 108, 30, 104, -107, -13, - 36, -17, -82, -6, 44, -56, -108, -108, 16, -17, -44, -50, -30, -54, 61, 34, 124, -96, 89, - -109, -116, -99, 46, -111, -61, 120, -72, 69, -76, -98, 36, 86, -65, -125, 1, -58, -71, -47, - -48, -88, 58, 5, -108, -35, 8, 24, -31, -116, 3, -41, 67, -114, 52, 15, 20, 112, -63, - 6, -93, 6, 65, -82, -88, -106, 125, -111, 107, 100, -117, -36, -78, -114, 92, -116, 118, -45, - -75, 61, 27, -2, 50, -56, -50, -11, 126, 119, -100, 62, 1, -44, 46, 124, 0, -110, 43, - 65, 14, 6, -116, -24, 98, 108, -87, 55, 70, 29, 24, -127, 15, 20, -122, 48, -27, 122, - 107, 41, 21, 121, -96, -65, 76, -74, 1, -95, -5, -9, 13, 33, 18, -60, 55, -19, 85, - -25, -105, 79, 9, -94, 105, -16, 112, 95, 6, -122, 124, -122, -53, 55, -45, 21, -91, 112, - -127, 66, -50, 50, -37, -91, -101, 62, 81, -30, -123, -68, 36, -96, 59, 51, 79, 62, 92, - 104, 115, 95, -4, -74, 42, 91, 19, 116, -89, 91, 122, 92, -78, -56, -24, -98, -63, 125, - -72, -33, 69, -49, 66, 88, -64, 10, 101, -77, -93, -52, -98, -114, -116, 111, -45, -128, 45, - 80, -80, 27, 83, 77, 15, 22, 
14, -6, 15, 63, 75, -48, 89, -36, -2, 71, -68, 40, - -83, 22, 62, -115, 48, -66, 116, -124, 116, -102, 76, -63, -127, 90, -121, -8, 71, 53, -54, - 68, 95, -77, -67, 2, 44, 15, -68, -49, 51, -101, -115, -127, -126, 59, -17, 49, 70, 57, - 65, 6, -68, -60, 91, -79, 57, -83, 70, 12, 82, -36, -124, -126, -50, -70, -94, 57, 77, - -85, 46, 60, -41, -27, 97, -52, -112, -66, 46, -99, -115, -72, -67, -71, 66, 65, 13, -85, - 43, 121, 70, -43, 60, -102, -77, 102, 91, -39, 108, -57, 68, 112, -67, -126, -110, -4, 118, - -56, 102, -64, 66, 31, 72, 75, -2, 89, 120, -128, 60, -70, -55, 24, 61, 94, 31, -56, - 64, 79, 0, -99, -50, 10, 31, -87, 1, 81, 91, 22, -83, -90, -15, 119, 17, -23, -23, - -65, -116, -52, -42, -106, 35, 82, 113, 62, -124, 93, 5, -36, 76, 72, -28, -44, 8, 90, - -4, -62, -52, -60, 8, -24, -67, 77, 98, 62, -18, 62, -122, -115, -108, -72, 33, 20, -13, - -34, -97, 100, 100, -117, 6, 39, -108, -12, 31, 114, -120, 82, 105, 19, -39, -40, -23, -84, - 37, 51, 105, 42, -96, 26, -56, -49, 8, 125, -7, -70, -25, -98, 100, -39, -52, -102, 106, - -100, 19, 125, -11, 64, 78, 9, 30, 107, -5, 11, -122, 32, -75, 60, 12, -49, 7, -97, - 39, 105, 5, -79, 113, 21, 76, -75, 71, -9, -119, -38, 89, -115, -104, 118, 66, -39, -11, - -105, 56, -114, 54, -9, 112, -32, 31, -125, 67, 75, 30, 32, 94, -77, -21, -82, 61, -73, - -70, 16, -50, -50, -10, -7, 20, 87, -66, -112, 81, -78, 0, -7, -120, 74, 20, 108, -23, - -15, 24, -125, 119, 34, 8, 0, -16, -36, 15, 12, -126, -80, 79, -65, -32, 75, -43, 61, - -37, 118, -4, -87, -52, -45, -82, -99, 39, -53, -123, -48, -50, -91, 18, -5, 85, 73, -14, - -36, 51, 84, 9, -112, 35, -17, -53, -22, -102, -40, 81, -104, 62, -128, 89, 37, -14, -123, - 57, 94, 30, -6, -79, -123, 110, 92, -45, 11, -48, 107, -62, 26, -67, 70, 111, -87, -85, - 6, -108, -121, -55, 29, 30, 56, 77, 91, 4, 33, -36, 40, 97, -5, -29, -67, 117, -62, - -118, 49, -115, 0, -75, -111, 96, 23, -28, 10, 101, -103, -75, 30, 36, 84, -22, -95, 75, - -110, 115, -80, -48, -3, 83, -104, -101, -53, -68, 9, 
97, 99, 124, 46, -112, 20, -50, 41, - -93, 106, 16, -72, -111, 21, 88, -70, 93, 73, 33, -12, 57, 36, 120, -94, -38, -5, 85, - 120, 107, -98, -41, -6, 17, -78, 41, -60, -76, -106, 111, 12, 103, -39, -20, -17, 106, -102, - -23, -33, 99, -110, 20, -109, -26, -103, 108, -2, 34, -28, 102, -13, -119, -24, -68, -77, -71, - -2, -64, 23, -119, 34, 53, 112, 112, 69, 89, -45, 90, -43, 100, 9, 90, 49, -9, 26, - 107, 30, 59, 92, -9, -33, 91, 57, 93, -123, 19, 18, -59, 43, 61, -82, -47, 86, -89, - 73, 108, 41, -83, 67, -18, -100, 100, 6, -123, 26, 24, 68, 52, 8, 102, -17, 16, -125, - -17, 11, 87, -96, 64, 86, -81, 5, 113, -121, -123, 53, -122, -66, -110, -96, -57, -25, -120, - 41, 70, 58, -73, 121, 16, -113, -74, 9, 40, 75, -18, 65, 18, 118, -112, -83, -18, -125, - -43, 0, -16, -22, -88, -8, -126, -85, 76, 112, -73, 73, -89, -127, 82, 17, -113, 81, -23, - 21, 112, 120, -81, -13, -64, -109, -41, 22, 3, -22, -11, 112, -26, -82, 111, 15, 53, 112, - -51, 1, 44, -20, 13, -74, 93, 40, 104, -120, 101, 99, -91, 104, 24, 107, -40, 93, -15, - -22, 63, 50, 114, 17, 118, -115, 41, -73, -42, 39, -97, 111, -14, 30, -16, 68, 73, 112, - 77, 50, 38, -60, 91, 47, -11, 105, -43, -65, -122, -87, -94, -37, 94, 70, -49, -65, -115, - -72, -36, 71, 120, 23, -30, 39, 103, 82, 118, -61, -114, -95, -115, 94, -124, 82, -8, 94, - -82, 1, -116, -42, 38, -90, -35, 14, 37, 24, -76, 23, 49, 86, -122, 41, 36, 19, -42, - 122, -120, -91, -120, 36, -89, 21, 12, -114, 107, 37, -54, -4, -89, 96, -48, 38, 16, 81, - 33, -23, -20, -38, 120, -65, -115, -118, 93, 5, -62, -26, 75, 60, 22, 35, -31, -84, -115, - 72, 107, 99, -115, 115, 9, 83, 58, -72, -47, -5, -23, 46, 28, -108, -66, -109, -60, -83, - -127, 75, 37, -22, 108, -108, 60, 118, 37, -44, -94, -58, -18, -10, -68, 106, -70, 44, -106, - -60, 96, -116, -110, 38, 116, -45, 97, 114, 6, -30, 89, 41, 6, 20, 68, 75, -94, -116, - -29, 100, 96, 71, -76, 11, 101, -27, -109, 93, 23, -96, -90, 23, -122, 112, 97, -97, 60, - -102, -37, -103, 89, 105, 15, -41, 13, 26, 37, 28, 83, 
7, 78, -11, 89, 74, 114, -72, - 111, -94, -30, 67, -91, -90, -56, 121, -97, 42, 4, -128, -46, -63, 116, -83, 97, -82, -33, - 22, -77, 17, -91, 25, -38, -38, -31, -39, -72, 46, 71, -39, -123, 81, -53, 117, -9, -13, - 80, 88, -110, -68, 38, -17, 115, 6, -112, 42, 6, -64, -123, -15, 30, -17, 5, 50, -89, - -56, 72, -17, -42, 98, -46, 89, 1, 101, -111, 44, 6, -45, 120, 83, -59, -30, 38, 75, - 31, 93, -97, 22, 16, 121, -1, 77, -122, -117, -79, -27, -93, 110, -43, 88, -64, -86, 40, - -109, 85, 80, -102, 4, -56, -7, -68, -109, -77, 46, 23, -36, 111, 0, 118, 39, 67, 56, - -17, -8, 44, 38, 89, -6, 79, 62, -123, 38, 10, -33, 125, -61, 112, 74, -85, -14, 45, - 108, 1, -74, 56, 63, -28, -91, 30, -121, -62, 16, -63, -85, -114, -22, -38, 117, -99, -80, - 100, -27, 97, -23, 49, 24, -25, 38, -37, 0, -50, -4, 70, -90, 32, 15, -90, -68, -77, - -63, -105, 10, 121, 115, 15, 107, -62, 87, 123, 97, 118, -66, -111, -50, 81, -92, -15, -88, - -79, -124, 14, 60, 23, 122, 33, -5, 92, 74, -112, -66, -47, -58, 125, -16, -87, 123, -120, - -82, -43, 10, 19, -9, -64, 62, 112, 71, -49, -63, 118, -35, -127, 16, 11, -5, -115, -124, - -84, 99, -79, -107, -39, 61, 2, -83, 90, 85, 111, 84, -93, -119, 86, 105, -28, -63, 10, - 1, 115, 88, 10, -2, -32, -29, 92, -85, 81, 77, 40, -70, -7, -102, 108, 43, 72, -20, - 35, 53, 24, -82, 42, 0, 91, -91, 1, 108, -7, -50, 84, -124, 107, 115, 46, 26, -81, - 80, -105, -122, 0, 5, 75, -101, 22, 43, -71, 1, -56, -37, 31, 123, 47, 81, 97, 91, - 45, 52, 107, -30, -88, -69, -49, -104, 1, -102, 121, -8, -30, 105, 49, -116, -116, 7, -112, - -102, 69, 34, -91, -124, -55, -104, -42, -82, 93, -56, 107, 65, -121, 17, 91, 118, -22, -41, - -53, 41, 83, 121, -37, -78, 85, -11, -75, -64, 45, 73, -9, -103, 11, 94, -78, -99, 62, - -80, -93, -108, -66, -78, -10, -110, -89, 65, -106, 41, -19, 33, -128, -27, 14, -10, -82, -100, - -8, 43, 17, -30, 110, -18, -113, 119, 103, 10, 105, 71, -69, -6, 86, 126, 78, -94, -48, - 96, 51, 67, 115, -4, 74, -77, 38, -50, -121, -93, 53, 41, 55, -98, 8, 
22, -9, 5, - -66, -122, 118, -119, 110, -38, 86, 117, 32, -128, -29, 33, -2, -7, 120, 5, 112, -59, 51, - -97, -5, 103, -46, -115, 52, 60, 73, 120, 75, -45, -44, 84, 8, -1, -62, 106, -68, -66, - -116, -54, -44, 65, 75, 73, -101, -23, 53, -101, 94, -121, -65, -93, -7, 56, 26, 113, 78, - 9, -17, 15, 91, -47, -66, -112, 24, 4, 71, -18, -44, -15, -75, 66, 100, -115, 16, 46, - -9, 1, 95, -61, -125, -23, -76, -103, 113, 48, -3, -24, -22, -69, -43, -124, -11, 50, -24, - -122, -76, 36, 51, -90, -51, -101, -36, -105, -50, 95, -7, -41, 28, -30, 6, -6, 39, 48, - -65, 15, -28, -47, 95, 88, 4, -87, 94, -113, 62, 103, -96, 37, -20, -44, -41, 116, 23, - -128, -85, -12, -78, 40, 8, -107, 116, 5, -38, -111, 12, -123, -37, 114, 123, 35, 80, -117, - -122, 114, 117, 87, 123, -89, 51, 91, 11, 98, -63, 94, -71, -23, 75, -58, 6, -125, 126, - -114, -56, 5, 122, 20, 20, 122, 68, -34, -107, 85, -23, -112, -53, -107, -35, -125, -100, -84, - 114, -11, 90, 53, -51, -122, -5, 110, -15, -8, 53, 34, -26, 103, -65, 58, 40, 72, -86, - -34, 39, -98, 12, 17, -8, 87, -15, -31, -115, 46, -81, -75, 83, 35, -86, -91, -103, 16, - 116, -61, -6, -127, -76, 95, 108, 101, -62, -101, -16, -101, -91, -53, -46, -30, -110, -55, 57, - -83, -69, -69, -59, -22, 55, -95, -109, 87, 24, 67, -68, -91, -76, 77, 120, -20, 33, 60, - 102, -9, -22, -61, -32, 65, 5, 43, 58, 112, 69, -99, 13, 103, 126, 94, -67, 86, -31, - 77, -1, 116, 93, -13, -65, 6, -94, 37, -105, 26, -81, -16, -44, -95, -12, -59, 55, -25, - 3, -120, 34, -54, -17, 80, -94, 19, -87, -109, -116, 33, 87, -125, 7, 121, -85, 22, 69, - -52, -104, 118, 66, 44, -114, 78, 105, -11, -96, 111, -33, -78, 16, -32, 48, 102, -69, 55, - 20, -6, 78, -38, 103, -24, -70, -98, -56, 30, -56, 121, -70, -113, 4, 68, -72, 23, -7, - -96, -15, -52, -21, -37, -119, 19, 90, 8, 107, -51, 73, -122, -62, -54, -11, -122, -92, 116, - 11, 90, 67, -21, -83, -19, -92, -73, -61, 58, 47, -100, -17, -111, 14, 46, 96, 58, -52, - -66, 35, 60, 102, -3, -27, -118, -54, 56, 122, 43, 17, 74, 93, -80, 
-41, -81, -24, -33, - 84, -99, -119, 49, -114, -28, 50, 22, -81, -58, -3, -9, 102, -51, -54, -104, -1, -59, -40, - 28, -2, -59, -94, 63, 11, -99, 74, 109, -104, -100, -85, -100, -70, -20, 4, 66, -104, 125, - -59, -113, -1, -17, 100, -24, 94, -21, 103, 98, -13, 68, 6, -7, -79, -52, 122, -60, 71, - 83, -71, -109, -31, -22, -68, -59, 66, 52, -105, 93, -61, 69, -124, 47, -89, -96, 117, 42, - 63, -116, 67, 113, 90, 43, 17, -67, 9, -11, 53, 30, -91, 5, -109, -107, -108, 48, 45, - -107, -110, -6, -96, 87, -41, -97, 109, -67, 109, 27, 91, -122, -91, 56, 87, -21, 86, 47, - 16, 8, -52, 14, 61, 76, 5, 41, -102, -22, 94, 15, 79, 32, 3, -73, -61, 66, 91, - 85, -38, -38, -33, 39, -76, -71, -57, -62, 10, 8, 29, -17, -84, 125}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling/input_tensor.h b/Tests/UnitTest/TestCases/TestData/avgpooling/input_tensor.h new file mode 100644 index 00000000..ecc8451a --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling/input_tensor.h @@ -0,0 +1,284 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int8_t avgpooling_input_tensor[5280] = { + 88, -115, 20, 33, -35, 14, 16, 37, 65, 81, -8, 127, -71, 6, 26, -67, -28, 8, -99, + -118, -94, -96, 58, -95, 2, 15, -19, 77, 77, 101, -51, -109, 9, 75, 11, 78, -108, -79, + -94, -22, 77, 15, -40, 34, -52, -45, -104, -77, -99, 7, 18, 87, 46, 99, -56, -120, -81, + -72, -93, -123, -66, -57, 95, 107, 1, 15, -116, -48, 81, 35, -98, 39, 115, -67, 48, -97, + 44, -58, -94, -116, 22, 105, -76, 45, 53, -121, -54, 53, 72, -63, -115, 78, 69, 93, -16, + -20, -101, -51, -92, 72, 103, 31, 25, 61, -56, 56, -62, 123, 30, 73, -59, 87, -126, 30, + 34, -5, 115, 11, 15, -51, 8, -85, -90, 80, -92, 82, 56, 109, 42, 108, 104, -14, 70, + 88, -122, 26, 43, 61, -40, 44, -13, 30, 59, -114, 40, -29, -51, 99, 113, 115, 44, -124, + -35, -97, -81, 12, -125, -54, -27, -69, 34, -111, -19, -112, 30, -9, -18, -106, -20, -48, -85, + -25, -116, 66, -19, -88, 116, -126, 114, -20, 49, 64, 109, -36, 115, -3, 122, -68, 34, -118, + -119, -12, -76, -30, -14, 58, 60, 111, 95, 55, -75, -57, -20, 15, 62, -104, -36, 114, 121, + 59, -55, 7, 47, -111, -11, 9, 33, 100, 101, -58, 47, -41, 28, -9, -46, 124, 87, -36, + -34, 67, 109, -73, -110, 71, 52, -89, 121, 23, 85, -25, 110, 95, 80, -20, 78, 42, 45, + 79, 87, 35, 103, 76, -96, -61, -101, 81, 20, 59, -18, -111, -107, 73, 121, 112, 113, 33, + 49, 65, -62, -35, -119, -28, 99, -9, -27, 92, 28, -34, -14, 110, 94, -33, 89, -3, 14, + -43, 43, 106, -28, -48, -99, -18, 87, -10, 55, 18, 75, 75, 54, 36, -55, -38, 84, 91, + 100, 26, 35, 46, 72, 86, -111, -55, 87, 65, 90, 59, -61, 57, -6, 121, 35, -43, 41, + -69, -10, 78, -51, 110, -58, -53, 98, 39, 123, 61, -12, 125, 29, -124, -123, 48, 97, -38, + 6, -9, 25, 124, 118, -7, -26, 77, 40, -73, -108, -28, 9, 29, -58, 45, -22, 48, -33, + 31, 53, 55, 13, -62, -120, 94, -61, 74, 43, -31, 15, -93, 36, -46, -18, 104, -53, 124, + -23, 107, -29, 38, -34, -11, -6, 20, 6, 78, -64, -94, -54, -109, 114, -64, 85, 15, -75, + -60, 26, 29, 0, 94, -122, -101, 2, 23, 14, 
125, -100, 69, 41, 9, -64, 107, 82, 97, + -124, -7, 99, -75, -109, -25, 26, 85, 74, -54, -32, -112, -120, -26, -123, 121, 21, -29, -42, + 103, 32, -7, -110, -95, -77, -74, 1, 77, 69, 1, 18, -10, -7, -14, 112, -16, -51, -109, + -91, -32, 59, 25, -71, -83, 27, 52, -69, -71, -34, -42, 101, -37, -6, -101, -4, -104, 63, + -17, 76, -5, 126, 52, 65, -103, 85, 72, 49, -95, -72, -13, 37, 12, 13, -70, 39, -112, + 107, 65, -118, -127, 37, 98, -125, -120, 61, 113, -100, -79, 104, -51, -108, -32, -36, -13, -33, + -46, -42, 73, 72, -7, 105, -104, 79, -88, -126, 10, -6, 19, 15, -59, -65, 25, -66, 43, + 12, 118, 38, 80, -89, 62, -80, 120, -9, -113, 100, 117, -122, 62, 126, -100, 58, 10, 94, + 101, -56, -37, -11, -70, 27, 63, 34, 64, 124, -125, 66, 48, 55, 62, 86, -116, 109, 18, + -123, -57, 38, 5, 13, -78, -44, 1, 106, -76, -96, 70, -38, -81, -51, 65, 7, 86, 50, + -49, -78, -87, 21, -83, 80, 88, -44, 66, 100, -31, -12, 63, -101, 23, -97, -53, 56, 98, + -78, 75, -5, -70, 18, -65, -67, -65, 69, -87, 14, 5, 83, -38, 98, 42, 22, -8, 28, + -9, 39, -51, 1, 73, 70, 46, -38, -107, -37, 37, -80, -49, 57, -117, 10, -15, 62, 109, + 12, -122, 105, 91, -92, 19, -56, 57, -76, -61, -60, 11, 95, -63, -88, 104, -20, 58, -124, + 122, 63, -96, 72, 43, 99, 35, -27, 40, -64, 64, 85, 59, -47, 115, 12, -38, 46, -25, + -55, 94, -25, 46, -117, 66, 60, 43, -62, 92, 98, -108, 4, 57, -36, -19, -67, 6, -107, + -75, -17, -69, -92, 33, -87, -8, 43, -71, 100, -76, -70, 79, 125, -110, -51, 52, 28, 30, + -50, -127, 95, 12, -49, -41, -65, -70, -48, -10, 120, 39, -125, -107, 33, -30, 96, 3, 119, + 93, -2, 40, 68, -40, 13, -104, -91, -87, -52, -48, 70, 94, -86, 96, 45, -16, -53, -13, + 65, 41, -34, -63, -111, 116, -18, 16, 66, -123, 56, -49, 42, 125, 118, -77, 122, 112, 38, + 30, -123, -114, -64, -63, 69, -104, 124, -30, 107, 94, -45, -114, 81, -87, 60, -2, 32, -12, + 51, 119, -115, 113, 43, 56, -121, 52, 18, 78, 68, 115, 47, 54, 13, -49, -121, 88, 51, + -121, 43, 70, -20, 74, 101, -112, 123, -7, 13, -98, 1, 21, 117, 
3, -116, 35, 32, -46, + -100, -110, 54, 29, -94, -91, 82, -126, 101, 72, -123, 31, -85, 29, -22, 10, 44, -88, 90, + -48, -19, -96, -20, -108, -94, 4, 50, -78, -94, 9, 48, 120, 73, -100, 100, -102, 70, 73, + -116, 93, 5, -88, -86, 19, -66, 35, 13, 73, -113, -53, -121, -123, 123, -19, 85, -19, 50, + 83, 27, -37, 86, 55, 108, -100, 1, 94, 69, -109, 62, 81, 42, -78, -63, -34, -54, -106, + 1, -125, -32, 2, -61, -15, 88, -53, 22, 8, 126, -53, 29, 102, -12, 104, -39, -104, -111, + 0, -16, -58, 99, 62, 114, 4, 31, -51, -105, 106, 86, 106, -79, 80, 108, -120, -29, -116, + 74, -64, -90, 37, -49, -44, -67, 0, -105, -109, -40, -110, 14, -127, 7, -68, 123, -102, 72, + 22, 84, -107, -100, -19, 109, -65, -6, 108, -122, -85, 34, -22, -68, -119, -30, -7, -90, 1, + 115, -71, 70, -63, -23, 125, -38, -96, -39, 97, -69, 85, -3, -90, 22, -9, -18, 23, -117, + 62, -32, 109, -51, -50, 8, 56, 108, -95, 75, -58, -26, -112, -4, -114, 88, -49, 93, -27, + -15, 15, 0, -92, 83, -61, 46, -93, -19, 106, -98, 104, -2, 40, -7, -102, 16, 86, -125, + 57, 125, -74, -19, -37, -74, -78, -8, 14, -14, 9, 12, -73, -1, -50, 14, -27, -53, 49, + 104, 75, -58, -50, -108, -5, 26, -61, 58, -93, -68, -1, -78, 106, -116, 55, -99, 125, 34, + 5, -53, 17, 62, -16, 117, 64, 1, -97, 103, -44, 11, 112, -96, -25, 59, -122, -21, 21, + -106, -51, 53, -43, -82, -103, -103, 95, 20, -52, -108, 57, 124, 116, 125, -46, 75, -2, 117, + 119, 23, 3, 87, 100, -91, 115, -116, -58, -55, -122, 80, 24, -110, -17, -110, -47, -107, 25, + 70, 70, 17, -34, 48, 126, 27, 28, -42, 3, 1, -45, -12, 58, -100, 67, 111, -101, 112, + 16, 123, 125, -120, 118, -90, 88, -100, -127, -84, 54, -28, 55, -16, -78, -4, -113, 95, -48, + -125, -120, -112, -128, -121, -118, -108, 27, -88, -122, 91, 125, -113, 82, -33, -75, -23, -91, 71, + 48, 12, 97, 101, 65, -95, 90, -83, -118, -71, -35, -83, 50, -22, -21, 10, 49, -109, 90, + -105, -77, -95, -70, -43, -88, -82, -6, -80, -101, 46, -6, -39, 20, 93, 103, -6, -65, 97, + -16, 102, 59, -36, -116, -12, -55, 62, 88, 11, 
73, 27, -113, -22, -120, -6, -59, -83, -87, + 70, -127, -109, 56, 99, 126, 8, 118, -125, 70, -8, -39, 18, 121, -113, 60, 36, 114, 119, + -68, 98, 91, -20, -120, 78, 70, -7, -66, 104, -6, -32, 89, -46, -15, -42, -82, 85, -54, + 28, -56, -16, 75, 127, -38, -77, -105, -33, -81, -92, 104, -47, -42, -85, 120, -84, -113, 55, + -39, -100, 6, 31, -31, 7, -86, -67, -82, -125, 47, -77, 44, -57, 41, 79, -37, -63, -97, + 71, 69, 56, -50, -95, -73, 111, -97, -7, -67, -55, -117, -69, 34, 80, 23, -104, 41, 7, + 94, -90, -33, 14, 102, 29, -19, 88, -41, 92, -128, -60, 55, 126, -33, -24, -98, 10, 105, + 13, 112, 121, -121, 121, 107, -80, -62, 62, 127, -117, -29, -127, 18, -47, -52, 71, -103, 31, + -119, -126, -65, -69, 29, -19, -11, -119, 34, 88, -1, 1, 33, 79, 16, 22, 30, 58, -30, + 42, 47, 63, 41, -85, -90, 83, -117, -98, -123, 120, -27, 22, -100, 93, -75, -62, 4, 19, + -122, 83, 6, 36, 102, 95, 16, -117, -102, -53, 40, -23, 13, 40, -102, -109, 17, -87, 91, + -27, 99, -65, -118, 106, 35, 62, 39, 1, -110, -98, -117, 84, 15, -66, 87, -8, -120, 27, + -68, -69, 52, -104, -62, -23, 55, -88, -33, -115, -99, 10, -19, -85, -80, -99, -16, 4, 112, + -123, 88, 115, 86, 2, 74, 9, 117, -121, -108, -88, -37, 49, 61, -127, -41, -103, 118, 55, + -114, 36, 36, -55, -58, -59, -63, 52, -48, -15, -94, 108, 50, 121, 52, -55, -59, 83, -12, + 46, 107, 24, -8, -85, -73, 24, 53, -73, -27, 32, 15, 113, 9, -57, -58, 69, 109, 120, + -37, -89, 47, -111, -19, 83, -61, -36, 92, -14, -28, -20, -19, -82, 16, -92, -43, 108, 109, + -128, -45, -13, 84, -117, 40, 106, -122, 74, 75, -107, 56, -81, 86, -53, -107, 21, -18, 53, + -43, -64, -31, -27, -125, -93, -109, -64, 31, -31, -102, 121, -70, 10, -127, -5, -124, -58, -47, + -12, 11, -89, -34, -108, -109, -49, -36, 69, 108, 82, -47, 35, -94, -29, -7, -80, -29, 92, + 81, -122, -38, -77, -8, -91, -96, 109, -95, 94, 91, -27, -60, 115, -1, 57, 110, 22, -50, + -1, -69, -30, -103, 83, -63, -124, 81, 42, 16, -4, 116, 57, 92, -46, -14, 42, 111, -83, + 17, -22, 97, 30, -107, 
70, -54, -70, -28, 96, 88, 103, 45, 119, 101, 99, 15, 69, 15, + -17, 88, 63, -95, 80, 48, 111, -105, -83, 118, -41, 64, 104, 115, -30, -111, 126, -125, 82, + 50, -90, 94, 53, -58, 110, 28, 70, -88, 6, -46, -121, 30, -59, -73, -14, -7, -52, -14, + 119, 0, -22, 57, -16, -9, -53, -99, 69, -58, -117, 111, 61, 108, 36, -50, -36, -57, 44, + 57, 127, -21, -104, -37, -1, -33, 78, -124, -70, 66, 94, 73, 35, 113, 100, 42, 31, -62, + 60, -55, -4, 77, -99, 33, 3, 33, -25, 68, -75, 44, -115, 0, 108, 49, -19, -55, 96, + 21, 69, 124, -24, 73, -124, 59, 1, 75, -99, 42, -102, -118, 57, 121, -112, 93, -70, -82, + -115, 21, 112, -101, 83, -16, -89, 60, 95, -98, -29, -109, 8, -95, 4, -34, 91, 13, 69, + -108, 27, 55, -93, 15, -51, 112, -20, 112, -14, -59, 81, -67, 47, 115, -17, 57, 125, -94, + -23, -78, 33, 57, 114, -98, -22, 5, -109, -84, 90, -104, -122, 60, -107, -77, 59, 86, 31, + 103, 99, 12, 122, 102, 119, 42, 27, -124, -19, 105, -114, -37, 114, 31, -34, -9, 76, -80, + 6, 110, -4, -5, 7, 87, 23, 27, 39, -111, 7, 9, 41, 113, 76, 77, -14, 63, 116, + 127, -116, 50, -16, 48, 61, -90, -118, 7, 39, 115, -64, -15, 44, 33, 119, -76, -10, -14, + 112, -81, -76, -84, 90, -48, -57, -73, 13, -28, -101, 10, -127, 58, -94, -103, 52, -126, 100, + -47, -127, -64, 96, -13, 86, 124, -54, -125, 21, 120, -68, -123, 16, 69, 49, 45, -88, -97, + -37, 115, -26, 100, -103, -23, -96, 57, 67, 53, 44, -51, 53, -61, 12, -35, -104, -96, 106, + 45, 4, 9, 60, -5, 104, 51, -46, -125, -7, 47, 55, 104, 70, -58, 105, 94, -50, -57, + -75, -116, -25, -112, 24, 87, -62, 24, -93, -74, -48, 113, -94, 23, 85, 56, -110, -98, -40, + 66, 98, -118, 85, 41, -55, 58, -2, -79, -21, 22, -38, -87, -27, 45, 78, -17, -89, -107, + 78, 108, 74, -21, 107, 87, -48, -126, -14, 24, -56, 33, 116, -123, 121, 67, -64, -49, 97, + 64, 84, 79, 83, -54, 93, 77, 51, -48, -62, -59, 109, -83, -78, 20, 107, -115, 112, -38, + -92, 104, -94, 103, -123, -127, 75, -73, -42, -55, 21, 96, 108, -67, 22, -51, 84, 47, -98, + 63, -51, 53, 98, 101, -109, -50, 
-11, 66, 20, -109, 108, 44, -112, 52, -49, 28, 97, 98, + 119, -27, 34, 100, 1, -9, -52, 49, -70, 124, -1, -22, 45, -53, -9, 11, 118, 113, -7, + -2, -55, 84, 104, 40, -63, -71, 90, 118, 12, 0, 126, 89, -8, -79, 64, 25, 76, -4, + -101, 85, -3, -59, -2, 98, 57, 58, 24, -17, 38, 77, 65, -90, -11, 59, 83, -1, 75, + -53, 94, -90, 89, -122, -26, 73, 61, -87, 30, -83, 44, 77, -21, -38, 66, 122, 72, 91, + 77, 43, -109, -19, -62, 24, -28, -6, -122, -4, -18, 115, -52, 50, 69, 29, 99, -25, -52, + -8, -79, 22, 106, -120, -41, 62, 50, 122, -1, 96, 67, -12, -112, -21, -82, -79, -45, -88, + -122, 99, -3, 97, -56, -84, 41, -17, -123, 44, -7, 73, 39, 40, -54, -81, -82, 106, 119, + -54, 96, -20, -125, -91, 64, -126, -26, 59, 54, -119, -53, -48, -102, 116, 3, -20, -91, 91, + -102, -105, -93, 57, 60, -32, -55, 81, -74, 59, 13, 12, -110, 32, -34, -122, -24, -102, -63, + -97, 108, -62, 120, -13, -74, -106, -29, -108, 60, -94, 84, 60, 22, 81, 80, 16, 50, 31, + -12, 44, 43, -87, 110, -110, 115, 21, 2, 38, -67, -75, -104, -26, -51, 28, -25, -97, -68, + -105, -40, 31, 7, -107, 121, -125, 49, -7, 80, -124, -43, -92, -121, 61, 86, 98, -115, 20, + -66, 57, -64, -86, -105, 14, -114, 80, 98, 92, 68, 41, 102, -32, -41, 101, -107, 118, -119, + -74, -27, -77, -7, 40, 77, 75, -54, 90, -83, -19, 60, -123, -110, -79, -93, 96, 52, 76, + -112, -49, 63, 96, 31, -126, -49, -111, 98, 10, -86, -120, -82, -84, -94, -57, 30, -34, 120, + -20, -128, -80, 12, -97, -117, -7, -82, 68, -52, 95, 5, 74, -75, -125, 69, 126, -102, -97, + 58, 83, -79, -103, 10, 43, -10, -28, 80, 90, 100, 115, 76, 12, 95, -107, -71, 79, -66, + 51, 30, -26, 118, -117, -29, -44, 68, 62, -70, -26, -35, -21, -63, -4, -86, -118, 126, 111, + -47, 38, -127, -16, -114, 116, 72, -98, -84, -46, -110, -98, -79, 88, -101, 72, -109, -116, -79, + -63, 72, 3, -92, -104, -68, -105, 92, -83, 41, 49, -88, -116, 90, 55, 28, -66, 96, -95, + -109, -32, -112, -9, -17, 54, 24, 126, -80, 1, 118, -67, 58, 75, 35, -108, -8, 109, -30, + -120, -22, 12, 0, -5, 85, 
-95, -45, 74, -109, 57, 122, -29, 74, 119, 105, 124, 12, 8, + -18, 24, 4, 75, -81, -78, 44, -26, -28, 123, -3, -87, -78, -118, 113, 99, -116, 115, 41, + -53, -126, -125, 49, -123, -4, 38, 124, -109, -9, 104, -8, -86, -52, 124, 64, -56, 109, 29, + -29, -91, 123, 52, -120, 56, -77, -94, 117, -98, 97, -85, -69, -104, 15, 10, -55, 8, -26, + -6, -25, 100, 41, -70, -71, -47, -51, 4, -84, -21, 89, -48, 108, 123, -92, -3, 21, -12, + 124, -22, -24, 106, -85, -84, 96, 84, 65, 28, -83, 68, 70, 96, 46, -123, -36, 25, -103, + -104, 95, -121, 32, 81, 49, -8, 91, -120, 47, 48, -70, -80, -23, 11, -13, -128, -59, 6, + -60, 48, -74, -125, 69, 41, -7, 114, -91, 22, -99, 119, -112, -70, 63, -38, -7, 11, -46, + -46, 59, 28, 82, 63, 107, -28, 111, 42, -11, -111, -65, -113, -116, 22, -12, 2, -96, 14, + -90, 49, 15, 90, -84, 32, 92, 8, -75, 73, 45, 49, 79, -83, -69, -79, -93, 72, 90, + -47, -92, -69, -39, 107, -92, 52, 99, -109, 64, 49, -3, 40, -64, -48, -89, 13, 63, -3, + 52, -95, -107, 70, -47, 33, -87, -21, -114, -98, 20, -120, -113, 118, 112, 35, 41, -66, 78, + -87, 4, -115, -127, -90, -59, 71, -34, 5, 105, 95, 62, -81, 51, -96, 60, -12, -126, -96, + 27, 61, 36, 121, -86, 19, -60, -102, 51, 39, -124, -67, -66, -17, -59, 105, 99, 1, -85, + 38, 56, -86, 85, -91, -69, 69, -90, 47, -121, -100, -37, -123, -2, 10, 0, -43, 30, 95, + 70, 109, 59, 15, -34, -78, 88, -34, 125, -41, 75, -55, -103, 8, -91, -50, 111, 74, -30, + 7, -29, 12, 57, -117, 112, 106, -42, 109, -111, -94, 69, -56, 59, -53, -10, 119, -115, 97, + 40, -57, 6, 17, -1, 58, 125, 74, -10, -83, 90, 51, -52, 2, -3, -22, -45, -90, 41, + -117, 55, -12, -92, -57, -40, -20, 39, 26, 81, 23, 22, 81, 29, -104, -22, -9, -117, 39, + -51, 79, 46, -114, 91, -7, -125, 0, 37, -35, -35, 4, 25, 62, 10, 36, 14, -46, -40, + 10, -93, -125, 70, -113, 35, -122, -45, 30, -15, -32, -4, -48, -42, -107, 41, 44, -2, 62, + 19, -36, -45, -86, -45, 113, -60, 62, -21, 91, -83, -102, 111, -2, -77, 73, -55, 18, 6, + -67, -4, -41, 88, -71, 93, 54, 36, 99, 96, 108, 
74, 80, -114, -85, 93, -23, -103, -8, + 66, 82, 73, -35, -37, 27, -22, -28, 90, -3, -90, 16, 0, 83, 9, 34, -83, -62, 7, + 47, -120, -46, -29, 121, 17, -21, 117, -50, 60, 89, -83, 31, -18, 96, -5, -102, 70, 96, + -128, 53, 11, 65, -36, 36, 48, 3, -112, -67, 62, -13, -57, 116, -5, 108, 22, -9, 0, + -105, 80, -55, -112, 49, 21, 92, 16, -94, -52, 70, -107, -38, -18, -105, 86, 28, 112, -55, + 29, 68, 74, -100, -29, -5, 20, -89, -41, -99, -124, 20, -34, 116, -46, 81, 34, 46, 73, + 93, 27, 14, -29, 123, 5, -75, -11, -59, -118, -42, 79, -8, 60, -99, -73, -88, 63, -86, + 123, -42, 97, -66, -91, 61, 46, -21, 20, -75, -10, -55, -56, -41, 56, -55, 118, -39, -40, + 98, -127, -116, -75, 87, 12, 62, -28, 50, 72, -5, 119, -90, -53, 50, -105, -96, -65, 127, + -95, -67, -117, -58, 115, -117, -93, -35, -121, -33, -43, 36, 83, -42, -3, -47, -110, -44, 83, + -107, -45, -70, -65, 21, -63, -124, 65, 114, 108, 70, -108, 123, -65, 16, -5, -127, 84, -37, + -128, 85, -8, 29, -51, 86, 87, -19, 101, 39, -18, -109, 14, -87, 56, 34, 27, -66, 86, + 20, 23, -85, 70, 101, 83, 11, 57, 105, -8, 119, 110, -57, 75, 80, -55, -32, 8, 40, + -30, 73, -21, -86, 122, -40, 42, -56, 106, -38, 27, -113, 6, 65, -73, 20, -40, -101, -84, + -62, -120, 57, -73, -44, 37, -46, -64, 108, -1, -92, 45, -18, -39, -28, 27, 56, -72, -18, + 99, 36, 1, 70, -13, -51, -89, -117, 20, -55, -70, -64, -114, 127, 3, 7, 33, 43, -85, + -66, -92, 0, 61, -95, -4, 12, -109, -127, 21, -128, 44, 103, -23, 126, 43, -37, -123, -67, + 83, 93, -25, 107, -20, -26, 50, -73, 25, 66, -37, 107, 93, -122, 106, -103, -49, -52, -103, + 52, -83, -103, -54, -58, -103, 62, -93, -108, -47, 106, -12, 0, 47, 36, -68, 37, -126, -73, + -51, -76, 35, 107, -112, -33, -88, 94, -118, -18, 6, -82, -3, 98, 3, -17, 116, 15, 125, + 18, -29, -114, 56, -13, 55, 49, -72, 59, 50, -82, 41, -108, 79, 75, 2, -120, -13, 124, + 104, 60, 29, 42, -38, 40, 54, -48, -107, -17, -124, -15, -29, 7, -8, -7, 123, 23, -80, + 76, 83, 121, 123, 42, 51, 46, 81, -109, -34, 3, -109, -72, 
-109, -107, -104, -39, -48, -111, + -17, 106, 47, 127, -39, -68, -106, 58, 48, 95, 84, 18, -109, 91, 107, -29, -88, -41, 3, + -72, 78, -61, -42, -44, 118, 31, 66, 28, -19, -53, 119, -40, -15, 46, 102, 94, 36, -39, + -102, -83, -14, 33, -62, -80, -53, 14, -2, -11, -95, 76, -114, 92, 73, -12, -110, 31, 61, + 89, 97, -124, -72, 40, 39, -54, 71, -20, -115, 21, -122, 51, -65, -127, -60, 58, -20, 20, + -69, 63, 6, -5, -83, 69, -120, 31, -120, -95, 98, 93, 77, 29, 0, -46, -59, 84, 120, + 70, 76, -4, 110, 11, -94, 29, -9, -100, 97, -41, 65, 3, 20, -67, 115, -95, 8, 113, + -40, -22, -97, 19, 23, -102, 75, -61, 119, 82, 88, 112, 22, -93, -28, -3, -124, 21, 31, + -32, -117, -26, -71, -53, 91, 97, -91, -91, -19, -94, 69, 10, 68, 37, -60, 99, 89, 68, + 76, -28, 50, 46, 73, -57, -106, 63, -97, 105, 97, -117, -98, 104, 62, -9, 97, 27, 91, + -107, -53, -56, -42, -125, -89, -50, -74, 83, 102, 62, -17, 12, -82, 85, -57, -102, 67, 23, + -112, 15, 10, -90, 86, 88, -119, 77, -28, -122, -32, -103, 111, 21, 122, 116, 125, 42, -1, + -83, -96, -70, -33, 70, 105, 2, 15, -115, 24, 29, -86, -61, 4, 31, 121, 103, -89, -76, + -31, -52, 21, 36, 109, 27, 72, 83, -42, 78, 61, 56, -94, -112, -42, 54, 56, -126, 4, + -92, -72, 101, 27, 81, -119, 24, -30, -105, -104, -33, 74, -37, -6, -46, 58, 19, -125, 100, + -98, 20, 115, 28, -77, -102, 74, 76, -85, -2, 99, 118, -111, -117, 105, 16, 120, 98, -122, + 65, 104, -26, -15, 15, -82, 99, 7, 2, 120, 9, 5, 48, 29, 14, -10, -50, 73, -77, + -87, 90, -16, -4, -44, -80, 12, -12, 55, -70, -78, -113, -73, 17, 85, -104, -17, -65, 50, + 66, -74, -58, 36, -125, 125, 88, -71, 86, -74, -23, 115, -72, -39, -87, 24, -112, -64, 84, + 17, -11, 4, -74, -63, 113, 37, 117, 9, -96, -94, -107, -16, 127, 48, -28, -18, 20, 1, + -12, 61, -10, 28, -106, 45, -56, 17, -54, -49, 52, -58, -28, -32, -112, 112, -53, 65, 116, + -28, 60, -89, 9, -85, -126, -21, -21, -120, -109, -44, -110, -81, 63, 78, 37, 61, -127, 33, + 63, -81, 64, 84, 61, 13, 49, 5, -44, -53, 55, -5, -43, -80, 87, 43, 
-30, 70, -100, + -117, 90, -42, -75, -20, 11, -82, 99, 100, -21, 40, -91, -120, -44, 15, -50, -49, -3, 24, + -30, -68, -82, 76, -53, -54, -44, -33, -9, 112, -44, 91, -103, 70, 1, 81, 96, -15, 32, + -114, 120, -19, 44, -19, 64, -30, -111, 18, -75, -84, 52, 95, -102, -74, 19, 54, 116, -19, + 15, 47, 76, -127, -78, 29, -121, -98, -61, 100, 31, -56, -48, -120, 20, 101, 107, 7, 21, + -62, 106, -111, -42, 39, -66, 125, -57, 62, -50, -12, -65, -29, -30, 23, 55, 74, 1, 109, + -53, -113, 79, -116, 12, 17, -6, 49, -66, 93, 16, 18, 32, -6, -27, 123, -20, 39, -86, + 15, -63, -99, 107, 22, 44, 91, -5, 65, -52, 48, -125, -12, 105, 26, -26, -109, 86, -61, + -117, 49, -37, -114, -72, 79, 15, -42, -101, -99, 105, 119, -101, 59, -40, 64, 31, -95, -70, + 89, -49, -119, 86, -33, 43, -110, 5, 109, 61, 110, -79, 8, 11, -128, 89, 82, 87, -42, + -68, -61, -22, -123, -126, 91, 77, -100, -15, -114, -111, 95, 97, 72, 119, 36, -10, 117, 72, + -126, -87, 115, 124, 41, -94, -44, -59, -101, -84, -58, -52, 33, 50, -50, -35, -27, -99, -9, + 80, 62, -117, 54, 114, -110, -39, -45, 32, -52, -88, 125, -68, -89, 59, 11, -41, 69, 83, + 103, -84, -2, 91, -34, -120, -22, -19, 17, -82, -27, 72, 9, 110, -4, -119, -20, 80, -15, + 11, -77, -72, 6, 61, -126, 64, 100, 71, -31, 56, -120, -83, -56, -6, -4, 63, -22, -51, + -84, -24, 6, 26, -85, -70, 68, -95, -125, -82, -79, 14, -19, 63, 118, -48, -21, -11, 7, + -103, 52, 2, 51, 40, 102, -87, -95, -28, -21, -79, 89, -59, -4, -127, 101, -125, -102, -33, + -75, -21, 40, -94, -52, 52, -14, -9, -119, -47, 97, 51, -83, 82, 54, 36, 3, -88, 45, + 106, 106, 67, -22, 95, -46, -82, -55, -56, 23, 16, 111, 74, -105, 42, 46, 58, 46, 67, + -43, 5, -108, 124, 43, -59, 33, 113, -102, -83, 93, 115, -36, 127, 78, 121, 109, 95, 97, + 60, -117, -31, -45, -79, 72, 3, 58, -56, 47, 91, -54, -38, 3, 2, -72, 124, -32, -26, + -11, 77, 42, 65, -88, -126, 46, 61, 107, 111, 23, -99, -84, -43, -109, -72, 43, -123, 45, + 20, 64, -90, -119, -36, 91, 104, -6, -11, 121, -18, -88, 30, -19, -60, -66, 
113, 96, -123, + -18, -43, -113, 87, 21, 1, 53, -92, -14, 53, 25, 64, 92, -81, -65, -127, 101, -56, -29, + 13, 34, 93, -107, -29, -8, -103, 35, -32, 0, -26, 111, 34, 1, 0, -58, -108, 58, -95, + -9, -57, -70, 88, -64, 4, 47, -102, -47, -23, -18, -5, 57, 56, -79, -113, -66, -15, 90, + -52, -124, 0, -111, -67, -76, -120, -95, 8, -19, 31, 3, -29, -26, 7, 0, 16, 104, -125, + 22, -71, 88, -120, -78, 28, -123, -65, 9, 27, 78, -79, 86, -56, -85, -21, 78, 99, 21, + -59, -121, 59, 47, 8, -13, 122, -66, -104, -35, 65, -93, -65, 72, -46, -24, 53, 63, 120, + 41, -75, 92, 67, 0, 43, 28, 113, -38, -81, -37, -4, 16, -102, -44, -25, -45, -32, -94, + 8, -105, -18, -66, -78, -13, -103, 91, 6, 110, 84, 67, 89, -86, -117, 14, 100, -49, 125, + 109, 59, 124, 104, 115, 58, -50, 21, -103, 84, 84, 89, -52, 118, -102, 91, -65, -121, -92, + 120, -9, -96, 23, -16, -26, -114, -113, 124, 89, -19, -126, 13, -122, 27, -11, -20, 79, 111, + -111, 119, -85, 82, 73, 102, 102, 41, -83, 78, 10, 68, 33, 19, 81, -108, -22, 80, 57, + 39, -46, -88, 34, 51, 69, 18, 68, 107, -25, -66, 30, 46, -124, 14, -35, -5, -122, -46, + 97, -90, -118, 115, -74, 76, -3, -111, 40, 93, -25, -27, 43, -42, -6, -8, 88, 71, -63, + -96, -120, 43, 48, 13, 9, 13, 47, 57, 126, -35, -63, 75, -102, 4, -62, 115, 69, -22, + -126, 121, -47, 106, 108, 24, -67, -83, 126, 69, -27, -100, 66, 114, 88, -100, 35, 17, 86, + 117, -74, 8, 15, -2, -45, 122, 1, -9, 3, -36, 92, -43, -79, 48, 88, 19, 80, 103, + 42, -11, -29, -46, 35, -88, -71, 105, 49, 31, -63, 19, 91, -125, -58, 35, 124, -120, -19, + -13, 108, -36, -36, 78, 64, -96, -123, 112, -104, -3, -50, 84, -104, -107, 46, 9, -68, 4, + -67, -38, 0, 76, -117, 68, 20, 45, 47, -78, -53, 80, -62, -106, -108, 61, -92, 55, -70, + -95, -5, 52, 99, -119, 41, 59, 40, -72, -117, 66, -30, 92, -31, 81, 103, 32, 34, -36, + 44, -78, 105, -64, 4, 115, 55, 4, -65, 88, -52, -45, 24, -98, -70, -35, 99, -108, -27, + 86, -36, -104, -23, 1, -13, -16, -103, -65, 65, 11, 110, 84, -58, -4, 77, 45, 12, 47, + -4, -62, 
-42, 19, -29, 0, 50, -33, 20, 56, 39, 47, 73, 107, 116, -117, 88, 117, 119, + 92, -127, 107, -59, 15, 44, -53, -6, 94, 114, 119, -109, -80, 55, -46, -87, 106, 94, 107, + 5, 50, 47, 40, -42, 10, 10, -34, -69, -92, 55, 122, 46, 121, 97, -95, -43, -13, 103, + 56, 49, -49, 50, 78, -32, -36, 27, 77, 64, 33, 23, 53, -5, 125, 96, -76, 98, 77, + 108, 17, -123, -33, -55, -92, 52, -85, 74, 99, 76, -6, -35, -44, -111, -98, 25, -76, 66, + -61, -7, 119, 126, -51, -17, 51, 25, -28, -26, 80, 70, 23, 33, 114, -78, -107, -122, 110, + -8, 108, -118, -95, 23, -83, 2, 125, 41, -48, 94, -11, 64, -42, -4, 108, -19, -47, 57, + 71, -81, -62, -6, 77, 59, -100, -31, -123, -71, -117, 83, 117, -62, -19, -100, 66, -40, -60, + -60, 80, -61, 101, -94, -20, 25, 119, 125, -13, 114, 96, 95, 66, -83, 9, -86, -58, 96, + -73, 91, 49, 103, 64, -15, 63, 100, 83, -88, 109, 32, 69, -46, 27, 21, 68, 20, -14, + -121, -63, -126, 54, 10, -27, 73, 88, -101, -34, -91, 121, 94, -78, 5, 102, -75, -119, -10, + 40, -89, 24, -120, -102, 94, -100, 95, -64, -33, -47, -124, 119, 116, 68, -14, 17, -26, 46, + 60, -50, 0, 108, -100, 39, -116, 30, -29, -93, -126, 124, 116, 73, -80, 92, 91, 56, 34, + 52, -80, 14, -51, 2, 72, -12, 65, 127, -77, -7, 110, 105, 63, -23, 43, -11, -99, -52, + 84, 103, -86, 0, 33, -110, -46, 102, -114, 82, 104, -21, -113, 96, -19, 29, -61, -77, -94, + 71, -31, -64, -18, 102, 44, 122, 73, -77, 107, -68, 85, 75, 29, 5, -7, -37, -125, 17, + -12, -72, 30, 80, 87, -13, 2, -111, 38, -15, -118, -88, 63, -107, -17, -90, 83, -119, 19, + 51, 87, 70, -128, 2, -128, -61, -32, 29, 29, 80, -98, 118, 126, 47, 111, -70, 46, 101, + -11, -52, 106, 5, 37, -25, -21, 125, 13, -69, -58, -72, 4, -51, -52, -91, -81, -104, 61, + -64, -56, 105, 72, 0, -49, 57, 18, 49, 87, -75, -117, 102, 112, 108, 62, 64, 75, -17, + -94, 98, 40, 100, -73, -50, 112, 19, 125, -39, 27, 98, -71, 36, -21, 93, 82, -78, 69, + -32, -54, -73, 24, 87, 117, 111, -52, 2, 98, 3, -46, 15, -19, -69, -117, -101, 24, 50, + -66, -17, -98, 123, -114, 51, 66, 
5, 57, -44, 64, 26, -65, 15, -29, -77, 40, -127, -37, + 8, -47, 102, 117, 11, 69, -4, 117, -78, -35, -32, -106, 100, -21, 25, -54, 95, 14, -117, + -6, 65, 84, -111, 18, 19, -56, 70, -58, 82, 106, 99, -125, 95, 113, 35, 86, 48, -103, + -116, -1, 58, -126, -75, 114, 107, -99, -105, -119, -105, 86, -101, -99, -60, 114, 28, -114, -62, + 45, -29, 123, 64, 38, -11, -81, 60, -113, 87, -98, 29, -7, 14, 122, 47, -71, -117, -121, + -128, 58, 1, 17, 47, -87, 83, 75, 91, -68, 113, 115, -113, 4, 29, 60, -69, 57, 25, + -74, -124, -78, -91, 86, -86, 109, 86, -20, -112, -15, 44, -82, 75, 35, -58, -14, -53, 12, + -64, 121, -16, -106, -48, 105, -78, -64, 67, -114, -109, -46, 112, -101, -107, 125, 52, -3, -65, + -61, -119, -57, 113, -81, -105, -63, -91, -96, -117, -69, 41, 88, -94, -103, -119, 97, -40, 11, + -34, -69, 18, 23, -107, -106, 21, 120, -65, -125, -108, 118, -25, -19, -4, 16, -124, 127, -117, + -71, -6, -122, 20, -65, -72, 119, -88, 11, 6, -3, 7, -71, -123, -78, 45, 109, -33, -26, + -12, 92, -37, -98, -48, 70, 0, 90, 95, -82, 89, 94, 64, -94, 107, 15, -87, -111, -27, + 61, 79, 68, 39, -43, -55, 20, -5, -96, 57, -85, 45, -107, 52, 61, 44, 41, 43, -20, + 64, -44, -107, 5, -111, 120, -113, 16, -47, 16, -35, -64, -9, 107, -16, 125, -60}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling/output.h b/Tests/UnitTest/TestCases/TestData/avgpooling/output.h new file mode 100644 index 00000000..531dfffb --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling/output.h @@ -0,0 +1,14 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int8_t avgpooling_output[180] = { + -31, -24, 15, 11, -6, -15, 0, -20, 7, -2, -17, -19, 20, 0, -4, -23, 6, -12, -12, -40, 4, 13, -10, + 2, 15, 17, 10, 1, -9, 10, -14, -30, -4, -7, -21, 4, 24, 0, 4, -5, -27, 5, 2, 24, 6, 6, + 9, 7, 2, 2, 33, -6, -26, -13, 22, -4, 16, -14, -5, -1, -50, 7, 26, -3, -3, -12, 25, 3, 15, + 8, -9, -14, -12, -10, -33, -9, -4, -7, 5, -22, 2, -8, -8, -12, -5, 21, 4, -2, -15, -21, -3, -8, + 5, -16, -15, 3, -30, -13, 15, -7, -4, 1, 13, -8, 14, 1, 23, 3, 9, 0, 16, -9, -15, 18, -6, + -28, 11, -5, -7, 1, -25, -8, 2, 17, 10, 24, -48, -11, -2, -8, 11, 30, -10, 5, 19, -5, 18, -21, + -3, 10, -22, 13, 1, 4, 3, -9, 7, 14, 4, 22, 34, -4, -1, -39, 10, 12, 2, -11, -4, 22, 30, + 13, 5, 14, -5, -6, -14, -16, -22, -9, -40, 9, 1, 15, 8, 15, 2, 17, -14, 27}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling/output_mult_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling/output_mult_data.h deleted file mode 100644 index 01f58c83..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling/output_mult_data.h +++ /dev/null @@ -1,40 +0,0 @@ - -/* - * Copyright (C) 2010-2020 Arm Limited or its affiliates. All rights reserved. - * - * SPDX-License-Identifier: Apache-2.0 - * - * Licensed under the Apache License, Version 2.0 (the License); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#pragma once -// Generated by generate_test_data.py -#include - -const int32_t avgpooling_output_mult[17] = {2053179100, - 2053179100, - 1368786067, - 2053179100, - 2053179100, - 2053179100, - 2053179100, - 2053179100, - 2053179100, - 2053179100, - 2053179100, - 2053179100, - 2053179100, - 2053179100, - 2053179100, - 1368786067, - 1368786067}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling/output_ref_data.h deleted file mode 100644 index c382a0fc..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling/output_ref_data.h +++ /dev/null @@ -1,22 +0,0 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. -#pragma once -#include - -const int8_t avgpooling_output_ref[360] = { - -1, 3, -7, -17, -13, -5, 4, 0, 7, -10, 2, 26, -14, -15, 15, -10, 6, 23, -1, -21, -10, 2, -12, - 24, -8, -17, 17, 25, -8, 2, 2, -6, 3, 1, -27, 11, -6, -1, 11, -23, 26, 16, -1, 31, 2, -19, - 20, 7, -4, -1, 0, 16, -22, 22, -39, -12, -2, 18, -2, 14, 1, -7, -8, 8, 5, 12, -31, 12, 14, - 18, 21, -8, -8, 1, 26, 16, -6, -10, 3, -7, 24, 13, 4, -13, 13, -11, 4, 20, -17, 20, -23, -2, - 0, 19, 5, 10, -19, -23, -8, 10, 10, 4, 4, -19, -17, 18, 10, -41, 6, 3, 12, -13, -18, 1, 5, - 24, 14, 9, -5, -29, -18, 15, -29, 14, -11, -11, -11, -35, 47, 0, 8, -25, 34, 4, 1, 14, -40, -24, - -15, 22, -9, 1, 0, 5, -10, -18, 33, -35, -13, 10, 7, -9, -4, -7, 10, -10, 6, 12, -4, 5, 35, - -7, -5, 20, 5, -27, -12, -37, -12, -13, -20, -40, -1, 12, -17, -7, 3, -37, -13, -19, -11, -20, -41, -11, - -32, 11, -18, 4, 25, -10, -12, 24, -1, -7, 20, 4, 6, 7, -38, 19, -14, 18, -26, -13, -22, 4, -3, - -10, -6, 21, 14, 16, 24, -15, -3, -29, -15, 12, -18, 11, 5, -14, -32, 13, -2, 3, -34, 5, 2, 9, - -16, -22, 0, 0, -27, 11, -15, -4, 31, 10, 18, 2, -26, 22, -3, -8, -4, 0, 6, 4, -7, 13, -7, - -10, -21, -12, -10, 8, -4, -22, -13, 1, 
-8, 16, -6, -25, -20, -21, 4, 1, -11, 3, 22, -1, -14, -27, - 8, 2, -12, -2, 4, -25, -10, -23, 8, -10, 20, -19, -1, 0, -5, 0, -11, -8, -10, 0, -12, 10, -8, - -8, 3, -5, -13, -37, 28, -2, -41, -15, 19, 3, 13, 27, 4, 21, 2, 13, -10, -3, -35, -24, -6, 34, - -6, -5, -24, -28, -29, -10, -5, -29, 2, -14, -14, 26, -1, -6, 6, 1, -41, -19, -13, -5, -31, -11, -38, - 3, 28, -9, -29, 4, -5, 5, 13, -12, -16, -19, 2, -9, -6, 30}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling/output_shift_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling/output_shift_data.h deleted file mode 100644 index 66ecf8ba..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling/output_shift_data.h +++ /dev/null @@ -1,25 +0,0 @@ - -/* - * Copyright (C) 2010-2020 Arm Limited or its affiliates. All rights reserved. - * - * SPDX-License-Identifier: Apache-2.0 - * - * Licensed under the Apache License, Version 2.0 (the License); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#pragma once -// Generated by generate_test_data.py -#include - -const int32_t avgpooling_output_shift[17] = - {-11, -11, -11, -11, -11, -11, -11, -11, -11, -11, -11, -11, -11, -11, -11, -11, -12}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling/test_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling/test_data.h index b00cc3f6..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling/test_data.h @@ -1,5 +1,3 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). 
-// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. #include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_1/config_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_1/config_data.h index ca4eef46..30cc8192 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_1/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_1/config_data.h @@ -1,19 +1,20 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. #pragma once -#define AVGPOOLING_1_OUT_CH 3 -#define AVGPOOLING_1_IN_CH 3 +#define AVGPOOLING_1_BATCH_SIZE 1 +#define AVGPOOLING_1_INPUT_N 1 #define AVGPOOLING_1_INPUT_W 9 #define AVGPOOLING_1_INPUT_H 5 -#define AVGPOOLING_1_DST_SIZE 3 -#define AVGPOOLING_1_INPUT_SIZE 135 -#define AVGPOOLING_1_OUT_ACTIVATION_MIN -128 -#define AVGPOOLING_1_OUT_ACTIVATION_MAX 127 -#define AVGPOOLING_1_INPUT_BATCHES 1 -#define AVGPOOLING_1_FILTER_X 9 -#define AVGPOOLING_1_FILTER_Y 5 -#define AVGPOOLING_1_STRIDE_X 1 -#define AVGPOOLING_1_STRIDE_Y 2 -#define AVGPOOLING_1_PAD_X 0 -#define AVGPOOLING_1_PAD_Y 0 +#define AVGPOOLING_1_INPUT_C 3 +#define AVGPOOLING_1_FILTER_W 9 +#define AVGPOOLING_1_FILTER_H 5 +#define AVGPOOLING_1_STRIDE_W 1 +#define AVGPOOLING_1_STRIDE_H 2 +#define AVGPOOLING_1_PAD VALID +#define AVGPOOLING_1_ACTIVATION_MAX 127 +#define AVGPOOLING_1_ACTIVATION_MIN -128 +#define AVGPOOLING_1_OUTPUT_C 3 #define AVGPOOLING_1_OUTPUT_W 1 #define AVGPOOLING_1_OUTPUT_H 1 +#define AVGPOOLING_1_PADDING_H 0 +#define AVGPOOLING_1_PADDING_W 0 diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_1/input_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_1/input_data.h deleted file mode 100644 
index 89a9ff67..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_1/input_data.h +++ /dev/null @@ -1,12 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. -#pragma once -#include - -const int8_t avgpooling_1_input[135] = { - -53, 14, -3, 14, -2, 53, 120, 64, 26, -112, -26, -111, -37, -31, 96, 17, 110, 81, 105, 1, - -99, -78, 86, 38, -61, -77, -58, 57, 87, -63, -67, -9, -111, -3, 66, 126, 39, 80, -37, 22, - -34, 34, 14, 83, -40, -102, -15, 109, 43, -121, 82, -88, -99, -75, 115, 33, -36, 75, -84, 6, - -66, 25, 75, -1, 88, -108, -91, 15, -126, -12, 23, 12, -113, -103, 106, -24, 108, 3, 101, 62, - 19, -64, -94, 40, -11, 106, -124, -103, -64, -48, 32, 5, 8, 79, 122, 3, 57, 96, 50, 119, - 98, -10, -128, 23, -68, -40, 51, 85, 6, 110, -99, 115, 2, 126, 120, -71, -82, 125, 97, 0, - 105, -90, -76, -63, -76, 3, -87, 111, 93, 55, -124, -10, 113, -9, -97}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_1/input_tensor.h b/Tests/UnitTest/TestCases/TestData/avgpooling_1/input_tensor.h new file mode 100644 index 00000000..0dc5c811 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_1/input_tensor.h @@ -0,0 +1,13 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int8_t avgpooling_1_input_tensor[135] = { + 70, -107, 121, -90, 56, -15, 66, 39, -67, -3, -80, -44, -40, 74, 97, 41, -2, 11, 82, 17, + -41, 23, 50, -103, -65, 54, 41, 63, -93, 53, -117, -90, 24, 28, 89, -24, 106, 67, -91, -126, + -45, 119, 122, -88, 44, 23, 54, 118, -13, -55, 127, 63, -103, -94, -90, 50, 59, 81, -67, 1, + 2, -123, 117, -93, -48, -61, -96, 87, -107, 34, -82, 105, 61, 40, -37, -7, -5, -59, 39, 74, + -104, -36, -9, 4, -121, 25, -22, -3, 23, -62, 9, -86, -69, -54, -107, 4, 60, -33, -103, -112, + -112, -52, -127, -87, -26, 96, -3, 37, -7, -96, 32, 4, 77, 11, -2, 6, 78, -120, -18, -119, + 22, -49, 79, -86, -66, 34, 126, -47, -26, 61, 60, -81, -28, -22, -103}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_1/output.h b/Tests/UnitTest/TestCases/TestData/avgpooling_1/output.h new file mode 100644 index 00000000..5ca26b5d --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_1/output.h @@ -0,0 +1,6 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int8_t avgpooling_1_output[3] = {-3, -17, -4}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_1/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_1/output_ref_data.h deleted file mode 100644 index e3962904..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_1/output_ref_data.h +++ /dev/null @@ -1,5 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. 
-#pragma once -#include - -const int8_t avgpooling_1_output_ref[3] = {5, 14, -5}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_1/test_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_1/test_data.h index 31add6d5..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_1/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_1/test_data.h @@ -1,4 +1,3 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. #include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_2/config_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_2/config_data.h index 9357c93e..b57e7f5d 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_2/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_2/config_data.h @@ -1,19 +1,20 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
#pragma once -#define AVGPOOLING_2_OUT_CH 5 -#define AVGPOOLING_2_IN_CH 5 +#define AVGPOOLING_2_BATCH_SIZE 1 +#define AVGPOOLING_2_INPUT_N 1 #define AVGPOOLING_2_INPUT_W 12 #define AVGPOOLING_2_INPUT_H 1 -#define AVGPOOLING_2_DST_SIZE 60 -#define AVGPOOLING_2_INPUT_SIZE 60 -#define AVGPOOLING_2_OUT_ACTIVATION_MIN -128 -#define AVGPOOLING_2_OUT_ACTIVATION_MAX 127 -#define AVGPOOLING_2_INPUT_BATCHES 1 -#define AVGPOOLING_2_FILTER_X 3 -#define AVGPOOLING_2_FILTER_Y 1 -#define AVGPOOLING_2_STRIDE_X 1 -#define AVGPOOLING_2_STRIDE_Y 2 -#define AVGPOOLING_2_PAD_X 1 -#define AVGPOOLING_2_PAD_Y 0 +#define AVGPOOLING_2_INPUT_C 5 +#define AVGPOOLING_2_FILTER_W 3 +#define AVGPOOLING_2_FILTER_H 1 +#define AVGPOOLING_2_STRIDE_W 1 +#define AVGPOOLING_2_STRIDE_H 2 +#define AVGPOOLING_2_PAD SAME +#define AVGPOOLING_2_ACTIVATION_MAX 127 +#define AVGPOOLING_2_ACTIVATION_MIN -128 +#define AVGPOOLING_2_OUTPUT_C 5 #define AVGPOOLING_2_OUTPUT_W 12 #define AVGPOOLING_2_OUTPUT_H 1 +#define AVGPOOLING_2_PADDING_H 0 +#define AVGPOOLING_2_PADDING_W 1 diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_2/input_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_2/input_data.h deleted file mode 100644 index 7572b016..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_2/input_data.h +++ /dev/null @@ -1,8 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. 
-#pragma once -#include - -const int8_t avgpooling_2_input[60] = {-82, -104, 10, -28, -52, -51, -66, 52, 124, -74, -21, 4, 37, -7, -33, - 102, 110, 24, 52, 121, 13, -55, -79, -92, -35, -103, 86, 95, 46, 32, - -24, -123, 120, 29, -77, -97, -69, -68, 58, 38, 3, 3, 79, -47, 112, - -52, -113, -46, 107, 68, 83, -70, 91, 14, 113, 74, 73, -103, -98, 25}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_2/input_tensor.h b/Tests/UnitTest/TestCases/TestData/avgpooling_2/input_tensor.h new file mode 100644 index 00000000..8a11ec48 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_2/input_tensor.h @@ -0,0 +1,9 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int8_t avgpooling_2_input_tensor[60] = { + 82, -22, -108, -95, -120, 106, -86, -16, -14, 102, -124, 60, -21, -75, 83, 107, 72, 34, -110, 45, + 14, -45, -58, -102, 53, 48, 123, -21, -91, -41, 61, 58, 22, 27, -46, 96, 103, 60, 46, -120, + 103, 38, 12, -63, -100, 123, -1, -23, -19, 87, 41, -46, 59, 62, 91, 61, 47, 119, 59, -18}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_2/output.h b/Tests/UnitTest/TestCases/TestData/avgpooling_2/output.h new file mode 100644 index 00000000..06f00e8e --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_2/output.h @@ -0,0 +1,9 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int8_t avgpooling_2_output[60] = {94, -54, -62, -55, -9, 21, -16, -48, -61, 22, 30, 15, -1, -66, 77, + -1, 29, -15, -96, 60, 56, 50, -15, -101, 19, 41, 45, -19, -55, -11, + 68, 95, 20, -6, -69, 87, 66, 31, 3, -89, 107, 47, 16, -12, -44, + 89, -3, 16, -7, 26, 75, 0, 52, 34, 53, 51, 1, 89, 61, 37}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_2/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_2/output_ref_data.h deleted file mode 100644 index fc47c917..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_2/output_ref_data.h +++ /dev/null @@ -1,8 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. -#pragma once -#include - -const int8_t avgpooling_2_output_ref[60] = {-67, -85, 31, 48, -63, -51, -55, 33, 30, -53, 10, 16, 38, 56, 5, - 31, 20, -6, -16, 18, 4, 47, 13, 2, 39, -38, -31, 45, -6, -27, - -75, -35, 49, 44, -2, -39, -63, 44, 13, 24, -49, -60, -12, 39, 73, - 11, -60, 41, 25, 98, 35, -37, -19, 8, 69, 79, 2, -6, -42, 69}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_2/test_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_2/test_data.h index 31add6d5..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_2/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_2/test_data.h @@ -1,4 +1,3 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. #include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_3/config_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_3/config_data.h index fc2e2f1e..05d87882 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_3/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_3/config_data.h @@ -1,20 +1,20 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). 
-// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. #pragma once -#define AVGPOOLING_3_OUT_CH 2 -#define AVGPOOLING_3_IN_CH 2 +#define AVGPOOLING_3_BATCH_SIZE 1 +#define AVGPOOLING_3_INPUT_N 1 #define AVGPOOLING_3_INPUT_W 9 #define AVGPOOLING_3_INPUT_H 1 -#define AVGPOOLING_3_DST_SIZE 20 -#define AVGPOOLING_3_INPUT_SIZE 18 -#define AVGPOOLING_3_OUT_ACTIVATION_MIN -128 -#define AVGPOOLING_3_OUT_ACTIVATION_MAX 127 -#define AVGPOOLING_3_INPUT_BATCHES 2 -#define AVGPOOLING_3_FILTER_X 1 -#define AVGPOOLING_3_FILTER_Y 1 -#define AVGPOOLING_3_STRIDE_X 2 -#define AVGPOOLING_3_STRIDE_Y 1 -#define AVGPOOLING_3_PAD_X 0 -#define AVGPOOLING_3_PAD_Y 0 +#define AVGPOOLING_3_INPUT_C 2 +#define AVGPOOLING_3_FILTER_W 1 +#define AVGPOOLING_3_FILTER_H 1 +#define AVGPOOLING_3_STRIDE_W 2 +#define AVGPOOLING_3_STRIDE_H 1 +#define AVGPOOLING_3_PAD VALID +#define AVGPOOLING_3_ACTIVATION_MAX 127 +#define AVGPOOLING_3_ACTIVATION_MIN -128 +#define AVGPOOLING_3_OUTPUT_C 2 #define AVGPOOLING_3_OUTPUT_W 5 #define AVGPOOLING_3_OUTPUT_H 1 +#define AVGPOOLING_3_PADDING_H 0 +#define AVGPOOLING_3_PADDING_W 0 diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_3/input_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_3/input_data.h deleted file mode 100644 index 8166de8a..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_3/input_data.h +++ /dev/null @@ -1,8 +0,0 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
-#pragma once -#include - -const int8_t avgpooling_3_input[36] = {-21, -36, 122, 114, 23, 44, -71, -55, 18, -91, 51, -87, - 103, -100, 55, 66, -91, -79, -96, -22, -104, -50, -113, -84, - 41, 42, 27, 103, 47, -60, -28, -48, 110, 32, 37, -128}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_3/input_tensor.h b/Tests/UnitTest/TestCases/TestData/avgpooling_3/input_tensor.h new file mode 100644 index 00000000..03324a9c --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_3/input_tensor.h @@ -0,0 +1,7 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int8_t avgpooling_3_input_tensor[18] = + {-32, 83, 113, -23, -6, 76, -3, -73, 43, -60, 22, -110, -32, 2, -42, -118, 28, 96}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_3/output.h b/Tests/UnitTest/TestCases/TestData/avgpooling_3/output.h new file mode 100644 index 00000000..7318da28 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_3/output.h @@ -0,0 +1,6 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int8_t avgpooling_3_output[10] = {-32, 83, -6, 76, 43, -60, -32, 2, 28, 96}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_3/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_3/output_ref_data.h deleted file mode 100644 index 7487a4e4..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_3/output_ref_data.h +++ /dev/null @@ -1,7 +0,0 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
-#pragma once -#include - -const int8_t avgpooling_3_output_ref[20] = {-21, -36, 23, 44, 18, -91, 103, -100, -91, -79, - -96, -22, -113, -84, 27, 103, -28, -48, 37, -128}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_3/test_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_3/test_data.h index b00cc3f6..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_3/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_3/test_data.h @@ -1,5 +1,3 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. #include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_4/config_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_4/config_data.h index e5c92ce0..11b0e47f 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_4/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_4/config_data.h @@ -1,20 +1,20 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
#pragma once -#define AVGPOOLING_4_OUT_CH 2 -#define AVGPOOLING_4_IN_CH 2 +#define AVGPOOLING_4_BATCH_SIZE 3 +#define AVGPOOLING_4_INPUT_N 1 #define AVGPOOLING_4_INPUT_W 1 #define AVGPOOLING_4_INPUT_H 20 -#define AVGPOOLING_4_DST_SIZE 42 -#define AVGPOOLING_4_INPUT_SIZE 40 -#define AVGPOOLING_4_OUT_ACTIVATION_MIN -128 -#define AVGPOOLING_4_OUT_ACTIVATION_MAX 127 -#define AVGPOOLING_4_INPUT_BATCHES 3 -#define AVGPOOLING_4_FILTER_X 1 -#define AVGPOOLING_4_FILTER_Y 3 -#define AVGPOOLING_4_STRIDE_X 1 -#define AVGPOOLING_4_STRIDE_Y 3 -#define AVGPOOLING_4_PAD_X 0 -#define AVGPOOLING_4_PAD_Y 0 +#define AVGPOOLING_4_INPUT_C 2 +#define AVGPOOLING_4_FILTER_W 1 +#define AVGPOOLING_4_FILTER_H 3 +#define AVGPOOLING_4_STRIDE_W 1 +#define AVGPOOLING_4_STRIDE_H 3 +#define AVGPOOLING_4_PAD SAME +#define AVGPOOLING_4_ACTIVATION_MAX 127 +#define AVGPOOLING_4_ACTIVATION_MIN -128 +#define AVGPOOLING_4_OUTPUT_C 2 #define AVGPOOLING_4_OUTPUT_W 1 #define AVGPOOLING_4_OUTPUT_H 7 +#define AVGPOOLING_4_PADDING_H 0 +#define AVGPOOLING_4_PADDING_W 0 diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_4/input_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_4/input_data.h deleted file mode 100644 index b19e6ef1..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_4/input_data.h +++ /dev/null @@ -1,12 +0,0 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
-#pragma once -#include - -const int8_t avgpooling_4_input[120] = { - 15, 62, -99, -23, -105, 113, 53, -8, -6, -82, 126, 53, 63, 84, 66, -38, -79, -104, -117, 109, - 107, -121, -60, -22, -109, 3, -84, -51, 33, 115, 106, 109, 15, 117, -34, -72, -31, 62, 116, -79, - 65, -58, 28, -27, 52, 94, 70, 8, -111, -72, -50, 87, -109, 99, -50, 4, 93, -1, 16, 38, - 90, 33, -32, -19, -71, -6, 94, -20, 95, 7, 84, 84, -26, -80, 109, -21, 56, -105, 126, 64, - -24, -71, 19, 42, 10, 85, -71, 113, -78, 105, 32, -115, -62, -18, -17, -102, -85, -34, 121, -115, - 53, 9, 50, -19, -15, 76, -79, -89, -75, -75, 96, 106, -45, -45, 111, 111, -65, -59, -102, 63}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_4/input_tensor.h b/Tests/UnitTest/TestCases/TestData/avgpooling_4/input_tensor.h new file mode 100644 index 00000000..95697cf1 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_4/input_tensor.h @@ -0,0 +1,12 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int8_t avgpooling_4_input_tensor[120] = { + 119, 17, -110, -96, -33, -16, -112, -99, -79, -30, -107, -64, 109, 32, 90, 69, 41, 47, -112, 59, + 118, 106, -13, 100, 91, -41, 59, -30, -96, -125, -35, 50, 40, -30, 37, 31, 98, -102, 41, 51, + 51, 72, -44, -55, 85, 117, -26, -93, -23, -36, 37, -13, -95, -80, 66, -76, 11, -51, 27, -25, + 100, -22, -47, 16, -98, -115, -13, 92, 95, 57, 8, 8, 46, -56, 105, 55, -115, 70, -71, 19, + -102, -33, 108, 58, 38, 0, 50, 55, -87, -78, 12, -90, -114, -104, 29, -10, -36, 5, -91, -54, + 23, 83, -66, 52, 79, -56, -22, 43, 69, -83, -57, -104, -100, 27, 106, -56, -27, -98, 68, -115}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_4/output.h b/Tests/UnitTest/TestCases/TestData/avgpooling_4/output.h new file mode 100644 index 00000000..ac2c8b31 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_4/output.h @@ -0,0 +1,8 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int8_t avgpooling_4_output[42] = {-8, -32, -99, -64, 80, 49, -2, 88, 18, -65, 14, 17, 70, -26, + 31, 45, -4, -47, -6, -69, 27, -10, -5, 11, 53, 2, -93, 45, + 15, 8, -8, -38, -40, -36, -45, 27, 42, -32, -17, -44, 21, -107}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_4/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_4/output_ref_data.h deleted file mode 100644 index 358849c0..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_4/output_ref_data.h +++ /dev/null @@ -1,8 +0,0 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
-#pragma once -#include - -const int8_t avgpooling_4_output_ref[42] = {-63, 51, 58, -12, 17, -19, -23, -11, -53, 22, 29, 51, 43, -9, - 48, 3, -30, 8, -22, 34, 25, 17, 39, -6, 56, -6, 91, -21, - 2, 19, -39, 34, -55, -51, 75, -42, -56, -29, 54, 57, -84, 2}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_4/test_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_4/test_data.h index b00cc3f6..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_4/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_4/test_data.h @@ -1,5 +1,3 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. #include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_5/config_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_5/config_data.h index 3b33e77f..88f2eb57 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_5/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_5/config_data.h @@ -1,19 +1,20 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
#pragma once -#define AVGPOOLING_5_OUT_CH 1 -#define AVGPOOLING_5_IN_CH 1 +#define AVGPOOLING_5_BATCH_SIZE 1 +#define AVGPOOLING_5_INPUT_N 1 #define AVGPOOLING_5_INPUT_W 3 #define AVGPOOLING_5_INPUT_H 3 -#define AVGPOOLING_5_DST_SIZE 9 -#define AVGPOOLING_5_INPUT_SIZE 9 -#define AVGPOOLING_5_OUT_ACTIVATION_MIN 0 -#define AVGPOOLING_5_OUT_ACTIVATION_MAX 6 -#define AVGPOOLING_5_INPUT_BATCHES 1 -#define AVGPOOLING_5_FILTER_X 1 -#define AVGPOOLING_5_FILTER_Y 3 -#define AVGPOOLING_5_STRIDE_X 1 -#define AVGPOOLING_5_STRIDE_Y 1 -#define AVGPOOLING_5_PAD_X 0 -#define AVGPOOLING_5_PAD_Y 1 +#define AVGPOOLING_5_INPUT_C 1 +#define AVGPOOLING_5_FILTER_W 1 +#define AVGPOOLING_5_FILTER_H 3 +#define AVGPOOLING_5_STRIDE_W 1 +#define AVGPOOLING_5_STRIDE_H 1 +#define AVGPOOLING_5_PAD SAME +#define AVGPOOLING_5_ACTIVATION_MAX 6 +#define AVGPOOLING_5_ACTIVATION_MIN 0 +#define AVGPOOLING_5_OUTPUT_C 1 #define AVGPOOLING_5_OUTPUT_W 3 #define AVGPOOLING_5_OUTPUT_H 3 +#define AVGPOOLING_5_PADDING_H 1 +#define AVGPOOLING_5_PADDING_W 0 diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_5/input_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_5/input_data.h deleted file mode 100644 index 7d22978e..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_5/input_data.h +++ /dev/null @@ -1,5 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. -#pragma once -#include - -const int8_t avgpooling_5_input[9] = {-63, 42, 77, 104, -38, 88, -5, -22, -18}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_5/input_tensor.h b/Tests/UnitTest/TestCases/TestData/avgpooling_5/input_tensor.h new file mode 100644 index 00000000..88eef8ca --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_5/input_tensor.h @@ -0,0 +1,6 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int8_t avgpooling_5_input_tensor[9] = {-81, -72, -74, -122, -39, 66, -69, -19, 73}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_5/output.h b/Tests/UnitTest/TestCases/TestData/avgpooling_5/output.h new file mode 100644 index 00000000..827adac7 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_5/output.h @@ -0,0 +1,6 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int8_t avgpooling_5_output[9] = {0, 0, 0, 0, 0, 6, 0, 0, 6}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_5/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_5/output_ref_data.h deleted file mode 100644 index cc449f79..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_5/output_ref_data.h +++ /dev/null @@ -1,5 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. -#pragma once -#include - -const int8_t avgpooling_5_output_ref[9] = {6, 2, 6, 6, 0, 6, 6, 0, 6}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_5/test_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_5/test_data.h index 31add6d5..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_5/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_5/test_data.h @@ -1,4 +1,3 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. 
#include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16/config_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16/config_data.h index 9da27605..9ba99ef4 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_int16/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_int16/config_data.h @@ -1,19 +1,20 @@ -// Generated by generate_test_data.py using TFL version 2.8.0 as reference. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. #pragma once -#define AVGPOOLING_INT16_OUT_CH 17 -#define AVGPOOLING_INT16_IN_CH 17 +#define AVGPOOLING_INT16_BATCH_SIZE 1 +#define AVGPOOLING_INT16_INPUT_N 1 #define AVGPOOLING_INT16_INPUT_W 6 #define AVGPOOLING_INT16_INPUT_H 4 -#define AVGPOOLING_INT16_DST_SIZE 204 -#define AVGPOOLING_INT16_INPUT_SIZE 408 -#define AVGPOOLING_INT16_OUT_ACTIVATION_MIN -32768 -#define AVGPOOLING_INT16_OUT_ACTIVATION_MAX 32767 -#define AVGPOOLING_INT16_INPUT_BATCHES 1 -#define AVGPOOLING_INT16_FILTER_X 2 -#define AVGPOOLING_INT16_FILTER_Y 3 -#define AVGPOOLING_INT16_STRIDE_X 2 -#define AVGPOOLING_INT16_STRIDE_Y 1 -#define AVGPOOLING_INT16_PAD_X 0 -#define AVGPOOLING_INT16_PAD_Y 1 +#define AVGPOOLING_INT16_INPUT_C 17 +#define AVGPOOLING_INT16_FILTER_W 2 +#define AVGPOOLING_INT16_FILTER_H 3 +#define AVGPOOLING_INT16_STRIDE_W 2 +#define AVGPOOLING_INT16_STRIDE_H 1 +#define AVGPOOLING_INT16_PAD SAME +#define AVGPOOLING_INT16_ACTIVATION_MAX 32767 +#define AVGPOOLING_INT16_ACTIVATION_MIN -32768 +#define AVGPOOLING_INT16_OUTPUT_C 17 #define AVGPOOLING_INT16_OUTPUT_W 3 #define AVGPOOLING_INT16_OUTPUT_H 4 +#define AVGPOOLING_INT16_PADDING_H 1 +#define AVGPOOLING_INT16_PADDING_W 0 diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16/input_data.h 
b/Tests/UnitTest/TestCases/TestData/avgpooling_int16/input_data.h deleted file mode 100644 index 9fb51db2..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_int16/input_data.h +++ /dev/null @@ -1,35 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.8.0 as reference. -#pragma once -#include - -const int16_t avgpooling_int16_input[408] = { - 17455, 21160, -20313, 5904, -20289, -26790, 2856, -25088, -21724, 29883, -22748, 153, 4391, -15386, - 8337, 32369, 31361, 7054, 2383, -30121, 10452, 7493, 4535, 32494, -4744, -16495, 16272, 12063, - 27566, -18173, 29255, -20396, 28622, -11159, 10864, 3468, -14167, -15681, -4775, 13875, 417, -17392, - 30716, -30006, 18032, -32210, -3860, -29914, -27871, 18370, 32076, 5524, 30533, 10379, 16305, -3284, - -17750, -21890, -5860, -4450, -25220, 20537, -27969, 26586, 18906, 11527, 17373, -20971, 18136, 32551, - -8123, -17611, 22959, -2247, 8210, 30700, 32007, -26946, 1035, -6583, 30664, 4602, 10426, -29274, - -19000, -29434, -17624, 22989, -22677, -30092, -641, -30925, -19002, 3406, -23649, -7354, 831, 30573, - 23522, 2236, -17258, 9264, -8781, 16606, 5154, 4547, 15223, 8622, -13035, -3945, 8062, 20148, - 21977, -14251, -13995, -25552, 14787, -13281, 24223, 24672, 27466, -24838, 11870, -32513, -3885, 11548, - -23076, -20644, -1973, -4270, 13301, 15012, -13906, 2870, 23313, -25104, 18445, 11067, -2737, -31184, - 22662, -10639, 4305, -19749, -7373, 26863, 27064, -12388, 31256, -1780, 11936, -5753, 67, 32002, - 25944, 22224, -13948, -8830, -23548, 27157, 28415, -8963, 21097, -13542, -23997, -19336, 14488, 20291, - -22286, -5162, 27933, 5046, 4150, 23515, -28443, -3705, 21027, 9598, 7427, -19990, -30000, -26204, - -27322, 847, -5486, -17329, 20293, 3200, -30888, -6742, -5121, 22285, 11892, 7586, 17804, -10199, - 30643, 11413, 19898, -26355, -20576, 4051, -10482, 29703, 27440, 19423, 17420, 26102, -25404, -7520, - -5691, 31897, -5045, 9315, -18486, -13819, -22172, -14379, 18205, -14019, 9117, -5046, -16098, 7235, - 
-11295, -23269, -32238, -29347, 22815, 20541, 4146, -25611, -32731, -16013, 15906, 26460, 7576, -28540, - 21654, -7493, 242, 15360, 8831, 22656, -29709, -18301, -13516, -14703, 32294, -20546, 14042, 13197, - 32578, 17285, -11665, -9742, -7387, -17700, 22175, 6832, 17072, -961, -3024, -28671, -18994, -15034, - 30027, -26547, -29166, 18479, 122, -26531, -15260, 31673, -25203, -13442, -19381, -6116, 1569, 12447, - -7381, 3782, -20298, 27751, 5375, 2250, 6538, -13419, 14635, -25907, 18246, 23655, -17246, 68, - -2051, 7874, -9471, -16650, -2511, 10, -31082, 7976, 25919, 22005, 9975, 19623, -4467, 8358, - -19149, 11591, -15127, -676, -26521, -9238, -17143, 21023, 31696, -7999, -26507, -16719, -19904, -10894, - -25518, -26222, 13925, 5754, -26902, 21042, -6685, -30393, -11854, -6578, -19892, -6727, 2293, 25310, - 4957, -12328, 18955, -22555, -31, -17456, -9822, -6824, 26302, -6059, -18506, -1831, 27645, -11493, - 14805, 18772, -18977, -31590, 3287, 31172, 31618, -11618, 12136, -1302, -31413, 11218, -964, -29936, - 28221, -13652, -8890, 1636, 4941, 976, -23986, -22935, 20352, 23076, -7900, -26594, 25513, 17083, - 20025, -30016, -21943, -15081, 25606, 23596, -22775, 25229, -1144, -9960, -23789, -6521, 14048, -13065, - -31402, 19243, 18505, 22343, -19523, -29896, 3823, -29235, -25003, -20050, -24438, 12754, 8909, 3842, - -26521, -28654}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16/input_tensor.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16/input_tensor.h new file mode 100644 index 00000000..6b9e1fa5 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_int16/input_tensor.h @@ -0,0 +1,36 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int16_t avgpooling_int16_input_tensor[408] = { + -8296, 15727, 13119, 27665, 19127, 9209, 14522, -29511, 16812, 27536, 9997, -11915, 10082, -21858, + -29924, 21141, 15187, 17088, 6187, 9148, -16602, -2146, 13701, 19275, -27401, 10865, -31840, -13973, + 2102, 24057, 21019, -7432, 28804, 22020, 19089, 8377, 15705, 11932, 22232, -31817, -25250, 2518, + 13763, -11451, -1215, -13996, 1014, 16873, 2510, -1901, 6648, 24207, 22781, -25267, -27212, -3009, + -1983, 9565, -8123, 31069, 9466, 2423, -26243, 32300, -12456, 27828, -14743, 19084, 2143, -10720, + -30568, -6541, -650, 8334, 3679, -28084, 15218, 31134, 4051, -8700, -124, 25364, 18850, 30537, + -27840, 22151, 10160, -32555, 11332, -16698, 23180, -15650, 16614, 20565, 21649, -24456, 10572, 7100, + 20320, 19451, -28747, -12298, 8137, -25482, 2812, 30573, -12768, 21164, 766, -1412, 29990, 23362, + -11667, 21344, -5897, 32289, -13103, -16571, -11254, -3471, -18582, -7412, -3922, 6049, -8823, 21881, + 23032, -5713, -19530, 1364, 5629, -16518, -3184, 15198, 28372, 14740, 4259, -30901, 31, 31118, + 11757, -17698, -986, 18372, 13389, 22690, 13253, -24685, -11295, 12294, 26823, 24871, -27226, -15726, + -2878, 13134, 6940, 12671, -2300, 14823, -26592, -30139, 9992, -29579, 3155, -7246, 19295, -5314, + 6797, 5433, 23554, 10757, -19947, 17648, 12707, 14109, 29467, -31909, 7638, -26910, 29183, -20173, + -21726, 7280, 12749, -10021, -17980, -13661, -26805, 30126, -28081, -21606, 24116, -15381, 23409, -15515, + -32603, -5968, 14177, -10547, 14863, -29274, -14391, -4600, -26488, 452, 18786, 11877, 24836, 28465, + -26638, 13413, -27414, -29065, 24430, -21254, -3383, -26177, 16753, -16353, -23435, -15607, 10370, -25707, + -24419, -11770, 20013, 17293, -2822, 19607, 18738, -23504, -16319, -8648, -9903, 1781, -19953, 3442, + 6910, -4824, 2563, -25039, 23976, -30424, -10286, -11642, -7033, -4661, -4946, -1606, 18528, -27245, + -629, 26656, -17478, -23081, 10509, 24868, -8051, -18008, -8525, 23147, 26792, 
16435, -21352, -13063, + -25393, 16952, 24543, 4848, 10048, 11093, 32558, -24200, 11325, -30730, 26027, 3331, -13529, -18574, + 17319, 2069, 5316, -20455, -22293, 30896, -17799, 10657, 4589, 21397, -22247, 10204, 21854, 10964, + -24647, 24111, -9962, 4697, 12289, 6361, 5167, 25684, 17761, 21120, 25252, 3039, 143, -2394, + -12327, 5585, -2014, -1130, 12650, -9601, -7948, 28677, -31475, 26831, -19350, -20170, -24347, -30063, + -31076, 15993, -19285, 25456, -10043, 24674, 20794, -27493, -26581, 22567, -2563, 3000, 3403, -5220, + 3910, 23143, -29777, 15669, -28159, -30506, -20667, -24541, -857, 27506, -15496, 19341, -2607, -28634, + -16004, 12459, -14146, 22879, -23457, 18827, 28429, -2401, -8123, 7741, -26158, 30917, -6256, -19148, + -30162, 25701, -8418, -14789, -27474, 4881, 22752, -28893, -23161, 27220, 1464, 1087, -32447, 18304, + -24851, -24843, 16010, -9173, 11163, 1294, -28350, -10058, -31339, 29999, 21376, -7664, 11596, -29636, + -23281, -30884, 14923, 2902, -16477, 24625, -1264, 25514, 14269, 24033, -13880, 17239, 14225, -29342, + 10437, 134}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16/output.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16/output.h new file mode 100644 index 00000000..851981d9 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_int16/output.h @@ -0,0 +1,21 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int16_t avgpooling_int16_output[204] = { + 3365, -5538, 4417, 9429, 2566, 8813, 14111, -8823, 12989, -118, -3570, 4290, 2931, 7067, + -8815, 15437, 10173, 7957, -655, 901, 5695, 10913, -13450, -462, -3456, 7021, 7674, -3780, + -15442, 3693, 9002, 12962, 3756, 985, 8547, -4152, -13236, -1411, -6562, 17435, 529, -4993, + 6977, -1683, 703, -1031, -6324, 16957, 5444, -5656, -15680, -4773, -1888, 1791, 4195, 3888, + 13955, 7850, -4117, 7358, -1800, -2226, -3402, -51, -1302, -2788, 4240, 3450, 2610, 511, + 5172, -1719, 8270, -15458, 1836, 221, 6247, 781, -5521, -14795, 8376, 5551, 9344, 8621, + -408, 14690, -10509, -5236, -2420, 1791, 8071, 2116, -8084, 8320, 1271, 2415, -3235, -3651, + 19414, 4183, 2215, -9182, -3549, -9154, 268, 1609, 4835, 13414, -257, -662, 5182, 3270, + -6309, 3272, -9836, -3873, 3238, -14058, -5319, -9700, -11121, 4612, -7622, 10076, -6283, -1324, + -649, 2624, -5064, -10855, -10591, 1279, 12420, -4437, 10673, 4579, 5946, -14115, -5271, 2320, + 1024, -4069, 10884, -7912, 8469, -4932, 5096, -7537, -7164, 19171, -3528, 2378, -537, -6490, + -2714, 1552, -4250, 8932, 17036, -6047, -6398, 1703, 3947, -6887, -1835, -9150, -13085, 4333, + -24037, -8850, -11683, -8236, 3626, -20947, 9007, -4425, -5446, 1082, 8124, -15766, -12201, -10504, + 6554, 10732, -12033, 8093, 12316, 6446, -17160, -10451, 6088, 3761, -15659, 12804, -9743, 14673, + 7480, 1840, -9807, -2677, 23220, -1161, 9671, 4840}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16/output_ref_data.h deleted file mode 100644 index 42ce5d9b..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_int16/output_ref_data.h +++ /dev/null @@ -1,20 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.8.0 as reference. 
-#pragma once -#include - -const int16_t avgpooling_int16_output_ref[204] = { - 10100, 16904, -17530, 8193, -7522, -4380, 8466, -14213, -12700, 16083, 1756, 6692, -3191, -6397, - 1400, 17756, 4830, 16709, 17753, 3925, -11127, 1443, -9516, 2497, -3647, 2483, -1817, 13023, - -24141, 8662, 425, 3971, 1926, 1503, 4959, -2729, 3069, -5474, -3323, 1325, 1475, 9775, - 8160, -9986, -6227, -3015, 1890, 2099, 2807, -18586, 10065, 10466, 11823, -7577, 7930, -13127, - -9546, -196, -357, -5884, 12965, -6179, -3297, -8492, -4010, 8377, 10763, -17, 13125, 9355, - -293, -1162, 3573, 278, -3447, -5985, -5376, -6827, 11559, -14514, 3690, -2378, 11157, 4185, - -5364, -3555, 6501, 1788, -8764, -5434, -478, 2557, 7013, 1435, -6445, -7532, -2565, 3485, - 6094, 6628, -12965, 12420, 1266, 11613, -1404, 2652, -10008, -7064, -15573, 1100, -3468, 5461, - -237, -8868, -6394, -8282, 5015, 1942, -11396, 8452, 2802, -1516, -7639, 11169, -247, -7942, - 2289, -7422, -1020, 7871, -532, -3098, -9806, 10606, 6815, 1901, -5167, -5653, 6769, 3882, - 2816, -8253, -2297, 3187, -5072, 1753, -13617, -1474, -4786, 1232, 1194, -10716, 11608, -2074, - 6402, 2815, -126, -10690, -11780, -22988, 8405, -2056, 3648, -4782, -13064, -9846, -2559, 3108, - 405, -16874, 66, -5050, -7146, -176, 13296, 8176, -19778, 1266, -7049, -13520, 8425, 8299, - -7627, -17886, 7852, 17233, 4125, -15533, -2019, 10802, 1225, 5764, -14427, -10599, -2071, -6915, - -34, -15778, -635, 6240, 6780, 2149, -9122, 4913}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16/test_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16/test_data.h index 33faa135..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_int16/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_int16/test_data.h @@ -1,4 +1,3 @@ -// Generated by generate_test_data.py using TFL version 2.8.0 as reference. 
#include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/config_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/config_data.h index 47dad8c7..35da8307 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/config_data.h @@ -1,20 +1,20 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. #pragma once -#define AVGPOOLING_INT16_1_OUT_CH 2 -#define AVGPOOLING_INT16_1_IN_CH 2 +#define AVGPOOLING_INT16_1_BATCH_SIZE 3 +#define AVGPOOLING_INT16_1_INPUT_N 3 #define AVGPOOLING_INT16_1_INPUT_W 9 #define AVGPOOLING_INT16_1_INPUT_H 1 -#define AVGPOOLING_INT16_1_DST_SIZE 30 -#define AVGPOOLING_INT16_1_INPUT_SIZE 18 -#define AVGPOOLING_INT16_1_OUT_ACTIVATION_MIN -32768 -#define AVGPOOLING_INT16_1_OUT_ACTIVATION_MAX 32767 -#define AVGPOOLING_INT16_1_INPUT_BATCHES 3 -#define AVGPOOLING_INT16_1_FILTER_X 1 -#define AVGPOOLING_INT16_1_FILTER_Y 1 -#define AVGPOOLING_INT16_1_STRIDE_X 2 -#define AVGPOOLING_INT16_1_STRIDE_Y 1 -#define AVGPOOLING_INT16_1_PAD_X 0 -#define AVGPOOLING_INT16_1_PAD_Y 0 +#define AVGPOOLING_INT16_1_INPUT_C 2 +#define AVGPOOLING_INT16_1_FILTER_W 1 +#define AVGPOOLING_INT16_1_FILTER_H 1 +#define AVGPOOLING_INT16_1_STRIDE_W 2 +#define AVGPOOLING_INT16_1_STRIDE_H 1 +#define AVGPOOLING_INT16_1_PAD VALID +#define AVGPOOLING_INT16_1_ACTIVATION_MAX 32767 +#define AVGPOOLING_INT16_1_ACTIVATION_MIN -32768 +#define AVGPOOLING_INT16_1_OUTPUT_C 2 #define AVGPOOLING_INT16_1_OUTPUT_W 5 #define AVGPOOLING_INT16_1_OUTPUT_H 1 +#define 
AVGPOOLING_INT16_1_PADDING_H 0 +#define AVGPOOLING_INT16_1_PADDING_W 0 diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/input_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/input_data.h deleted file mode 100644 index 1276f3aa..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/input_data.h +++ /dev/null @@ -1,10 +0,0 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. -#pragma once -#include - -const int16_t avgpooling_int16_1_input[54] = { - 27192, -2386, -28826, -3924, 23736, 6052, -30572, -3732, 20092, 12962, 32476, 28707, 5913, 1851, - -18098, 9489, 16431, 8809, -1394, 12620, 22332, -8130, 23113, -15952, -5506, 10398, 14291, 72, - -25480, -23582, 1007, 32440, 32305, 27510, 1392, 4414, 19817, 2381, -10575, -8733, 23500, -28769, - 17265, 7489, 434, -19722, 31607, -25898, -31498, 10483, -30391, -30468, -19374, 18529}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/input_tensor.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/input_tensor.h new file mode 100644 index 00000000..f0e68971 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/input_tensor.h @@ -0,0 +1,10 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int16_t avgpooling_int16_1_input_tensor[54] = { + 27758, 8744, 30107, -31160, -13574, 13052, 487, -7899, 9681, 22910, 13410, 13874, 15670, -28523, + -23398, -939, 23640, -20728, 10340, 13031, -19137, -9360, 9770, -15073, 27774, -1155, 17142, 11952, + 566, 11883, 8570, -22733, -24714, 30625, -1125, 27645, -223, -24428, 19516, -4038, 16994, -27959, + -222, -1752, -28619, -1621, 9101, 25816, -19294, 31677, -28123, 22137, 17170, 31555}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/output.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/output.h new file mode 100644 index 00000000..39bccf8d --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/output.h @@ -0,0 +1,8 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int16_t avgpooling_int16_1_output[30] = { + 27758, 8744, -13574, 13052, 9681, 22910, 15670, -28523, 23640, -20728, 10340, 13031, 9770, -15073, 17142, + 11952, 8570, -22733, -1125, 27645, -223, -24428, 16994, -27959, -28619, -1621, -19294, 31677, 17170, 31555}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/output_ref_data.h deleted file mode 100644 index 94316b79..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/output_ref_data.h +++ /dev/null @@ -1,8 +0,0 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
-#pragma once -#include - -const int16_t avgpooling_int16_1_output_ref[30] = { - 27192, -2386, 23736, 6052, 20092, 12962, 5913, 1851, 16431, 8809, -1394, 12620, 23113, -15952, 14291, - 72, 1007, 32440, 1392, 4414, 19817, 2381, 23500, -28769, 434, -19722, -31498, 10483, -19374, 18529}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/test_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/test_data.h index b00cc3f6..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_1/test_data.h @@ -1,5 +1,3 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. #include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/config_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/config_data.h index 72a71c3a..3c3aaa09 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/config_data.h @@ -1,19 +1,20 @@ -// Generated by generate_test_data.py using TFL version 2.8.0 as reference. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
#pragma once -#define AVGPOOLING_INT16_2_OUT_CH 20 -#define AVGPOOLING_INT16_2_IN_CH 20 +#define AVGPOOLING_INT16_2_BATCH_SIZE 1 +#define AVGPOOLING_INT16_2_INPUT_N 1 #define AVGPOOLING_INT16_2_INPUT_W 9 #define AVGPOOLING_INT16_2_INPUT_H 1 -#define AVGPOOLING_INT16_2_DST_SIZE 100 -#define AVGPOOLING_INT16_2_INPUT_SIZE 180 -#define AVGPOOLING_INT16_2_OUT_ACTIVATION_MIN -32768 -#define AVGPOOLING_INT16_2_OUT_ACTIVATION_MAX 32767 -#define AVGPOOLING_INT16_2_INPUT_BATCHES 1 -#define AVGPOOLING_INT16_2_FILTER_X 1 -#define AVGPOOLING_INT16_2_FILTER_Y 1 -#define AVGPOOLING_INT16_2_STRIDE_X 2 -#define AVGPOOLING_INT16_2_STRIDE_Y 1 -#define AVGPOOLING_INT16_2_PAD_X 0 -#define AVGPOOLING_INT16_2_PAD_Y 0 +#define AVGPOOLING_INT16_2_INPUT_C 20 +#define AVGPOOLING_INT16_2_FILTER_W 1 +#define AVGPOOLING_INT16_2_FILTER_H 1 +#define AVGPOOLING_INT16_2_STRIDE_W 2 +#define AVGPOOLING_INT16_2_STRIDE_H 1 +#define AVGPOOLING_INT16_2_PAD VALID +#define AVGPOOLING_INT16_2_ACTIVATION_MAX 32767 +#define AVGPOOLING_INT16_2_ACTIVATION_MIN -32768 +#define AVGPOOLING_INT16_2_OUTPUT_C 20 #define AVGPOOLING_INT16_2_OUTPUT_W 5 #define AVGPOOLING_INT16_2_OUTPUT_H 1 +#define AVGPOOLING_INT16_2_PADDING_H 0 +#define AVGPOOLING_INT16_2_PADDING_W 0 diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/input_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/input_data.h deleted file mode 100644 index b26b8a3d..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/input_data.h +++ /dev/null @@ -1,18 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.8.0 as reference. 
-#pragma once -#include - -const int16_t avgpooling_int16_2_input[180] = { - -5957, 30158, -14349, 1559, 11497, 32661, -19774, 11097, -24625, 29471, -24156, -2519, 3854, 18626, - 714, 16581, 9796, -8099, 26191, -29854, 15160, 31489, 4578, -6255, -13595, -26941, 23689, 602, - 53, 14857, -9614, -23497, -15155, 31489, -10760, -15043, 4125, 7338, 12968, 32589, 8742, -9293, - 21028, -25726, 20075, -26840, -631, 20009, -21712, 28135, 14571, 8480, 24731, -31656, -20108, 17082, - 6610, 9407, 27986, -32720, -27431, -32100, -27834, 26916, 1296, -11250, 24641, -18682, 6229, 30809, - 5642, -20157, -125, 17157, -12905, -15856, -31970, 2856, 28020, 11787, -26347, -9654, 28438, -29823, - -4436, -23746, 27152, -27994, -15557, 30303, -29758, -15752, -30747, 3450, -8094, 2661, 19044, 21702, - -1097, 19899, -28586, -13386, 19198, 30768, -21782, -22752, 18072, 26945, 26394, -32225, 14463, 11589, - -27967, -22719, 14473, -20168, 28002, 26454, 17644, -18962, 12794, 30265, -8826, 8470, 13418, -12154, - 17795, 24640, -18437, -12353, -1209, 6628, 18422, -4642, 28791, 17153, 5795, -16147, 30593, 22501, - -22651, -16693, -13160, -19450, 17371, 14272, 28364, -6441, 13926, 20108, 15258, -28133, 3137, 19019, - -3510, -11679, -32379, 7126, -10469, 24132, -14429, 27137, -3711, -13117, 21690, -24365, 25503, -23019, - 17958, -19406, 30844, -13490, 6323, 12822, 21955, 26273, 28396, 6695, -5006, 1364}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/input_tensor.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/input_tensor.h new file mode 100644 index 00000000..bb511363 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/input_tensor.h @@ -0,0 +1,19 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int16_t avgpooling_int16_2_input_tensor[180] = { + -17986, 9880, -6757, -4767, 21587, -19336, -5048, 13633, -3705, -18560, 29702, -6862, -19212, -16052, + -14865, -4229, 8298, -22193, -7210, 5628, 10276, -27734, 11431, -4385, 16992, 28200, 21914, -24760, + -307, 25237, -5094, 15520, -18073, -29288, 19885, 15041, -31621, 10396, -23723, 17748, -1200, -19188, + -12521, 20351, 27616, 13724, -11409, -5473, -28716, 27366, -28387, -16035, -15083, -10263, -13855, -12813, + -31977, 9275, -15359, -2155, -9953, 21718, 5467, -15407, 11337, 16637, -25566, -6963, -18161, -22182, + 11736, 31126, 17540, -17434, 23207, 29416, 18500, 3580, -15078, 24477, -9177, -25839, -19906, 2463, + 9372, 4946, 12295, -4949, 7064, -30136, -30645, 11844, -413, -21152, 7406, -20422, -23081, -31233, + -15270, -11560, -23265, 11754, -13513, -15765, -18119, 16762, -7441, -26367, 3049, -16595, -28676, 9258, + 10416, 21277, -2678, -11210, -12712, 15370, -13775, -1312, 65, 23632, -7533, -26835, -10564, -32017, + 5865, -4061, 13577, 22468, 30424, 24473, 7747, -31211, -19918, 238, 28366, 24732, -5618, 9105, + 29491, 24243, 9679, -14565, -2923, -3107, -27701, 9638, -29485, -7453, 908, 32394, -24414, 31090, + 22246, 899, -27487, 25572, 30977, -16908, -13461, -24916, 27170, -27811, 25735, 20605, -28967, 5170, + -21704, 13186, -2561, -28669, 25487, -27868, -14376, 8219, -29994, 29650, 12758, -9021}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/output.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/output.h new file mode 100644 index 00000000..1d0c67d1 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/output.h @@ -0,0 +1,14 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int16_t avgpooling_int16_2_output[100] = { + -17986, 9880, -6757, -4767, 21587, -19336, -5048, 13633, -3705, -18560, 29702, -6862, -19212, + -16052, -14865, -4229, 8298, -22193, -7210, 5628, -1200, -19188, -12521, 20351, 27616, 13724, + -11409, -5473, -28716, 27366, -28387, -16035, -15083, -10263, -13855, -12813, -31977, 9275, -15359, + -2155, -9177, -25839, -19906, 2463, 9372, 4946, 12295, -4949, 7064, -30136, -30645, 11844, + -413, -21152, 7406, -20422, -23081, -31233, -15270, -11560, 65, 23632, -7533, -26835, -10564, + -32017, 5865, -4061, 13577, 22468, 30424, 24473, 7747, -31211, -19918, 238, 28366, 24732, + -5618, 9105, -13461, -24916, 27170, -27811, 25735, 20605, -28967, 5170, -21704, 13186, -2561, + -28669, 25487, -27868, -14376, 8219, -29994, 29650, 12758, -9021}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/output_ref_data.h deleted file mode 100644 index cf4d6992..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/output_ref_data.h +++ /dev/null @@ -1,13 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.8.0 as reference. 
-#pragma once -#include - -const int16_t avgpooling_int16_2_output_ref[100] = { - -5957, 30158, -14349, 1559, 11497, 32661, -19774, 11097, -24625, 29471, -24156, -2519, 3854, - 18626, 714, 16581, 9796, -8099, 26191, -29854, 8742, -9293, 21028, -25726, 20075, -26840, - -631, 20009, -21712, 28135, 14571, 8480, 24731, -31656, -20108, 17082, 6610, 9407, 27986, - -32720, -26347, -9654, 28438, -29823, -4436, -23746, 27152, -27994, -15557, 30303, -29758, -15752, - -30747, 3450, -8094, 2661, 19044, 21702, -1097, 19899, 12794, 30265, -8826, 8470, 13418, - -12154, 17795, 24640, -18437, -12353, -1209, 6628, 18422, -4642, 28791, 17153, 5795, -16147, - 30593, 22501, -14429, 27137, -3711, -13117, 21690, -24365, 25503, -23019, 17958, -19406, 30844, - -13490, 6323, 12822, 21955, 26273, 28396, 6695, -5006, 1364}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/test_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/test_data.h index 33faa135..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_2/test_data.h @@ -1,4 +1,3 @@ -// Generated by generate_test_data.py using TFL version 2.8.0 as reference. #include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/config_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/config_data.h index ebd065a3..814c1418 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/config_data.h @@ -1,19 +1,20 @@ -// Generated by generate_test_data.py using TFL version 2.8.0 as reference. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
#pragma once -#define AVGPOOLING_INT16_3_OUT_CH 21 -#define AVGPOOLING_INT16_3_IN_CH 21 -#define AVGPOOLING_INT16_3_INPUT_W 1 -#define AVGPOOLING_INT16_3_INPUT_H 20 -#define AVGPOOLING_INT16_3_DST_SIZE 147 -#define AVGPOOLING_INT16_3_INPUT_SIZE 420 -#define AVGPOOLING_INT16_3_OUT_ACTIVATION_MIN -32768 -#define AVGPOOLING_INT16_3_OUT_ACTIVATION_MAX 32767 -#define AVGPOOLING_INT16_3_INPUT_BATCHES 1 -#define AVGPOOLING_INT16_3_FILTER_X 1 -#define AVGPOOLING_INT16_3_FILTER_Y 3 -#define AVGPOOLING_INT16_3_STRIDE_X 1 -#define AVGPOOLING_INT16_3_STRIDE_Y 3 -#define AVGPOOLING_INT16_3_PAD_X 0 -#define AVGPOOLING_INT16_3_PAD_Y 0 -#define AVGPOOLING_INT16_3_OUTPUT_W 1 -#define AVGPOOLING_INT16_3_OUTPUT_H 7 +#define AVGPOOLING_INT16_3_BATCH_SIZE 2 +#define AVGPOOLING_INT16_3_INPUT_N 2 +#define AVGPOOLING_INT16_3_INPUT_W 22 +#define AVGPOOLING_INT16_3_INPUT_H 12 +#define AVGPOOLING_INT16_3_INPUT_C 8 +#define AVGPOOLING_INT16_3_FILTER_W 6 +#define AVGPOOLING_INT16_3_FILTER_H 5 +#define AVGPOOLING_INT16_3_STRIDE_W 9 +#define AVGPOOLING_INT16_3_STRIDE_H 5 +#define AVGPOOLING_INT16_3_PAD SAME +#define AVGPOOLING_INT16_3_ACTIVATION_MAX 32767 +#define AVGPOOLING_INT16_3_ACTIVATION_MIN -32768 +#define AVGPOOLING_INT16_3_OUTPUT_C 8 +#define AVGPOOLING_INT16_3_OUTPUT_W 3 +#define AVGPOOLING_INT16_3_OUTPUT_H 3 +#define AVGPOOLING_INT16_3_PADDING_H 1 +#define AVGPOOLING_INT16_3_PADDING_W 1 diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/input_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/input_data.h deleted file mode 100644 index 02d3881c..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/input_data.h +++ /dev/null @@ -1,35 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.8.0 as reference. 
-#pragma once -#include - -const int16_t avgpooling_int16_3_input[420] = { - -8167, 9953, -30970, -20127, -32204, -3877, 28989, -22233, 23826, -24683, 20218, 16766, 30855, -5392, - 20987, -23457, -19048, -15302, -10875, 23428, -23886, -31971, -9161, -29906, 21922, 15572, -9668, 10966, - -21345, 6308, 5178, -23216, 4630, 1372, -2786, 9473, -19288, 16080, 27229, 13239, -4180, -25824, - -31997, 13716, -16234, -27853, -14440, -32241, 8309, 5021, -29199, 20005, 1482, 1680, -24401, 4431, - 32315, -1168, -17745, 10447, -285, -23602, -29472, -24334, -10942, -21704, -25968, -7404, 1716, 9899, - -7548, -3948, 29019, -11224, 20531, 25412, 12992, 7959, 30012, 22852, 5678, 23457, 9503, 12656, - 22058, 8552, -15227, 25255, 17103, 30747, 4791, -4640, 11916, -11863, -27039, 15682, 11894, 16532, - -8385, -20305, 13086, -13150, -10770, -22948, -25894, 13915, -18128, 6572, 18637, -8827, 28893, 19443, - 6455, -949, -7395, 635, -907, -13538, -30751, -15257, 24735, 19042, -12185, -8829, 22094, -6273, - 777, -14711, -28795, 24244, -1146, 7476, 9804, 10270, -5651, -11305, -23496, -19365, 658, 10288, - -26135, -26423, 22988, -11797, -31161, -6543, 6396, 5973, 22218, 19886, -32510, 28559, 5105, -206, - 19026, 13426, 114, 3482, 21911, 30183, -32592, 7892, -16235, -16720, -3259, 24156, 28369, 3683, - 30123, 17569, 31589, 1169, -1498, -31538, 16643, -2047, 11981, 23635, 4581, 20179, -21518, 11478, - 31914, -27382, -31744, -3637, -2032, -18282, -15921, -32310, 9440, 13132, -29253, 8494, 2705, 29867, - 28631, 29548, -23482, -24112, 10050, -25821, -32194, 29558, 935, 21994, 12690, 31984, 29976, 28593, - -11231, 16759, -1536, -2220, -23410, -4181, -14118, -241, -27889, -10172, -24593, -30006, -496, -18424, - 32076, -31179, -24174, -21241, 18357, 14599, 1503, -5360, -19401, 25057, -20925, 32313, -30412, 24624, - -26796, -8105, 12250, -26985, -2778, -14946, 20070, -31437, 15546, 7509, -30389, -31974, 8736, -26474, - 729, 17146, -25491, 7723, -23758, -16988, 17932, -7644, 30694, -21530, 28883, 1761, 
11893, 7781, - 20275, -26992, -2079, -24968, 25503, 16658, 26584, 3241, 999, 28406, -24941, -7314, -15485, -14381, - -27777, 18258, 29126, -21238, 11605, 9045, 10380, 7691, 6596, -21652, -8262, 13600, -18559, -18334, - 28149, 8748, 26829, -17512, -20612, -15127, 25368, 11505, -3282, -32284, 8644, 19363, -2824, -28405, - 14068, -18088, -29360, 17135, -23197, -27602, -31322, 11909, 16061, 7491, -22457, -23392, -20176, 28954, - -25104, 18125, -30537, 4230, -21936, -31834, 20044, -19979, 10540, -1627, 24858, 21106, -23459, 21530, - 2969, -10624, 13139, 16242, -29853, 5145, 13559, 11323, 26925, -29124, -4283, -12468, 13591, 31104, - -11121, -31160, -1231, -27306, -27184, 12603, -16076, -16166, 2866, -23375, 12074, -4593, 29368, 24940, - 18310, -5374, -855, 25115, 2130, -32742, -29370, -24188, 32219, -21616, -8501, -12134, -28482, -32512, - 17168, -7938, -28523, 25767, -11587, 22549, -14045, 18045, -18911, -24919, 27439, -9156, -170, -5959, - 30249, 26915, 5008, -30965, -6963, 14565, 3633, -5592, 2932, -17666, 31786, 21971, 20050, -1879, - 21876, 19463, -30706, 5078, 26977, 25317, -3111, 17098, -4318, -18887, 25004, -177, 6273, -2946}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/input_tensor.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/input_tensor.h new file mode 100644 index 00000000..ef51809c --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/input_tensor.h @@ -0,0 +1,308 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int16_t avgpooling_int16_3_input_tensor[4224] = { + -2083, 27910, -10287, -16772, 26987, 29941, -18426, 8252, -9938, -15477, 25545, -18570, 26554, -13646, + 16495, -16284, 21104, 3975, 27496, 19280, -18492, -20416, -14193, 27980, -18627, 24023, 25364, -11228, + -6761, 10973, -16475, -25385, -4704, -2164, -17660, 9597, -27954, 16178, 30756, 30687, -18, 18946, + 11811, 26960, 13949, 29647, 18087, -23957, 19619, 13708, 31554, 17171, -19412, 3349, 12435, -32586, + -25784, -22239, -10557, -26133, -3447, -7133, 29245, 3578, 12100, 23907, 12865, 21669, 11658, -32689, + -9607, -29803, -29528, -8088, -24894, -32599, -7200, 18726, 488, 31196, -26630, -3518, 16457, -27222, + 15010, -19154, -31407, 23545, 19036, 5427, -17417, 448, 3275, 13962, 24402, -257, -15563, 18509, + -4158, 27153, -23705, -15708, 2668, 10098, -31368, 30342, 7674, 6383, -30706, -4107, -22249, 10733, + 26088, 11848, -7832, -24227, 21739, 15932, -24169, 28565, 21038, 16061, -28709, -25487, -8384, -626, + 12262, -32104, -11666, 12357, 15244, 6474, -3605, 15935, -31080, -11654, 27328, -20491, -32115, 16140, + -24288, 29215, -14881, 20965, 10550, 18623, -21310, 32450, -30868, 8316, 13858, -10313, 16248, -18815, + -31140, 180, 10896, 20555, -29498, 21748, -25897, -14380, -17258, 32333, -19696, -8069, 22210, 14378, + 30047, 23524, 18276, -1025, -7814, -27859, -29836, -17445, -18063, 19052, 17303, -6698, -30180, -1079, + -11666, 27327, 1996, -939, -20072, -24570, -7371, 19114, -28596, 28980, -26949, -4183, -12116, -16917, + 7753, 14124, 3252, 10140, 16097, 31205, 21038, 22982, -23013, 21851, 27377, 27204, -12798, 4545, + -30553, 2695, 17818, -5898, 2512, -21744, -12098, -12071, 152, 1141, 23179, 12411, -19811, 268, + 2948, -12025, -29197, -29181, -27120, 7672, -17518, 2973, 30776, 21019, 12532, 20485, 13165, 28529, + -30247, 22383, -16574, -23108, -17777, 459, -14518, 24515, 4413, -7615, 2160, 28835, 6555, 17024, + 16505, -9554, 30493, 22416, 28526, 31202, 4297, -7892, 16593, 
10757, 17133, 8135, -5549, 10100, + -6638, -24120, -4559, 26585, 25139, -4383, 22067, 2736, -6837, -25096, 17166, 5751, 20659, -13472, + -12999, -9188, -17712, 10748, -13201, -25053, 13262, -13321, -6626, 2894, -25979, 12415, 29587, 18986, + 10639, 146, 16900, -8733, -14598, 7836, 27017, 9513, -5969, -23513, 5136, -26763, -25771, 11571, + -6900, -18547, 12419, -19089, -12183, 14498, -893, 17517, 5680, -21291, -14447, 10622, 29198, 9690, + -25088, -13895, -8719, 30751, 2127, -14123, 4842, 26131, 25930, 20259, 22178, 14165, -19262, 8537, + -18995, -6325, 10466, 15995, -16450, 2503, -692, 24752, -1924, 4013, 16598, 25561, 20254, 25522, + 30135, -32261, -15801, 21855, 18824, -8439, -4417, 26991, 27285, 4140, -22119, -30844, 21054, -10089, + 28370, -8277, 31322, 24336, 1486, -8472, -10701, -6533, 21091, -8272, -23971, -8172, 15629, 26864, + 602, -3117, -24483, 802, 13452, -8322, 27143, -30817, -24404, -14643, -18461, 2669, -1617, 30636, + 30347, 28771, 12188, -25260, 12777, -15277, -15248, 4021, 22095, -31350, 26595, -25269, 13723, -21949, + 15749, 11404, -29926, 13801, 9182, -29663, -5191, 30368, -9475, 19842, -29366, 13817, 2102, -3198, + 2456, -2099, -7347, 2002, 5299, 20824, -19732, 19529, 3873, -14138, -17129, -17879, 6630, 13376, + 21912, 16675, 24787, -26596, 17768, -14812, 11935, 14596, 32588, -543, 21050, -601, 27593, -4114, + 30220, 24013, 16128, 1276, 20975, 25059, -20387, -14724, -8723, 30702, 27744, 27549, -30544, -25375, + 7840, -15768, 3096, -5882, 8641, -24680, -2144, -30976, -4465, 17657, 23794, -28215, 31988, 22920, + -2191, -751, -12227, -12423, -12276, -27076, 29716, -19108, -116, -1672, -8556, -23212, -2933, -19303, + -30179, -20144, -1828, 6522, -24966, 19727, -3393, 19757, -32156, 12505, -1716, 22221, 4377, 10640, + -5111, -27578, -6700, -1792, 230, -15891, -11530, -29657, -22667, -13778, -3117, 8890, 32392, -26149, + 14427, -13942, -18823, -16998, 16414, 32319, 6645, -3506, -32296, 13230, -29271, 1865, 27279, 6700, + -25142, 20612, 12229, -27882, 
-17469, -2921, 21214, 79, -27138, 27093, -4668, 22414, -21049, 20480, + -1126, 3190, -11931, -21276, 717, -13914, 7480, -1301, -32767, -24619, 7812, -17072, 3144, -8093, + -5016, -31495, 25989, -13588, -13917, -19579, -3599, -8113, -15120, 31307, -9355, 14386, 23609, 23999, + -20778, 6312, 12939, -14303, 26957, 18817, -30132, -30947, -28939, -28904, -23158, 3370, 8701, -30468, + -30734, -20726, 8738, -31883, 30623, -23502, -1649, -12116, 1853, 32466, -7636, -5443, 32004, -25212, + 1597, -27407, 16670, -19088, -24050, -18116, -29758, 15282, 11673, -11996, -27349, 23629, 28688, -20434, + -11589, 16694, 21024, 89, 11392, -16415, 7074, 23950, -10263, 90, 30224, 1107, -19173, 8444, + 21930, -15599, -8725, 17481, 32245, -16729, -12336, -16856, 20234, -30480, 18770, -657, -10699, 4160, + -10674, 1005, 26544, -20202, 15423, -24761, 21359, 27316, 3285, 27464, 1526, -23712, -31575, 32100, + -29953, 18162, 15341, -3345, 23553, -10840, -28286, -3020, -7845, 22494, -12289, -7023, 24507, 6731, + -7294, 13311, -16353, -22953, -8371, 22994, -12006, 10898, 12012, 6848, 30725, -16545, -12983, -12455, + 19061, 5874, -473, 22444, 19568, 28975, 21055, -3921, -24843, -4131, 8699, -13407, 17490, -29533, + 27164, 24678, 23955, -14445, 12500, -16848, 1410, 23072, 11741, 18329, 12256, -913, -28904, -30485, + 4852, -9071, 3984, -172, 17309, 20092, -12, 25532, 27122, 6933, 3786, -13128, -4941, -30257, + 30429, 4409, 21485, -1660, -13376, 13510, 32662, 9713, -32590, -29611, 14643, -10958, 28592, -9262, + -6096, 3238, 6185, 321, -21600, -11335, 31177, -13360, 32474, -22813, -11651, -6483, 17893, 19227, + 25021, -25129, -32138, 6051, 6105, -20538, 1958, 4028, 9000, -1595, 22381, -22149, 16748, 31258, + -31724, -22762, 24135, 12381, -32604, 18657, 32178, 27890, 991, 23719, 6181, -4489, -31168, -5912, + 19833, -3901, 3470, 22339, 19449, 28407, 5903, -7806, 28465, 13174, -3500, -4059, 21085, -13794, + -16792, 17464, -18773, 26440, -4546, -9421, 621, -21469, -31038, 31466, 19702, -29591, -4242, 
-10352, + -23809, 29175, 5566, 7630, 30863, -18125, -12614, 9286, 9474, -9376, -27298, 15611, 26853, -22996, + 16182, 18639, 11968, -31996, -13868, -11275, -31760, -3581, 27414, 2991, -24099, -32766, 6133, 14394, + -31815, -28271, 28278, 222, 3889, 22695, -23656, 15974, 15215, 26523, -12603, -13063, -5170, -25801, + 12689, 4332, -20398, -12077, -1597, 6426, -788, 27381, 10826, 20956, -23168, 15219, 22473, -7197, + -23233, 5849, -25726, -25157, 4817, 24129, 6881, 31225, -16193, 22744, 3250, 22003, -18687, 14763, + -10703, 28993, 18864, 17313, -3429, -28372, -818, -3739, 15116, 18386, 6555, -11710, -25028, 20802, + -12337, 752, -19369, -17830, 11481, -14065, 15656, 31073, -6617, -9097, -24260, -3370, 18856, -24188, + -10769, 25160, -10386, -31039, 26442, -75, 807, 9846, -1173, -4702, -2577, -29792, -17577, -5199, + 22584, 18051, -11186, 25220, 6386, 20349, 11537, 17679, -25520, -3565, -12323, -13290, -14835, -26540, + 17149, -3550, 9830, 15040, 17813, 17557, -14076, 11397, -2874, -26470, 2330, -16647, -14113, -13882, + 9211, 13295, 14728, -5880, 12332, -11624, 4136, -22252, -19149, -22629, -18213, 26737, -14491, 12720, + -7094, -7078, -13286, -781, 2873, 16008, -16442, 28563, 17279, -20066, 30471, -24082, 28285, 31894, + -4467, -6926, -31000, -29704, 26499, 30890, 26251, -31870, -29468, -8606, -23930, -22836, 30486, -3997, + 31071, 4905, -15966, -5387, -28869, 4844, -29576, 10932, 17311, 6147, 15377, 17773, 10304, 8029, + 24778, -1365, -18203, -16403, -12091, -259, 6501, 3599, 21139, -12475, 20777, -8078, -5493, 30726, + 24808, -11627, 20441, -31575, 2376, 31597, -4792, -26615, -13626, 20548, 9451, -26819, -5948, -23879, + -30937, 18504, -1304, -28424, -10899, 17853, 12352, -3060, 4281, -27137, -30565, -27263, 24830, 22704, + 32627, -31401, -7196, -16914, -30070, 3525, -16236, 7099, -7342, 15101, 16682, 2425, 14819, 4751, + -22956, 6114, 6378, -12007, -29801, -2476, 224, 1885, 14201, 27897, -2792, 22690, 30663, -4170, + 31870, 14402, 18131, -25219, -20165, -32714, 
25045, -925, 832, -10955, -27683, 18121, 9972, 14104, + 29032, 15497, -28154, -2308, 15974, 13768, -23068, 21344, -12656, 2415, 31313, -27877, -1870, 2093, + 16578, -5118, -32703, -28549, 22398, 2821, -11143, -31824, -16886, 13089, -21640, 859, -29824, -7847, + -2453, 7666, 25274, -10183, 21244, -8103, 18886, -30795, -28791, -32288, 20113, -17831, -5154, 11657, + 8013, 18218, 2101, 21365, -15170, -8433, -17217, -20425, -20933, -10511, 29453, 8220, -11893, 15548, + 6799, -3418, -12257, -19635, -29898, 30616, -8930, -22444, -22048, 10479, 7156, -2979, -4996, -29408, + 26306, -5062, -12989, -6451, -15769, 14919, -21690, -6293, -7110, 19372, -23867, 16612, 11614, 30274, + 26987, -2457, -21722, -1874, 15893, 25140, -14144, -26037, 17016, 31117, -10993, -3915, 9319, 24980, + -1981, 11377, -18910, -12123, -25869, 1642, -29447, 17403, 27299, -10550, -14560, 28596, -28171, 11037, + -20934, -27423, -32731, 29888, 3242, -30759, -5702, -4142, -22218, 21657, -7994, 20152, -14490, 17104, + -12277, -20393, -5503, 22677, 30294, 15110, 28085, -20149, 585, -26540, 9078, 6003, -25423, -5930, + -8663, -3222, 676, -12208, 21207, 31928, -17417, -12116, -26510, -31842, 12318, -2366, 18589, 6744, + 11501, 3269, -26291, -25183, -1177, -9398, -13371, 925, 32747, -6700, 12820, 23570, -19464, -20027, + 9831, 24789, -7396, 3327, 8680, -13715, 19784, -18349, 5075, -20896, 18226, -3646, -18225, 1134, + 3114, -1081, 31243, -1061, -19260, 18251, -1569, -13177, 30007, 29446, 31685, 30785, -27799, -12908, + 13087, -1831, -3739, -1116, -17588, -9467, 10754, -29567, -4397, 2099, -6291, 3951, -28509, -1874, + -23285, -9408, -32146, 794, 25249, -9432, 4871, 22399, 6980, -32566, -12597, 24834, 17063, 30831, + 23022, -21983, -8588, -28918, 13616, -18124, 26950, -27442, 6947, -12551, -226, 23915, -17474, 22772, + 12377, 958, 28229, -2666, 101, -22379, 51, -18227, -23400, 16196, 10387, -19274, 10937, -24326, + -15954, -19001, 28097, -25988, -291, -3125, 32375, -23226, 5426, 28450, -16223, 16497, 9008, 
25150, + 19911, -20527, 14850, 31289, 10462, 13175, -20506, 25731, 32302, 22783, 4208, 30547, -24321, 26953, + 26069, 15206, -12691, -16888, 32742, 25808, -29800, 23115, -11972, 29100, 5760, 3896, 13285, 31253, + 5752, 26882, -21111, 23156, -15693, -9883, 15747, -7000, 32407, -15372, 22013, 19784, 7078, -29493, + 30263, 32209, -18839, 3208, -25527, 31587, -14391, -11263, 14873, 31635, -2759, -3855, 2652, -32221, + -24541, -13972, 26571, -24998, 9465, -7477, 18022, 8036, -18905, -31300, -7380, 6512, 29422, -20612, + -19884, 22383, -22196, -12331, -11228, 28107, 7570, 20028, -5826, 7897, -24305, -20890, 7319, 2105, + 19588, 12563, 9787, 25897, 5410, -16158, -8163, -30688, -12632, -21916, -24478, -5775, -24445, -11981, + 3999, -26553, -10268, 6589, 31358, 13865, 27269, 31369, -6816, -24289, 17125, 5419, -28131, -31870, + 26608, -32432, -27753, 16663, 2538, 165, 15916, 25359, 23182, -27281, 8797, 7314, -25274, -10427, + 2438, 5766, -14421, -5009, 22996, 16136, 32547, 8884, -29214, 19981, 17796, -15216, 32105, -1332, + -6757, -26725, -25794, -1280, 3754, 17476, -31142, 17064, -9274, 27199, 1493, 19953, 3326, 5035, + -1513, 21407, 23067, -21521, -23931, 30559, 1587, 31107, -27215, -6943, -23294, 26701, 6116, -23343, + -26055, 14203, 14004, 24448, -9903, 10083, 14113, -6164, -20256, -12427, -16574, 27322, -4498, -16298, + -26033, 317, -30281, 12610, -981, 22758, -31805, -7654, 9583, -5052, -26066, 7501, -14687, -11818, + -24170, 8120, -12790, -24925, -17817, 8402, 7649, -5259, -31106, 20029, 13671, -18037, 19978, 24717, + 20552, -3047, 29623, -24028, 9525, 30818, -15125, 24924, 7045, 599, 20395, 17536, -19215, -20402, + 12024, 20246, 6389, 22558, 8827, 23683, -17873, 6098, 12802, -15170, 3276, 16134, 14859, -30565, + 26704, 3022, 8459, 11381, -3057, 8836, 10549, 996, -8429, 22544, -19466, 32054, 15493, -12626, + -25261, -15763, 12840, -21871, 22928, 4327, -19146, -3541, -22193, -25592, 25369, 2141, -8281, -16468, + 13703, -1754, -26198, -5865, 19737, 5522, 9809, 19518, 
2706, 20695, 25236, 14558, 13430, 19342, + 27205, 6266, 1417, -9773, 19004, -10985, 15244, 642, 24508, -30089, -2116, -8868, 4618, -26174, + 5608, 20438, -27797, -30869, -24732, -8680, 19978, -24179, 17649, 28191, -4489, 6166, -17920, 14925, + -2449, -7551, 9510, 8983, -1144, 31576, 23509, 3201, 24744, -27486, -7243, 26548, -22165, 22148, + 4834, -22234, -4264, 10673, 13630, -17904, 5066, -30328, -25504, 30786, 23524, 12878, 12145, -614, + 4416, -16514, -19626, -24864, 16035, 29763, -31419, -19187, -18168, -10413, 5446, 13965, 30410, 4399, + 20949, 16140, -26709, 21691, 12646, -8402, -2685, -5792, -3284, 30486, -25543, 25183, 22177, -15786, + 19424, 4863, 31300, 22178, 21872, -26859, -25626, 11655, -9504, 6283, -22087, -26980, -11706, -6411, + 6050, 23180, 4923, -4403, 950, -6808, -13664, -15203, -28240, 22928, -20099, -408, -31213, 28057, + 14325, -25826, 20422, 32061, -1091, -18337, -24586, 11644, 23190, -24899, 169, -5742, 17160, 17162, + -19591, 2812, -6078, -20375, -1833, -21706, -19814, 25050, 5066, 28675, -24973, 7961, -31710, -2113, + -13068, -5653, 28633, -1401, -25851, -21829, 32717, 23637, 13344, 32610, -21365, -6512, 28755, 317, + -2391, -2741, 24175, 11113, 21755, -4296, -5778, 20865, 30347, 19071, 15026, 4021, -32214, -23275, + -12974, 5494, -15689, -31040, -3335, 15439, -10316, 26402, -32215, 18523, 10756, 29924, -7078, -25823, + 21137, -30668, -15072, -14810, -3410, 18748, 4188, -19792, 10908, -27291, -13531, 18547, 9696, 31983, + -26262, 5225, 21999, -20625, -16716, -25798, 32507, -15864, 3311, -26972, -31871, -23194, 26496, 18999, + -3719, -25307, -2470, -13022, -26979, 2824, -11457, -20061, 11281, 9209, 16975, -8930, 1697, 3059, + 25231, -125, 25556, 21607, 31073, -22337, -8610, 5563, -7371, -13696, 10713, -18916, -10591, -7918, + 26769, 21848, -14710, -23942, -9071, -19402, 569, 6456, -538, -11455, -25124, -29709, 31919, 2832, + -7082, -5987, 4775, -27377, 27147, 23585, 3564, 25458, -14863, -2917, 20123, 4496, 2495, 32755, + -17590, 5636, 7118, 
-14415, 13705, -32061, -30897, 3935, -27862, 9014, -19825, -4372, 29695, 11991, + 15257, -3782, 21164, 11367, -32633, 10411, 9713, -15543, -15936, -609, -29037, -20581, 10534, -20279, + -10771, -22125, 7510, -1053, 20718, 20639, -6737, -19224, -2205, 18027, 4981, -2124, 27151, -13178, + 11024, 11731, 8625, 28935, 24479, 7092, 2740, -11256, 20949, 13760, 15956, 21118, -14244, 7612, + -6174, -29277, -25992, -13080, 8518, -10149, 19450, 17944, -29202, -1308, -13369, 661, -18829, 14807, + -7829, 5244, -17270, 7525, 22507, 12278, -2958, -29038, -9431, -5758, 31330, 12206, 14567, -13693, + 28110, -6598, 21675, -4564, -8311, 22215, -11625, 31880, -23401, -28984, 28180, 22162, 9336, -6358, + -27350, 21796, -4975, -24187, -81, 16754, 15742, -81, -7661, 12148, -18345, -32381, 22751, -14652, + -18938, -496, -881, 31899, 28966, -23219, -15280, -18313, 21787, -29832, 28508, 26810, 26899, -11035, + -20435, 10317, 17667, 15239, 14750, -24554, -8362, 21088, 10670, -5292, 14514, 29013, 29269, 24639, + -15700, -130, -18825, 17673, 30501, 28075, -12263, 23594, -19928, -10458, 4187, -27697, 25907, -6763, + -3259, 28763, 8426, 10218, 2435, 1875, -5718, 1443, -4891, -9690, -4512, -20232, 3926, -11426, + -18351, -20113, -20788, -23924, -31952, -28976, -25524, 3620, 23527, 10511, 28307, 4916, 2069, 14006, + 31406, 32580, 19496, -13349, 17849, -7952, 21035, 25445, 12250, 8751, -11247, -29555, 13781, 11653, + -2108, 19965, -23627, -31496, 2923, 12388, 15272, 26748, 14759, -21920, 24924, 9977, 3083, 25119, + 18755, -2404, 1389, 19116, -27381, -14415, 6438, -19190, 23682, -18887, 11864, -23330, -16669, 20094, + -1437, -12552, 15150, 16532, 29733, -6046, 31598, 28614, -20186, -6173, -12764, -31469, 6832, -4708, + 14390, 29395, -3659, -1433, -6747, -15388, -29422, 26214, -23709, 18279, 19663, 21351, 9146, -16736, + 10573, -31314, -31061, -13879, 11461, -30678, 12168, 6551, 28935, -11670, -26736, -16133, -30597, 30799, + 20646, -20300, 1152, -9929, 1609, -6736, -17954, -22439, 30840, 17800, 
-18314, 21768, -29275, 29862, + 6522, -30474, -21958, -15754, -28113, 5003, 12837, 29666, -30389, 22299, -19418, 14778, 23122, 5211, + -2962, 22920, 27280, 20240, 6407, 10150, 3568, 15908, 27546, 27560, -17514, 32279, 17146, 2515, + -8987, 8587, 30511, 11204, -31444, -5239, -17050, 8836, -6870, 14705, -10185, 25220, 4086, 4793, + -12046, -17366, -2711, -26665, 8689, -4722, 20781, -4217, -7305, -18537, -16094, 14790, -21063, 16796, + -31010, 26812, 17157, -4756, 11644, -29147, -7496, -5432, -27492, -24335, 1012, -18469, -14560, 26627, + -24479, 11825, -30512, 825, -1967, 13527, -20828, 8664, -26659, 32156, -29097, -26202, 5349, -2339, + -26071, 16332, 23587, 1616, -12966, 11973, 5410, 27441, -21854, 16069, -6506, -22474, -18518, 11167, + 15044, 21052, 14778, -32545, 3841, 32387, 21788, 7671, -29123, 11800, 310, 24279, -4496, -17152, + -8254, -14231, 27562, 3852, 8171, -12454, 21027, -16049, -31875, -19231, 19352, -8521, 11667, -2334, + -15948, -11539, -13156, -15522, 4022, 2268, -12447, -11084, -2265, -2983, -720, -31780, -25311, -9695, + 26189, -13615, 14465, -9155, 21887, 31312, 4894, -12852, 2245, 24428, -11155, -27516, 18988, -22221, + -20745, 20483, -8617, -213, 12361, 7698, -4479, 30091, -22924, 7507, 25418, 5918, -11843, -31006, + -14090, 24763, -4138, -23574, -168, -13362, -28184, 13217, -9781, -21553, -23131, 18351, -16663, -29853, + 6440, 29358, -13064, 3016, 24148, 12798, -18411, 10655, -16933, -5582, 12978, -4604, -31905, 682, + 501, -883, 31339, -9685, -11271, -900, 29659, -5986, -14797, 12611, 28944, 20204, 29307, -27262, + 25890, -19355, 9611, -31934, 17588, -6834, -4693, -25255, -20187, 17554, 2924, 4902, 23841, -766, + -24118, 10219, -22954, 26580, 5303, -11306, 1618, -5010, -8798, -25901, -20231, -8699, 15894, 9978, + -22277, 9298, 23488, -11682, 8790, 2993, -7406, -22603, -30593, 7128, -18111, -22231, -26648, 25865, + 22767, 23556, -3863, -22031, -25363, -5068, -32200, 18410, -9206, -30617, 1226, 24851, -4685, -20445, + 26762, 25363, 6003, 8693, 
32095, 31659, -17616, 7693, -22635, -20482, -24565, -3195, -7681, -6755, + 13469, 4938, -21363, -667, -32231, -31713, -16264, -19915, 627, -30910, -6414, 25593, 4434, -30620, + -8993, -29944, 32405, 7931, 16554, 19664, -13216, -22238, 30217, -14188, -1785, -26119, 4954, -28338, + 10594, 2631, 19008, -4361, -12024, -8541, 9451, -8885, -16229, 23667, -11108, -31587, 16393, 1849, + 4928, -21686, 26416, 20702, -27088, -5881, -6734, -12654, 5953, 20828, 20478, -18391, -10546, 7367, + -5489, 27066, -28031, -17208, 2047, 1883, -21701, -5895, -27679, -5063, 3550, -7602, 10845, -240, + -11509, 22402, 23979, 4089, 1073, 25743, -3614, -29907, 19695, -8985, 11224, -11167, 8589, 5497, + -2308, -20166, -32018, 23478, 31271, 18708, -11338, -21365, 886, 4884, 160, 1428, -16002, -24332, + 26230, -29614, -27775, 19804, 32257, 16981, -11343, -25948, 6181, 31476, 23880, -26832, 9641, -14364, + -25885, 21343, 1105, -543, -25681, -15545, 22965, 10949, 22486, -4263, -19359, 31669, 31052, 3772, + -27260, 26853, 19767, -25336, 6549, -32686, -13700, -7307, 4146, 6628, -26758, 18413, 14236, -15356, + -23783, 21369, -26849, -24580, 30846, -21831, 3462, 21569, 30853, -29946, 13586, 27140, -24613, 15780, + 30304, 23452, -25836, 10688, -25292, 6624, -1277, -19016, 22156, 10028, 14475, 4076, 8727, -30810, + 9435, 5731, 2385, 31724, 21481, -15782, 18135, 20017, -24681, 13705, -18687, -21871, 32000, 31110, + 71, 32757, -12141, -15845, 21064, -15543, 2060, -18316, -26334, 13424, -2129, 26034, 7777, -8497, + -16267, 22259, 5779, 18548, -15071, 723, 6894, -14041, 28723, -15419, -19283, 20115, 5090, -21294, + 30133, -25182, 13225, -13149, -22889, 5893, 23730, -31832, 7793, -19971, 19697, -5048, -642, 31337, + -32531, -20377, -13402, 7355, 15311, -4895, 21337, -24022, 14362, -32371, 32341, -26514, -15187, -6956, + -11112, 16169, 16349, -16330, -32431, 30803, 10725, 759, -14727, -19276, -28695, -3089, 26557, 829, + 32044, 24616, 19045, -32192, 6070, -11134, 29521, -28391, -434, -32482, -4888, -1239, 
-5085, -20122, + -489, 21458, -27071, 27211, -6783, 26003, -15794, 25274, -25254, -32079, 20372, 11187, 11469, 22782, + -14149, 18219, -24254, -16675, 4759, -29243, 7369, 298, 24269, -30290, 3165, 21076, 8571, 27183, + -24272, -13075, -23568, 6021, -20240, 8439, 3149, 28624, 12188, 11170, -24590, -23700, 26924, 21606, + 26984, -32651, 252, -32100, -3022, -22365, -439, -14000, 1829, 29298, 29949, 18094, 19142, 25687, + 14078, 2065, -54, -21077, 15568, -18997, -12986, 12011, 30937, -22141, -1920, -474, -19684, 3006, + 28873, 2786, 19697, -7455, -3173, 17594, -18487, 16320, -2877, -18441, -13532, 32714, -14059, 252, + 23597, -8888, -9793, -32271, -30967, -31510, 28160, 8959, 30976, -1247, -18918, 17146, -25675, -30977, + -26462, -16622, 10799, 7666, -13312, -5424, -10418, 21037, -30488, 19677, -13532, 8755, -19315, 10615, + 3091, -30721, 12021, -22559, 24753, 15769, -26631, -10368, 21385, -20406, -3084, -17169, -25716, 18104, + -1744, 29639, -12011, -9916, -32715, 6656, 8407, 25411, -10393, -31919, -21629, 25423, 24737, 6733, + 10047, -2081, 7004, 13997, -24671, -26986, -25454, -11811, -30760, -5563, 11163, 2621, -11992, 10036, + 5888, -15851, 16997, -12538, 30138, 5427, -16434, -3155, 8844, 25724, 5692, -31360, -10852, -23328, + 10573, -10849, 24522, 21518, -8336, -13404, 25061, 7101, -5221, 13807, -9384, 21861, -18579, 3124, + 9178, -11961, -11194, -6807, -27218, 10149, 2114, 820, 32100, -28248, 25418, -29798, -18769, 21729, + 21906, 2162, -20609, -13950, -4510, -16669, 16059, 12670, -24711, 29471, 5789, 25213, 11338, 9754, + 25287, -25357, 5922, -21071, 32693, -15115, -2256, 26281, -9890, 26777, -20922, 20270, -15772, 244, + -5882, 25392, 23920, -5608, -27457, 14484, 16395, 11396, -30295, -24285, -4304, 26667, -7940, -25725, + -21502, -6492, 13126, 2721, 22575, -28813, -10051, 7244, -7039, 15697, -16331, -9484, 16401, 22311, + -20809, -24229, 2996, 12326, -13918, -17028, 12779, -15827, -20964, 30829, 10935, -24209, -10296, -15205, + -30359, -27474, -13146, 24582, 
-30928, 11626, -11663, 13981, 12054, -1706, -27707, 1677, -16189, -27292, + 22730, -20882, -14651, -5706, 28743, -11356, 3808, 786, -1681, 26157, -7458, 2234, 454, -25642, + 1522, 26963, -15158, -23220, -9306, 17252, -4540, -17606, 24082, -22164, 12651, 16708, 26202, -7524, + 457, 1280, -11946, -24021, 9266, 8503, -2549, -11243, -26579, -32367, 6407, -6742, 1428, -5322, + -16496, 3510, -32490, -7866, 215, 2663, 26547, -24054, -11307, -17216, -5307, -658, 8593, 8442, + 32356, -6116, -16749, 22351, -16842, 21944, 1433, 30776, -25624, 7272, 27617, -17649, 10032, 23323, + 13072, -25345, -28087, -3053, -2539, 21599, 31132, -26338, 15331, -19862, -12598, -25896, 10255, 24014, + 25069, 24776, 28751, 2263, 19805, -18743, 6116, 26756, 9375, -14327, 20205, -23501, -6313, -18341, + -29395, 26945, -30004, 26170, 26387, -11673, 21361, 30433, -9108, -7371, -12308, 15121, -24830, 196, + -31363, -8725, -30575, -10132, 5403, 21081, -30223, -6113, 13159, 28301, -31032, -21839, 4160, 9183, + -30017, 10550, 30468, -3297, 22292, 2227, 25000, -24034, -24334, -28558, 13679, -1112, 13742, 31757, + -8511, -15834, 24359, -6536, 29972, -28224, -30752, 25942, 8854, 5670, 23148, -2464, -5511, 20316, + 12193, 13087, 2575, -2726, 2224, 15690, -23252, -630, 13505, -31745, -16876, 16732, 20551, -13024, + 13199, 23998, 21332, -11074, -8864, 14998, 27983, 6463, -19739, -32051, 29557, 17637, 23450, 32595, + 8282, 23842, 26808, -21874, -16312, 15523, -22131, 13639, -28972, 12557, 8545, 31382, -6027, 20916, + 15885, 31680, -14508, 20565, 5122, -26991, 16874, 5750, -6927, 9153, -8838, -3260, 15817, 28542, + -3050, 21322, 7464, -8636, -16488, -2865, -8905, -23461, -30956, -27884, 28582, -28953, 4002, 456, + -16314, -29520, 26209, 22374, -20275, 3741, -29084, 20269, 10740, 9301, -10360, 1937, -1357, -21903, + -27247, 8937, 1012, -14716, -20126, -2121, -10720, -395, 31480, -11201, 112, 28341, -15631, -23286, + -26973, -28519, 32112, 8432, -27748, -1260, -29280, 12826, 32030, 31990, 25630, -22914, -17341, 
6763, + -18257, -25535, 21174, -14767, -338, -20819, -26972, -11946, -17080, 3017, -12910, 31528, -18784, 9333, + 28193, 18677, 9681, -11699, 1580, 7831, 23456, -9044, -14098, 19103, 11860, 15867, -2598, -16325, + 24031, 30730, -4854, -13439, -25148, 25654, 4576, -14170, 29558, -20557, 27409, -17635, 5852, 22830, + -2149, 5799, -29435, -9432, -23159, 17396, 12192, 15346, 12260, -29750, 763, 14884, 15707, -7449, + 2376, 16164, -480, -26237, 23026, 22127, 24916, 5346, -29533, -29688, 23119, 9912, -22292, -10808, + 1698, 8180, -5544, -26575, 19324, -27211, 25761, -27898, -27169, 14318, 18990, 29785, 12681, 11379, + -5590, -28941, -763, 23951, 402, 10885, 29616, 23523, 25062, 21109, -31665, 10732, -23216, 26539, + 12348, -25580, 14258, 27403, -2682, -10388, -17960, -6685, 22762, -5041, -18204, 16424, -31431, -5364, + 3332, 370, 19944, 3497, -30931, -19283, 22143, -17586, 2002, -29776, -32035, 11171, -2163, -6722, + 11875, 1003, 26566, -29331, 16122, -15781, -9571, -12844, -6641, 14056, 22162, 26512, -183, -17306, + 272, -11043, 919, 15575, -21888, 22360, 26634, -19874, -22727, -27875, -4370, 6348, -11347, -30355, + -2604, 30888, 21640, -14103, -7511, 32644, 1072, -25417, 9945, -30738, -13240, 29208, -30473, 2004, + 32359, -4833, -10006, 24213, -9221, -22695, 19137, 6782, -5792, -471, -3334, 7845, -28467, 3141, + -12951, -23080, 4099, -25571, 30774, 18691, -24011, 4462, 7930, -18581, -22447, -23491, -11161, -3378, + 26589, 7321, -21919, -16095, -20664, 14136, -2749, 28584, 31194, 27543, -14942, -9868, -17212, 27965, + -31278, 4155, -12907, -26790, -1541, 26302, 4183, -7452, 21372, 27940, -10486, -30604, -9066, 25936, + 22533, 11045, -22973, -21989, 13363, 11501, 4874, 12762, -29846, -4208, 10045, -18558, 23296, -17947, + 16248, 11068, -24144, -22326, -28157, -1815, 3556, -11265, 22501, -19655, -9307, 31877, -15635, -24003, + -3870, -27033, -21186, -20859, 3108, 18082, 4004, 2651, -14856, 31389, -17069, -15211, 15685, 6640, + 22156, -21226, -8628, -25423, -30444, 26214, 
16045, -32595, 9143, 3636, -6523, 12448, -9257, 13870, + -22507, -24500, -22260, -3246, 3385, -17539, 20467, 14919, 32342, 13855, -19372, -5064, -32075, -21843, + -16145, -25335, -3405, 12693, 23541, 29098, 29350, -13690, -5961, -29578, 25748, 20042, -15339, 730, + 12326, -18344, 27401, 24685, -21393, -25930, 25984, -13743, -17136, 1742, -6313, 11351, 19210, -9070, + -25168, -14087, 20693, 17808, -9697, 23428, 12605, -28621, -6251, 25896, -10305, -25361, -19509, 13510, + 9538, -32093, 12800, -11322, -22582, 8318, 16229, -29059, -20479, 30946, -21290, 26332, 24298, 1268, + -18125, 8944, 22779, 858, -9611, -25227, 12436, 16934, 12708, -2336, -22167, 24387, 18145, 18445, + 32744, 19407, 31540, 27375, 27428, 15367, -26027, 18515, -4552, 3178, -2807, -15624, -10175, 12929, + -24844, 29806, -4110, -4590, -22733, 22456, 13477, -1482, 1213, 11368, -26662, 775, -24761, -7061, + 25367, -18376, -27213, 21147, 19432, 23135, 30553, 11977, 18075, 10596, -16727, -15109, 27043, 5189, + 11791, -907, -11666, -18652, -32765, 7657, 20572, 19531, 24221, -16222, -30693, 26924, 14963, 9699, + -10619, -32251, 8194, -5445, 13873, -7325, -30466, -10302, 23021, 9346, -20777, 29487, 27663, -26360, + 16716, -23295, -11306, 6404, 23684, 29275, -11363, 30264, 16639, -1004, 12512, -7650, -7198, 21638, + 23458, 24783, -21017, -441, -5190, 2603, -12440, 24702, -22809, -9541, -4962, -6933, -3192, 13369, + 1563, -14290, -21515, 22555, -1555, 3932, -20800, -23217, 7401, 32581, 19237, -10708, 9461, 28546, + 8259, 6603, -31027, -16119, 21811, -30939, -16642, -20396, 2287, -5826, -20364, 22752, 12062, 1082, + -9103, 20308, 25038, 19298, -8219, -7645, -25276, -9611, -1448, -5910, 8881, -10548, 3000, -32562, + -5695, 10854, 29163, -2751, -23848, -22590, -11237, -9439, 30751, 9133, -25476, -21958, -21122, -22675, + 5624, 2167, 23584, -17502, 24977, -5950, 30705, -14144, 21228, 2449, 13437, -12712, 14883, -17198, + 30425, -2271, -29105, -16403, 6876, 17778, 9433, -24871, -20836, -13493, -22038, -15095, 
-6372, 19189, + -12795, -23843, 17200, -4776, 12662, -26121, 24268, -16412, 4329, 2946, -24132, 11016, -21866, -11883, + -26546, -31498, -30424, 32282, 11845, -767, -28204, 2439, 16763, 3691, 19829, 27654, 27184, -12086, + 4842, -4668, -7042, 11618, 29811, 17027, 3490, -24649, -2289, -32126, -12618, 32493, 27895, -19974, + 23302, 29106, 9832, -4183, -25575, 23052, -22422, 22699, 519, -3444, 16835, 18103, 24276, 3356, + 15902, -9986, -7353, 5579, -2860, -1702, 21849, -433, -20563, 1373, 22422, -24074, 2610, -27266, + 30644, 23675, 31846, 24181, -23592, -5392, 14001, 13549, -23207, 19628, -7694, 31348, 3733, -14980, + 3115, -23411, -18783, -23135, 13553, -28036, -16238, -7855, 23019, 8678, -9719, -17924, 14839, 27440, + -13116, -3825, 32181, -5052, 8981, -6831, 16620, -12041, 23951, -25138, -29408, -20032, -9598, 15318, + -25238, 27784, 18306, 7502, 11117, -18302, -15306, -13860, 2317, -11116, -15205, 882, 7067, 1296, + 22498, 25339, -17542, 26359, 4427, 4617, -20476, -14545, -21870, -14516, -21726, 16481, -16432, 7296, + 31323, -14777, 12134, -10730, -21801, -18698, -4163, 5471, 18556, 16312, 18581, 15596, 22422, 19478, + 13157, -15272, -32764, 30306, -25573, -29312, -22729, 19888, -19488, 10506, 19969, 7367, 8660, -7316, + -18568, -19017, 24883, -23446, 7628, -9083, -12184, -11712, 14304, 32762, 15138, -19982, -1913, 27158, + 15241, 20965, 6888, 17951, -19394, -8146, -26366, 12035, 17740, 9180, 1524, 14922, 31423, 4131, + 23073, -3028, 18832, 29178, -4728, 22144, -29766, 21999, 18479, 19167, 10689, 30620, 23070, -27399, + 7254, 29412, 15723, 17810, -15594, -7862, -19998, -28749, 22765, 13437, -27813, 9108, -19180, -20609, + -30548, -22091, -15445, -8715, -8411, -4246, -19223, -11064, -5882, -7012, 30888, -21364, 15697, -27886, + -19494, -3336, 8564, -31777, -31828, -19568, -23100, -1963, -7475, 5883, -25305, 2189, 19711, -4545, + 28196, -15087, -23812, 23436, 13080, 8014, 1744, 13369, 1204, 4276, 3262, -21395, -14421, 28522, + 12720, -31929, 28970, -15290, -29250, 
11037, -12430, 23435, -17049, 3583, 19855, -25051, -5299, 4912, + 1283, -14293, 11683, 30501, -3287, -12131, 28095, -30032, -31674, 30135, -4574, -27645, 27484, -18258, + 29516, -29317, 11260, -990, -8323, -4896, 18723, -25912, 25265, -12463, 21585, -13891, 34, 20037, + -5779, -2197, 23615, 23312, -20427, 16146, -20015, 11186, -26597, 10615, 7088, 25425, 26727, -17724, + 29804, 14284, -30292, 2189, -22890, -19614, -9871, 13601, 25084, 27651, -24834, -28078, -27180, -31638, + 23109, -21072, -26100, -24533, -22601, 5371, 504, -5907, -24688, 218, -25418, 21532, 20316, 10749, + 25329, -24134, 11086, 22075, 23452, -20442, -16745, 10699, -25970, -17280, 23105, 8673, -3979, -20418, + 32239, 2152, 9965, 24851, -13009, 20920, 7761, 3191, 31690, -19369, -30050, -15028, -22541, 23205, + -13783, -8586, -24444, -637, 1404, -22062, -27483, 25493, 28739, 29198, 14167, -1779, 23751, -19673, + 23722, 8844, 4482, 20556, 27481, -692, -15313, 26656, -30734, -30671}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/output.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/output.h new file mode 100644 index 00000000..530d951d --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/output.h @@ -0,0 +1,15 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int16_t avgpooling_int16_3_output[144] = { + -5648, 2658, 3601, -5563, -5144, 3742, 2267, 5209, -1501, 9388, 5345, -1617, -1, -1564, 5416, -3923, + -453, 237, -3434, 7987, 74, 4064, -2980, 1589, -4471, -1935, 4728, -804, 8360, 2892, -34, -2276, + 1126, 5698, 3866, -1530, -1535, 2527, -4954, -5756, 4352, 3757, -1507, 927, -6576, 6119, -5555, 2628, + 1009, -779, -370, 1793, 4345, 6930, 624, 2589, 5005, 3166, 7200, -3052, 1496, -3549, -3592, -1859, + -2575, 4321, -106, -2142, 1375, -1109, 2125, -2968, -3211, -6835, 4247, -6484, 3369, -2788, -2939, 792, + -4136, -2532, 1018, 55, -1384, -1333, 4057, 9351, -7205, -3978, 5110, 7798, -4056, 1349, -989, 4914, + -3275, -1037, -6765, -3191, -2073, 64, 3867, 2609, 5851, -3980, -2831, 1061, -1045, 1836, 2135, -2315, + 2668, 1822, 3325, 2158, -255, 4170, -5173, -2204, 614, -6979, 2583, 2268, -76, 1896, -934, -1544, + 2447, -298, -2024, -4646, 4152, -5030, 1462, -278, 5146, 5826, 3103, -1044, -3518, 4974, -5069, 5754}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/output_ref_data.h deleted file mode 100644 index e4c12afb..00000000 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/output_ref_data.h +++ /dev/null @@ -1,16 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.8.0 as reference. 
-#pragma once -#include - -const int16_t avgpooling_int16_3_output_ref[147] = { - -24045, 4836, -25703, -8686, -10357, -15262, 16088, -12852, 312, 167, -505, 7692, 2609, -1249, - 20925, -14638, -6904, 7458, 693, -1451, -26394, 3880, -6839, -10120, 5975, 291, 20452, 11378, - -1911, 2340, 3254, -12543, 11769, 7923, -409, -5228, 11481, 18327, -6552, 1286, 2883, -6504, - 12291, 8359, 7560, -2366, 8638, -6319, 8747, 9083, 6585, 4148, -5144, 7575, 3108, -3609, - 4557, -23347, -8492, -6231, -3012, 1181, -1947, -16300, 2266, 12218, -17466, 5799, -10629, 13458, - 531, -2149, -7135, -25230, -7578, -13754, -10183, 10066, -4899, 1776, -12980, 6122, 17770, 1207, - 10706, 8964, 9915, -11577, -17228, -15867, 9640, -7972, 15223, -8229, 5430, 10910, 6038, -3415, - 14011, -12828, -17697, -5365, 5302, -9834, -7691, -429, 2768, -915, 1953, -19279, 4779, 22484, - 1510, 13225, -20172, 8354, -10758, -16995, 7259, -18429, 3866, -8158, -3650, -6071, -13113, -9019, - 5788, -2503, -23095, 28777, 5192, 21300, -7962, 19961, 276, -27813, 16259, 8911, 12574, -4535, - 23674, 11299, -6940, -2981, -3570, 10419, 344}; diff --git a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/test_data.h b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/test_data.h index 33faa135..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/avgpooling_int16_3/test_data.h @@ -1,4 +1,3 @@ -// Generated by generate_test_data.py using TFL version 2.8.0 as reference. 
#include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16/config_data.h b/Tests/UnitTest/TestCases/TestData/maxpool_int16/config_data.h index 366a875d..4b647e27 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpool_int16/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpool_int16/config_data.h @@ -1,20 +1,20 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. #pragma once -#define MAXPOOL_INT16_OUT_CH 2 -#define MAXPOOL_INT16_IN_CH 2 +#define MAXPOOL_INT16_BATCH_SIZE 3 +#define MAXPOOL_INT16_INPUT_N 3 #define MAXPOOL_INT16_INPUT_W 4 #define MAXPOOL_INT16_INPUT_H 3 -#define MAXPOOL_INT16_DST_SIZE 12 -#define MAXPOOL_INT16_INPUT_SIZE 24 -#define MAXPOOL_INT16_OUT_ACTIVATION_MIN -32768 -#define MAXPOOL_INT16_OUT_ACTIVATION_MAX 32767 -#define MAXPOOL_INT16_INPUT_BATCHES 3 -#define MAXPOOL_INT16_FILTER_X 2 -#define MAXPOOL_INT16_FILTER_Y 2 -#define MAXPOOL_INT16_STRIDE_X 2 -#define MAXPOOL_INT16_STRIDE_Y 2 -#define MAXPOOL_INT16_PAD_X 0 -#define MAXPOOL_INT16_PAD_Y 0 +#define MAXPOOL_INT16_INPUT_C 2 +#define MAXPOOL_INT16_FILTER_W 2 +#define MAXPOOL_INT16_FILTER_H 2 +#define MAXPOOL_INT16_STRIDE_W 2 +#define MAXPOOL_INT16_STRIDE_H 2 +#define MAXPOOL_INT16_PAD VALID +#define MAXPOOL_INT16_ACTIVATION_MAX 32767 +#define MAXPOOL_INT16_ACTIVATION_MIN -32768 +#define MAXPOOL_INT16_OUTPUT_C 2 #define MAXPOOL_INT16_OUTPUT_W 2 #define MAXPOOL_INT16_OUTPUT_H 1 +#define MAXPOOL_INT16_PADDING_H 0 +#define MAXPOOL_INT16_PADDING_W 0 diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16/input_data.h 
b/Tests/UnitTest/TestCases/TestData/maxpool_int16/input_data.h deleted file mode 100644 index aaba62dd..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpool_int16/input_data.h +++ /dev/null @@ -1,12 +0,0 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. -#pragma once -#include - -const int16_t maxpool_int16_input[72] = { - -22012, -29431, 2991, -28947, 12941, 3386, 11750, -29207, 17250, 18326, 16851, 1688, - -15668, 28591, 3549, -4776, 13876, 4256, -26361, -31668, 25657, 18810, -32577, -18153, - -31149, -147, -9833, -6236, 6723, 11380, -7486, 25972, -14678, -17307, -10130, 31587, - 27852, 18878, 622, -13260, -15289, -28452, 11183, -19196, 14652, 29495, 6456, 11697, - -31734, -21972, 15666, -11806, 30811, -22963, -14598, 19502, -26188, 19171, 25456, 28770, - -32329, 31839, -11684, -16437, -20179, 30603, 21581, -26805, 492, 29700, 5852, -21574}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16/input_tensor.h b/Tests/UnitTest/TestCases/TestData/maxpool_int16/input_tensor.h new file mode 100644 index 00000000..737b757f --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpool_int16/input_tensor.h @@ -0,0 +1,11 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int16_t maxpool_int16_input_tensor[72] = { + 10633, 19925, -6637, -19472, -31698, 14426, 16578, 1247, -2461, -30782, 8949, -31109, 31233, 5917, 884, + -6665, 7451, 24434, -16644, 10504, -5188, -4222, -24147, 1657, -13953, 17557, -206, 5281, 29904, 5509, + 14452, 28575, -216, -22561, 7175, -31242, -13644, -32061, 23161, 31746, 2537, -9456, 15348, 23480, -29545, + 25601, -15227, -9262, -7564, -12629, 22144, 11067, -9524, -15623, 1041, -157, -13596, 2922, -27926, -24805, + 19808, -14187, -796, 23251, 9387, -19680, 9650, 2159, -19456, 4171, 8259, -14472}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16/output.h b/Tests/UnitTest/TestCases/TestData/maxpool_int16/output.h new file mode 100644 index 00000000..8be8f08a --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpool_int16/output.h @@ -0,0 +1,7 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int16_t maxpool_int16_output[12] = + {10633, 19925, 31233, 14426, 7175, 17557, 29904, 31746, 22144, 11067, 19808, 23251}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/maxpool_int16/output_ref_data.h deleted file mode 100644 index 7c39223e..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpool_int16/output_ref_data.h +++ /dev/null @@ -1,7 +0,0 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
-#pragma once -#include - -const int16_t maxpool_int16_output_ref[12] = - {17250, 18326, 12941, 28591, -9833, 31587, 27852, 25972, 25456, 28770, 30811, 31839}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16/test_data.h b/Tests/UnitTest/TestCases/TestData/maxpool_int16/test_data.h index b00cc3f6..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpool_int16/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpool_int16/test_data.h @@ -1,5 +1,3 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. #include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/config_data.h b/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/config_data.h index b322b76b..05fd4a4d 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/config_data.h @@ -1,20 +1,20 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
#pragma once -#define MAXPOOL_INT16_1_OUT_CH 2 -#define MAXPOOL_INT16_1_IN_CH 2 -#define MAXPOOL_INT16_1_INPUT_W 4 -#define MAXPOOL_INT16_1_INPUT_H 5 -#define MAXPOOL_INT16_1_DST_SIZE 40 -#define MAXPOOL_INT16_1_INPUT_SIZE 40 -#define MAXPOOL_INT16_1_OUT_ACTIVATION_MIN -30000 -#define MAXPOOL_INT16_1_OUT_ACTIVATION_MAX 30000 -#define MAXPOOL_INT16_1_INPUT_BATCHES 2 -#define MAXPOOL_INT16_1_FILTER_X 3 -#define MAXPOOL_INT16_1_FILTER_Y 3 -#define MAXPOOL_INT16_1_STRIDE_X 2 -#define MAXPOOL_INT16_1_STRIDE_Y 1 -#define MAXPOOL_INT16_1_PAD_X 0 -#define MAXPOOL_INT16_1_PAD_Y 1 -#define MAXPOOL_INT16_1_OUTPUT_W 2 -#define MAXPOOL_INT16_1_OUTPUT_H 5 +#define MAXPOOL_INT16_1_BATCH_SIZE 2 +#define MAXPOOL_INT16_1_INPUT_N 4 +#define MAXPOOL_INT16_1_INPUT_W 5 +#define MAXPOOL_INT16_1_INPUT_H 1 +#define MAXPOOL_INT16_1_INPUT_C 2 +#define MAXPOOL_INT16_1_FILTER_W 3 +#define MAXPOOL_INT16_1_FILTER_H 3 +#define MAXPOOL_INT16_1_STRIDE_W 2 +#define MAXPOOL_INT16_1_STRIDE_H 1 +#define MAXPOOL_INT16_1_PAD SAME +#define MAXPOOL_INT16_1_ACTIVATION_MAX 30000 +#define MAXPOOL_INT16_1_ACTIVATION_MIN -30000 +#define MAXPOOL_INT16_1_OUTPUT_C 2 +#define MAXPOOL_INT16_1_OUTPUT_W 3 +#define MAXPOOL_INT16_1_OUTPUT_H 1 +#define MAXPOOL_INT16_1_PADDING_H 1 +#define MAXPOOL_INT16_1_PADDING_W 1 diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/input_data.h b/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/input_data.h deleted file mode 100644 index 9a670b32..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/input_data.h +++ /dev/null @@ -1,12 +0,0 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
-#pragma once -#include - -const int16_t maxpool_int16_1_input[80] = { - -4687, 31802, 16109, -26834, -7429, -12711, -17211, -25572, 12211, 26418, -11543, 30572, 9275, 15652, - -32141, 27498, 26033, 32149, 15794, 21722, -7188, -21746, 19786, 27338, 22856, 28629, 18734, -1781, - 29496, 16731, -21468, 8236, 29837, 9269, -10198, -26070, -1393, 14867, -22031, 31923, -20734, -32353, - 15918, 21735, -12492, 8170, 3206, 13786, 27394, -26777, 26463, 3455, -8961, 18711, 28157, 27060, - -10048, -15193, -22904, -27767, -1557, -26978, -18525, -30070, -2264, -3433, -21567, -27752, -21009, 25411, - 25068, -6040, -13214, 22454, -11129, 23354, 26684, 14529, 12759, -27057}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/input_tensor.h b/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/input_tensor.h new file mode 100644 index 00000000..6a867d04 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/input_tensor.h @@ -0,0 +1,8 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int16_t maxpool_int16_1_input_tensor[20] = {-16271, -2613, 13724, 31677, -3799, 17151, 22596, + 17813, -13387, 14245, -24985, 21310, -17454, -179, + -15968, -23878, 3501, 30969, -24872, 10660}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/output.h b/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/output.h new file mode 100644 index 00000000..3b052607 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/output.h @@ -0,0 +1,7 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int16_t maxpool_int16_1_output[12] = + {13724, 30000, 22596, 30000, 22596, 17813, -17454, 21310, 3501, 30000, 3501, 30000}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/output_ref_data.h deleted file mode 100644 index 23928246..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/output_ref_data.h +++ /dev/null @@ -1,9 +0,0 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. -#pragma once -#include - -const int16_t maxpool_int16_1_output_ref[40] = {16109, 30000, 9275, 27498, 26033, 30000, 19786, 27498, 29496, 30000, - 29496, 27498, 29837, 30000, 29496, 30000, 29837, 28629, 29496, 30000, - 27394, 21735, 28157, 27060, 27394, 21735, 28157, 27060, 27394, 25411, - 28157, 27060, 26684, 25411, 26684, 25411, 26684, 25411, 26684, 25411}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/test_data.h b/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/test_data.h index b00cc3f6..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpool_int16_1/test_data.h @@ -1,5 +1,3 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
#include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/config_data.h b/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/config_data.h index bb464717..97d862a2 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/config_data.h @@ -1,19 +1,20 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. #pragma once -#define MAXPOOL_INT16_2_OUT_CH 3 -#define MAXPOOL_INT16_2_IN_CH 3 +#define MAXPOOL_INT16_2_BATCH_SIZE 1 +#define MAXPOOL_INT16_2_INPUT_N 1 #define MAXPOOL_INT16_2_INPUT_W 7 #define MAXPOOL_INT16_2_INPUT_H 7 -#define MAXPOOL_INT16_2_DST_SIZE 75 -#define MAXPOOL_INT16_2_INPUT_SIZE 147 -#define MAXPOOL_INT16_2_OUT_ACTIVATION_MIN -30000 -#define MAXPOOL_INT16_2_OUT_ACTIVATION_MAX 30000 -#define MAXPOOL_INT16_2_INPUT_BATCHES 1 -#define MAXPOOL_INT16_2_FILTER_X 3 -#define MAXPOOL_INT16_2_FILTER_Y 3 -#define MAXPOOL_INT16_2_STRIDE_X 1 -#define MAXPOOL_INT16_2_STRIDE_Y 1 -#define MAXPOOL_INT16_2_PAD_X 0 -#define MAXPOOL_INT16_2_PAD_Y 0 +#define MAXPOOL_INT16_2_INPUT_C 3 +#define MAXPOOL_INT16_2_FILTER_W 3 +#define MAXPOOL_INT16_2_FILTER_H 3 +#define MAXPOOL_INT16_2_STRIDE_W 1 +#define MAXPOOL_INT16_2_STRIDE_H 1 +#define MAXPOOL_INT16_2_PAD VALID +#define MAXPOOL_INT16_2_ACTIVATION_MAX 30000 +#define MAXPOOL_INT16_2_ACTIVATION_MIN -30000 +#define MAXPOOL_INT16_2_OUTPUT_C 3 #define MAXPOOL_INT16_2_OUTPUT_W 5 #define MAXPOOL_INT16_2_OUTPUT_H 5 +#define MAXPOOL_INT16_2_PADDING_H 0 +#define MAXPOOL_INT16_2_PADDING_W 0 diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/input_data.h b/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/input_data.h 
deleted file mode 100644 index 41c32039..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/input_data.h +++ /dev/null @@ -1,16 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. -#pragma once -#include - -const int16_t maxpool_int16_2_input[147] = { - 10231, -1881, -5938, -32236, 3431, 18544, -22188, 16103, 25978, -5177, 16801, 26022, -15111, 1161, - -24127, -21577, 18554, 20821, 23178, -27629, -32107, 8173, -10639, -22922, -24604, 29731, 21181, -23008, - 6314, 15754, 16706, -23908, 18952, -30356, 27432, -21148, -31650, 6980, -17351, -17362, -12035, -15178, - 26744, 8455, 7985, 7971, 31272, 11085, 18918, -30469, 23365, -10695, 29695, -14057, -8956, 11174, - 31197, -26907, -28299, 2950, 16525, -2737, 32071, -31862, 1131, -21993, -23720, 24157, -6964, 9970, - -30665, 8036, 7891, -20388, 20793, 10149, 19243, -3171, -4007, 3169, -4756, 6291, 9056, -16019, - 15050, 11383, -30377, 11929, -11922, 8188, -8621, -18596, 10671, -13012, -26778, -10772, -9167, 14231, - 7896, -16152, -22371, 7830, 26864, -26479, 1264, 25040, -15647, 24800, 7652, -930, 9660, -8777, - -20613, 15834, -26330, 3750, 25534, 24233, 31288, -13854, 4670, -19677, -2366, 16960, -5193, 21214, - -24475, -22060, -29310, -31937, 28226, 23864, -1397, -14417, -15751, -9613, -21775, -6951, 23014, 25951, - 4013, -32743, 32660, -18489, 23532, 10196, 32409}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/input_tensor.h b/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/input_tensor.h new file mode 100644 index 00000000..23531db1 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/input_tensor.h @@ -0,0 +1,17 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int16_t maxpool_int16_2_input_tensor[147] = { + -5966, 7454, 14182, -10681, 21864, 3855, -22213, 25677, -27516, 28822, 2314, 1946, -27934, -23066, + -8426, -32398, -16106, 30098, -6757, -31578, -13883, -4070, -15332, -19537, 22022, 15112, 26653, 31132, + 13639, 31235, 28492, -5175, -20261, 2081, -14421, -106, -7622, 29035, -29036, -4518, -16601, 12175, + -9186, 13071, -2598, -13248, 5673, -23680, 23139, -24658, -5364, -17896, 12127, -27976, 29133, 2116, + -30043, -12142, 25769, -29759, -20136, -17929, -13968, 20439, -11999, -9960, -577, -14853, -11271, -15527, + -20549, -24937, -11804, -17943, -4709, 21215, 21573, -2920, -29558, 28326, 29127, -22911, -18265, 14744, + -31645, -2362, -20705, -935, -16981, 7091, 6208, -31277, 12263, -8773, -18966, 12277, -26552, -13582, + 10978, 12032, 17360, 13475, 31282, -5595, 5304, -5081, 8275, -22574, 19653, 19024, -11872, 8584, + -5956, 16981, 10167, 8505, -7956, -2995, 17077, 4118, -1390, -31201, 22798, 26102, -2691, 17358, + 22456, 29209, 27076, -1542, -25723, -27483, -6359, 13354, 14706, -32028, 13894, -23962, 23876, -19725, + -12166, 27328, -20642, -7737, -27795, -29611, -15422}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/output.h b/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/output.h new file mode 100644 index 00000000..d3fb70eb --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/output.h @@ -0,0 +1,11 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int16_t maxpool_int16_2_output[75] = { + 30000, 25677, 30000, 30000, 25677, 30000, 30000, 25677, 30000, 29133, 29035, 30000, 29133, 29035, 30000, + 30000, 15112, 30000, 30000, 15112, 30000, 30000, 21573, 30000, 29133, 29035, 29127, 29133, 29035, 29127, + 23139, 13071, 12263, 23139, 12127, 12277, 29133, 21573, 12277, 29133, 28326, 29127, 30000, 28326, 29127, + 20439, 19024, 16981, 19653, 19024, 16981, 21215, 21573, 16981, 21215, 28326, 29127, 30000, 28326, 29127, + 22456, 29209, 27076, 19653, 19024, 16981, 23876, 17077, 16981, 27328, 17360, 22798, 30000, 17360, 22798}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/output_ref_data.h deleted file mode 100644 index 7444b48f..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/output_ref_data.h +++ /dev/null @@ -1,10 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. -#pragma once -#include - -const int16_t maxpool_int16_2_output_ref[75] = { - 26744, 30000, 25978, 18918, 30000, 26022, 18918, 29695, 30000, 16706, 29695, 30000, 23178, 27432, 30000, - 26744, 30000, 23365, 18918, 30000, 23365, 18918, 29695, 30000, 16706, 29695, 30000, 16525, 27432, 30000, - 26744, 30000, 23365, 18918, 30000, 23365, 18918, 29695, 30000, 10149, 29695, 30000, 26864, 19243, 30000, - 25040, 24157, 24800, 11929, 24157, 25534, 24233, 30000, 25534, 24233, 30000, 25534, 26864, 30000, 21214, - 25040, 28226, 24800, 11929, 28226, 25534, 24233, 30000, 25534, 24233, 30000, 25534, 26864, 30000, 30000}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/test_data.h b/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/test_data.h index 31add6d5..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpool_int16_2/test_data.h @@ -1,4 +1,3 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as 
reference. #include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling/config_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling/config_data.h index 634f541d..6af4e258 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpooling/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpooling/config_data.h @@ -1,20 +1,20 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. #pragma once -#define MAXPOOLING_OUT_CH 8 -#define MAXPOOLING_IN_CH 8 +#define MAXPOOLING_BATCH_SIZE 2 +#define MAXPOOLING_INPUT_N 2 #define MAXPOOLING_INPUT_W 22 #define MAXPOOLING_INPUT_H 12 -#define MAXPOOLING_DST_SIZE 144 -#define MAXPOOLING_INPUT_SIZE 2112 -#define MAXPOOLING_OUT_ACTIVATION_MIN -128 -#define MAXPOOLING_OUT_ACTIVATION_MAX 127 -#define MAXPOOLING_INPUT_BATCHES 2 -#define MAXPOOLING_FILTER_X 6 -#define MAXPOOLING_FILTER_Y 5 -#define MAXPOOLING_STRIDE_X 9 -#define MAXPOOLING_STRIDE_Y 5 -#define MAXPOOLING_PAD_X 1 -#define MAXPOOLING_PAD_Y 1 +#define MAXPOOLING_INPUT_C 8 +#define MAXPOOLING_FILTER_W 6 +#define MAXPOOLING_FILTER_H 5 +#define MAXPOOLING_STRIDE_W 9 +#define MAXPOOLING_STRIDE_H 5 +#define MAXPOOLING_PAD SAME +#define MAXPOOLING_ACTIVATION_MAX 127 +#define MAXPOOLING_ACTIVATION_MIN -128 +#define MAXPOOLING_OUTPUT_C 8 #define MAXPOOLING_OUTPUT_W 3 #define MAXPOOLING_OUTPUT_H 3 +#define MAXPOOLING_PADDING_H 1 +#define MAXPOOLING_PADDING_W 1 diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling/input_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling/input_data.h deleted file mode 100644 index f793f516..00000000 --- 
a/Tests/UnitTest/TestCases/TestData/maxpooling/input_data.h +++ /dev/null @@ -1,229 +0,0 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. -#pragma once -#include - -const int8_t maxpooling_input[4224] = { - -16, -95, -94, 9, 32, 22, -110, -83, -94, -38, -56, -127, -4, -109, -5, 55, 72, 9, 9, - -49, 120, -9, 31, 113, -27, 65, -105, -54, -89, -15, -100, 113, 85, -110, 108, 13, 94, 110, - -125, 60, 42, -9, 72, 13, -37, 92, -5, 39, -117, -60, -80, -42, -75, 94, -98, -81, -22, - -72, -66, -32, 6, -6, 4, -105, -5, -21, 99, -46, 105, -61, -87, -10, 104, -73, -56, -99, - -126, 57, -79, -111, 99, 22, 9, -86, 51, 29, -44, 57, -77, 2, -77, 38, 26, 13, 50, - 78, -23, 79, 1, -84, -114, -7, -122, 16, 89, -70, 112, 21, -118, -125, 36, 43, -36, -46, - 75, -26, 60, 52, 83, 35, 47, 7, 16, -32, 102, 111, 40, -64, -49, 42, -53, -26, 71, - -3, -88, -21, 40, 112, -12, 8, -103, -93, -53, 21, 83, -84, -50, -85, 26, -35, -99, 41, - -41, -2, 63, 64, -104, 24, 104, 9, 22, 11, 54, -56, -21, -109, 112, -52, -21, -67, 123, - -36, -54, -10, -65, 37, 101, -111, -41, 88, -85, -125, -121, -91, 99, -127, -80, -72, -108, 39, - -64, -38, -114, 33, -63, 31, 17, 50, -18, -105, -86, -128, -14, 92, -124, -69, -112, -69, -12, - 93, -76, -54, -85, 44, -120, -75, -51, 41, -121, -38, -100, -92, 58, 14, -83, 92, 31, -126, - 94, -36, 89, -69, 115, -58, -103, -40, 31, -29, 83, 103, -93, -66, -64, 123, -47, -94, -70, - -52, 31, 83, 54, 114, -17, -3, -66, 85, 108, -17, 65, -85, 24, -9, 16, -119, -18, 72, - -57, -77, -100, -88, -105, -36, -16, -23, 83, -43, -19, -37, -109, 89, -95, 109, 110, -88, 66, - 54, 49, 24, 85, 76, -62, 8, 54, 67, -49, 39, -50, -121, -60, -12, 91, 14, -102, -75, - -103, 120, -111, 68, 116, -85, 25, -107, -51, 55, 8, -44, 122, 98, -10, 75, 42, 54, -35, - 47, 56, -29, -123, 42, -8, -62, -87, 122, 13, 2, -123, 82, 77, 72, -71, -29, 50, -74, - 10, -121, 11, -76, 
-14, 36, -107, -53, -6, -45, -9, 104, -59, 125, -38, -96, 33, 18, 124, - 53, 34, -72, 24, -81, -112, -32, 20, 77, 77, -20, -87, 82, 9, 63, 80, -14, -37, -98, - -20, 30, -97, -115, -18, 14, 104, 59, 112, -96, -76, -121, -9, 91, 91, 53, -47, 44, -102, - -113, -104, 80, -2, 36, 42, -98, 38, 97, 113, 126, -68, -126, 74, -120, 124, 3, 121, -94, - 125, 9, 113, 72, -22, -20, -56, 117, 103, 100, 34, 110, -84, -50, 103, 13, -35, 44, -82, - 109, -23, 115, 102, -6, 54, -106, 31, 88, 49, -107, -72, -8, -32, -35, -38, -54, 36, 112, - 124, 85, 112, 123, -92, 36, 68, -114, 114, -127, -30, 61, -91, 93, 49, -30, 77, -124, -79, - -6, -26, -120, -34, -6, -78, 106, 95, -95, 40, 116, -64, -32, -98, -32, -87, 82, -81, 57, - 103, 64, 12, -75, 81, -3, -25, 118, 74, -46, 14, 110, 99, 49, 40, 10, 32, -83, 50, - -86, 12, 48, -102, -29, -30, -81, 94, -72, -42, -25, -4, 34, -108, 107, -85, -12, 106, 88, - 24, 50, 84, 96, 105, 75, -67, -12, -122, -106, 113, -118, -94, 91, 36, -107, -82, -99, 53, - -50, 121, 7, 7, 100, 72, 21, 118, 31, -105, -1, -122, 86, 98, -85, -98, 37, -43, 95, - 123, -81, 115, 113, -14, 28, -128, -107, 32, -34, -52, 7, -120, -41, 21, 72, -85, 107, -111, - -88, -62, 126, 7, -30, 63, -69, -4, 81, 39, 123, 23, 52, -4, 4, 35, 74, 28, -76, - -25, -127, -73, -10, -100, 26, -4, 10, -59, -71, 109, -22, 77, 37, 33, -56, 72, -83, -107, - 9, 104, 61, 85, -103, -39, -34, 28, -63, -124, -125, 2, -86, -41, -114, 100, 113, 90, -50, - 61, 111, 94, -92, 104, 51, -126, -8, -98, 125, 42, -117, -33, 91, 33, 59, -7, 91, -124, - 114, -57, 4, 22, -83, -50, -60, 3, -123, -114, 62, 122, 125, 46, -116, -115, -127, -113, 32, - -17, 98, 25, -13, 19, -89, 52, 119, -105, 104, -85, -94, -84, -33, 5, -105, 108, 116, -58, - -60, -39, -18, 118, 55, -101, 115, -5, 29, -78, -110, 26, 98, -82, 69, -126, -104, -64, -73, - 105, -67, -102, 122, -92, 75, -29, 39, -17, -124, 63, -6, 8, 86, 1, 5, 73, -3, 65, - -36, 2, 82, 80, -33, -33, 64, 117, -81, 63, -121, 125, 96, -11, 6, -29, -21, 97, 73, - 97, -65, -68, 43, 
111, 72, 79, -58, 88, 66, 19, -97, -92, 24, -3, 110, -96, -128, 93, - -64, -99, -126, -59, -105, 106, -30, 88, -4, 88, 79, 72, 11, -96, -96, 125, 85, 94, 9, - 48, 93, -114, -10, 89, -94, -31, 37, -66, -22, 5, 32, 15, 81, -101, 51, -126, 21, -41, - 20, 28, 62, -27, 84, -40, -51, 51, -114, -95, -29, -108, 54, 122, -118, 96, -118, -44, -27, - 96, 50, 91, 39, -10, 120, -94, -37, -119, -31, -42, 61, 12, -23, 8, -30, -42, 26, 107, - -52, 62, -85, 66, -89, -79, 64, -117, -43, -44, -42, -87, 66, 100, 88, 126, 118, -85, 18, - 119, 113, 6, 113, -68, 94, 26, -28, -125, 15, -82, 77, 53, 45, 67, 8, 90, 97, -79, - -72, 4, -64, 54, -90, -114, -123, 83, 21, 39, 75, 74, 67, 4, 0, 17, -8, -109, -18, - 60, -70, 17, 0, -74, 50, 52, 68, -13, -66, -87, -70, 46, -2, -24, -108, -51, -77, -85, - 29, -31, 89, -110, 16, 43, -84, -96, -76, -125, 93, -75, -99, -115, 44, 24, 119, 88, 45, - -32, 64, -62, 37, 55, 4, -1, 36, -23, -66, -62, -32, 28, 90, 88, 0, 7, 25, -83, - -116, 96, 101, -35, -39, 53, 68, 74, 19, 102, 105, -53, 1, -23, -16, -33, 102, -33, 58, - 51, -10, -117, 26, 60, -72, 31, -57, 80, 34, 67, 122, -50, -57, 0, 63, -1, -8, 115, - -40, -127, -68, 55, 92, 97, -127, -34, -2, 97, 119, -77, 84, -71, -120, 42, -89, -30, 29, - 70, 19, 53, 67, -60, 25, -68, -30, 94, 111, 34, -20, 114, 53, -31, -115, 37, -15, -101, - 51, 3, 32, 114, -100, 35, -7, 110, 4, -55, 125, -54, -108, -126, -52, -79, -108, 27, 61, - 92, 76, 61, 47, 90, 36, 118, 73, 64, 110, 13, 68, 36, -53, -4, -123, -24, -113, 20, - 18, 24, 32, -69, 33, 115, 61, -23, 57, -60, 29, 88, 35, -124, 40, 97, 9, 20, 76, - -64, 52, -47, 61, 29, 50, 86, -106, -15, 16, 56, 45, 49, 11, -102, -86, -57, -54, 17, - -81, 56, -97, 31, 72, -16, 49, 4, 53, -73, -41, 66, -84, -87, -108, 121, -83, 120, 115, - -101, 120, -104, -10, 23, -128, 5, 123, 122, 54, -98, 100, 79, 115, -94, 71, -122, -11, 41, - 32, -53, -44, -97, -20, 56, 4, -104, -33, -42, -70, -48, 9, -65, 16, 28, -97, -60, 72, - 63, 124, -126, -53, 88, 46, -80, -30, -76, -93, -34, -31, -18, 
-13, 18, -43, 54, -82, 121, - 12, -5, -89, -120, 7, -114, -115, 118, -50, 53, 89, 80, 2, -92, -63, -87, -45, 35, 110, - -91, 39, -87, -80, -15, 122, 28, 114, -79, 110, -116, 43, 33, -102, 35, -19, -3, 120, -53, - 107, 119, -65, -99, -18, 57, 53, 25, 15, -38, 35, 114, 7, -25, 94, 7, 103, -16, -10, - 112, 66, -33, -6, -126, -111, -117, -106, -104, 113, -56, -62, -69, 60, -36, 20, -36, 36, 65, - -105, 48, 27, 43, -94, 39, 122, -44, 23, 59, -33, 32, 20, -45, -27, -15, -111, 118, -94, - 12, -32, -101, 47, 70, 36, -73, 57, -56, 97, -80, -97, 59, 57, 96, 16, 0, -27, -62, - -115, -111, -22, 57, -57, 62, 75, 39, 0, -41, 65, 61, 80, -56, -94, -65, -66, -85, -34, - 81, 126, -54, 52, -79, -36, 6, 8, 49, 28, 29, 45, 45, -25, 110, 23, -118, -108, -39, - 73, 3, -56, 53, -35, 126, -124, 102, -93, 117, -17, -6, 24, 7, -71, 58, -69, -41, -72, - 67, -44, -125, -110, -28, 114, 34, 112, 79, -95, 75, 67, -70, 42, 112, -10, 56, 2, -52, - 21, -86, 69, 40, 12, -90, -4, -97, 82, 75, 31, -38, 65, 109, 55, 14, 28, -70, -104, - 104, 38, 88, -104, -85, 114, 46, -71, -41, -88, 33, -92, 86, -31, 76, 95, -65, 38, 86, - 1, -55, -54, -104, 74, -123, -65, -101, 23, -119, -112, 7, -114, 79, 12, 20, -87, 72, 122, - 49, 122, 109, -88, 19, 59, 2, -126, 24, -93, -112, 107, 106, 72, 67, -95, 112, 89, 99, - 27, -96, -47, -72, 81, 48, -26, -34, 72, 52, 22, -120, 23, 32, 60, -49, 55, -79, 68, - -60, 103, -6, -64, -33, -91, -47, -14, -54, 12, 62, -109, -50, -11, 108, 96, -67, -28, -124, - 20, -42, -10, -62, 66, 68, 88, -87, 91, -14, 118, 83, 34, -78, -105, -20, 32, -34, 12, - 3, 105, -5, 115, -105, -45, -19, 4, 16, 76, -5, -74, 19, 74, 76, -2, 52, 41, -84, - 48, 89, 13, 24, 30, -97, 66, 79, 40, -127, 13, -110, 17, 66, 24, -65, 62, 122, 73, - -95, 32, -32, -126, 123, 116, 80, 47, -58, -88, -34, 111, 111, -104, -5, 52, 40, 10, -98, - 14, -100, -60, -21, 85, -127, 94, -41, 63, 87, 71, 29, -81, 54, 93, 41, -116, 49, -5, - -20, -18, 8, 15, 85, -1, 1, -87, 46, -44, -114, 83, -36, 32, -93, -106, 109, -28, -115, - 89, 
-21, 70, -17, -42, -69, -55, 117, -67, -39, -44, 116, 63, -59, -49, 98, 91, 105, -52, - 116, 68, -78, 0, 116, -34, 119, 80, -30, 88, -96, -49, 56, 105, 2, 48, 27, 96, 107, - -30, -30, 28, -81, -4, 120, -27, 4, -119, 28, 16, 92, 54, 88, 10, 122, -2, 29, -100, - -6, 56, 52, -124, -9, 58, 102, 12, -112, -25, -73, 36, 107, 115, -124, 43, -90, -25, -68, - 108, 72, 5, 109, 26, -15, -42, -88, 50, 73, 86, -1, 15, -6, 75, -41, -108, -92, 11, - 58, 43, 122, -82, -80, -51, 91, 119, 34, -57, -92, 26, -62, -43, 36, -15, 113, -87, 95, - 57, -65, -123, 68, -103, -3, -120, 104, -4, -44, 87, 28, -39, -77, 74, -50, -107, 20, -45, - 37, -97, 58, -52, 26, -67, -77, 103, 51, -16, 33, 120, -17, -23, -36, -10, -9, 94, 50, - 4, -96, -125, -17, 25, -14, 104, -68, -27, 120, -127, -7, 123, -5, 115, 91, -7, 99, 113, - 47, 46, -81, 88, -54, -6, 36, -110, -69, -117, -55, -3, -5, 57, -90, 94, -87, 104, 41, - 74, 120, 39, 79, 42, -62, -92, -78, -92, 26, -45, -78, -127, -21, -10, -91, 66, 59, -124, - -19, 116, 30, -103, -1, 62, 104, 28, 95, 101, -16, 108, 123, -112, 42, 91, -23, 42, 99, - -62, 10, 28, -17, -62, -70, -8, -85, 122, -6, 19, -98, -99, 7, -7, -7, -9, 35, -50, - -105, -8, 45, 9, 52, -75, 102, -125, -93, 63, -58, 34, 44, 61, -127, 58, -67, -47, -91, - -67, -6, 96, -61, -4, -110, 42, 22, 123, 16, 70, -92, -55, -113, -53, 50, -62, 6, -62, - -54, -70, 54, 100, -26, 67, 36, 78, 62, -16, 48, 79, -53, -21, -3, -102, 82, 98, -80, - -23, -74, -19, -123, -53, -104, -111, 52, 39, 76, -94, -52, 59, 37, 84, -38, 106, 113, 5, - 23, 46, -51, 75, -70, 64, 60, 51, 24, -102, -16, -70, 100, 37, 64, -83, -126, -37, 90, - 49, -49, -93, -81, -35, 67, -86, 103, -43, 24, -55, 85, 99, -105, 67, 97, -85, -83, 123, - 102, -24, -58, 57, 68, 123, -42, -74, 101, -115, -14, 93, -126, 44, -104, -5, -85, -86, 2, - 73, 17, -114, -48, -78, -120, -61, 1, -78, -27, -28, -66, 18, -7, -66, -13, -8, -116, 120, - 11, -100, 32, 15, 36, -1, 53, -81, -78, -54, -54, 68, 124, -29, -25, 110, 109, 1, 8, - 68, -88, -91, 5, 5, -24, 
-81, 95, 33, -112, -31, 97, 101, -122, -1, -104, -35, -109, -51, - -50, -33, 7, 109, 73, -27, -28, 45, 5, 13, -116, 99, 6, 14, -106, 84, -12, -7, 79, - -33, -82, 54, -119, -66, -116, 89, -50, -67, -49, -103, 6, -76, 40, -74, 56, -78, -60, -90, - -98, -16, -115, 18, 33, -73, -124, 121, 122, -98, 19, 79, -56, 114, -71, -86, -115, -39, -10, - -122, 66, 108, -48, -126, 40, -51, 126, 6, -84, -110, 116, 112, 5, 74, -48, 104, 46, 16, - 90, -102, -126, 17, 89, 20, 110, -49, -116, -97, -83, -94, 94, -5, 68, 1, 78, -106, -67, - -4, 115, 36, 30, 14, -93, -1, -121, 36, -49, -33, -105, 19, -42, 106, -120, -93, 103, -99, - -39, 20, -67, -28, 23, 81, 113, -125, 90, 68, 42, -13, 6, 43, -108, 17, 105, -18, -69, - 46, 40, -110, 0, 50, 108, 115, 9, -99, -24, -92, -19, 86, 17, -98, 81, -71, 34, -82, - 109, 84, -59, 120, 72, -10, -35, -6, 28, 112, -99, -120, 40, -117, 50, 55, -67, -91, -66, - -95, -98, 71, -109, 118, -101, 97, -52, -87, 51, 86, 32, 46, -91, -82, 9, -99, 25, -111, - -78, 97, 107, 61, 67, -29, -94, -75, -73, 33, 117, 90, 104, 32, 69, -29, -89, 19, 104, - -9, 2, 5, -86, 41, -55, -88, 25, -123, -47, -109, -3, -71, -110, -99, -75, 105, -123, 75, - -47, -90, 119, -93, -44, -49, 5, -12, 34, -35, 42, -84, 17, 50, -8, 61, 84, 19, 9, - 78, -126, -8, -7, -68, -53, -47, 83, 98, -34, -36, 32, 35, -63, -90, 36, -39, 60, -25, - -38, -93, -5, 98, -99, 73, 16, -125, -110, -77, 51, 5, -100, 58, 26, -100, -128, 109, 97, - -104, 52, 87, -102, 12, 97, 28, 31, 33, 64, 89, -20, -61, -65, 113, -102, 14, 122, -45, - -67, 58, 124, 73, -100, -71, -58, -32, 1, -16, -31, 65, 115, 42, 36, 76, -32, -29, 78, - -86, -4, -93, -29, 12, 77, -104, -14, -65, -88, -124, -124, 81, 71, 69, -7, -70, -107, 8, - -6, 113, -61, -49, -48, 44, -41, -98, 94, -79, 90, -45, -27, 110, -83, -18, -12, -10, 102, - 56, 30, -65, -53, 51, 37, 97, -62, 26, 46, 93, 100, 72, 97, 45, 12, -42, -115, -48, - -100, -68, 17, 36, 71, 75, 45, 14, -67, 31, 67, 107, 44, 90, -113, 37, 42, -117, 56, - 38, -80, -85, 85, -53, 98, -47, 120, 
-30, -74, 24, -115, -68, 38, 114, -123, 90, -109, -110, - -68, 51, -88, -116, -18, 72, 20, -54, 31, 54, -84, 50, 12, 119, 73, -110, -70, 126, -21, - -10, -122, 28, 106, -58, 81, 42, 51, 114, 56, -121, 121, -125, -19, -67, 68, 41, 30, 38, - 82, -114, 9, -89, 55, -78, -88, 74, 24, -32, 13, -74, -10, 77, -57, -86, -81, -12, -95, - -63, -12, 122, -109, -100, -24, 70, 125, -86, -100, 6, -1, -73, -101, 18, 23, 120, -110, 40, - -42, 115, 44, -30, 54, 34, -12, 60, 7, -104, -61, -21, 76, 77, -58, -117, 38, 23, 70, - -78, 28, -11, -20, 58, 33, -7, 106, -44, -74, 69, 4, -34, 1, 61, 83, 65, -8, -6, - 44, -97, 55, -81, -26, -63, 46, 101, 57, -124, 123, -104, 39, 90, 108, 30, 93, 19, 70, - -126, -85, -73, -6, -113, 54, -2, 39, 28, -30, -29, 43, 1, -60, 1, 2, -113, -63, 107, - -69, -4, -97, 27, 120, 71, -116, 63, -126, -19, 83, 62, -84, 124, -43, -120, 9, 70, 118, - 106, 16, 59, -111, 1, 20, 109, 44, 72, -32, -41, -122, -104, 3, 35, 101, -25, 82, -25, - -29, -90, -15, 25, -104, 23, 115, -62, 70, -85, -101, 6, 54, 30, 70, 122, -87, -24, 86, - -48, -41, 38, -51, -21, 26, -6, 8, -90, 125, -40, 94, -91, -32, 103, -63, -123, -28, -69, - -33, -13, 7, -116, -33, -65, -90, -119, 114, -51, 85, -55, -78, -12, -6, 68, 96, -102, -127, - 69, 15, -28, 59, -73, 87, 25, 90, -5, 41, -40, 14, 92, -18, -122, -38, -23, -61, 52, - -60, -8, -97, -1, 57, 3, -63, -92, 124, 73, 25, 57, 63, -85, 51, -82, -50, 45, -74, - 113, 108, 36, 22, 64, -47, 113, 114, -46, 80, 59, -55, 3, 119, -58, 88, -19, -90, 75, - -15, 89, -94, -69, 94, -14, -114, 86, 109, 57, -19, 4, -61, -18, -118, -101, 68, -80, -23, - 70, -97, 69, -123, -120, -62, 95, 111, 23, -30, 123, -37, 48, 86, 30, 80, 117, -127, 30, - 86, 77, 75, 73, 90, -55, 12, 11, 82, -88, 125, -52, -8, 57, -77, 122, 115, 13, 8, - -20, -35, 17, 9, -8, 113, -45, -38, -98, 18, -69, 32, -97, 54, 104, 101, 107, 11, 60, - -50, 89, 13, 21, -109, 51, -41, -1, -96, 12, -38, -58, -119, -78, 62, 96, 25, -2, 25, - -106, 61, 116, 118, 119, 48, -109, -108, -52, -108, -14, -42, 
76, -93, 76, -20, -35, -35, 116, - -5, -102, -79, 108, 63, 48, -25, 67, 96, 30, 84, 104, -36, -36, 76, -108, -12, 77, 11, - -112, 114, 11, -54, 85, -61, -7, 59, -127, 25, -118, 5, 60, -12, -66, 53, 77, 33, -3, - 126, -115, -123, 102, 11, -94, 32, -74, -14, 87, 49, -122, -111, 88, -24, -77, 37, -101, -86, - -2, 63, -18, -119, -3, 115, -66, 96, 19, 118, 37, 105, -79, -115, -48, 107, -114, 98, -98, - 90, 69, 91, -35, 16, 19, 106, 25, -67, 115, 4, 62, -86, 52, 52, -108, 36, 46, -27, - -6, 4, -11, 28, 112, 17, 107, 74, 9, -35, -15, 19, 73, 102, -26, 64, -41, 30, 70, - 48, 97, 46, 107, 77, 50, 13, 117, 47, 101, 36, -77, -100, -22, 39, -101, 124, -46, 70, - 60, -17, 1, 9, -114, 15, -69, -102, 73, -89, -84, 119, 45, -40, 71, 17, -85, 74, -70, - -30, -7, 47, -39, 21, -25, -124, -22, 53, -57, 53, 96, 96, 31, -76, -22, 28, -107, 111, - 99, 101, -86, 32, 44, -62, -97, 90, -80, 119, -54, -5, 40, -51, -25, 60, 94, 20, 114, - -85, 24, 120, 93, -45, -8, 90, -67, -75, 78, 29, -94, 77, -35, -57, 79, -18, 55, 118, - 15, -29, 50, -68, -39, -100, -42, 54, -46, -96, 85, 42, -22, -109, 117, 126, 88, 43, 32, - -34, -88, -66, 5, -74, -11, 118, -20, -27, 93, 103, 9, -80, -47, -52, -99, 31, -117, -31, - -118, -127, -19, 58, -2, -62, -75, -67, 118, -47, -16, 9, 39, 111, -116, 70, -92, -23, 66, - -72, 123, 35, -123, -28, -52, 74, -4, -67, 55, 105, -121, -78, -71, -34, 62, -38, -108, 49, - -16, -45, -35, -123, -66, 100, 48, 96, 28, 28, -37, -3, 101, -85, 121, -78, 61, -87, 17, - 36, 110, -88, 95, 124, 101, 1, 74, 0, -76, -40, -108, -43, 111, 30, -76, -5, -79, -2, - -95, 110, 112, -16, 9, 118, 24, 3, 52, 61, -61, -43, 6, -31, 18, 107, 115, -107, -49, - 0, 101, 38, 96, -93, 87, 102, 103, 25, -61, 87, 23, 29, -35, -71, -37, 26, -33, 113, - 126, -118, -42, 117, -56, 60, -92, 63, -16, -50, -15, 33, 14, 74, 108, -76, 78, 46, -24, - -9, -57, 64, 77, -113, -94, 123, 8, 14, -119, 46, 79, -23, 35, -16, 97, -77, -54, 37, - -95, 64, -43, 56, -62, 57, 6, -83, -50, 63, 27, 24, -13, 60, -26, -72, -77, 60, 
-68, - -103, -28, 88, -124, -41, -85, -99, 38, 108, 30, 107, 107, -71, -2, -5, -52, 103, -102, -56, - -11, 74, 2, -112, -113, -51, 64, -106, -83, -5, 32, 66, -99, 119, -28, 121, -104, -14, 97, - -2, 32, 56, 63, -28, -122, 72, 112, -67, 42, -23, 3, -105, -42, 13, -29, -35, 120, -47, - -84, 2, 73, 106, -38, 74, 9, -119, 126, 97, 41, -37, -90, -27, -43, -95, 90, 102, 23, - -39, 118, 18, -63, 104, 12, 101, 71, 84, -128, 92, 12, 110, 29, -109, 68, 107, 105, 25, - -71, 85, -91, -94, -42, -22, -114, -110, 123, -120, 73, -5, -25, -118, 13, -41, 123, -40, -79, - 50, 54, -101, -93, 13, -15, -101, -108, -17, -97, -10, 98, -23, -44, 70, 87, 35, 87, -67, - -71, 58, -28, -55, 10, 105, -120, -51, 32, -70, -97, 99, 82, 71, 22, 13, -9, 69, 6, - 104, -15, 15, 22, -24, -35, 78, 54, 34, -67, 82, 88, 39, -56, -53, -14, -61, -8, -107, - -44, 110, 36, -45, -12, 116, 10, 46, 6, 11, -54, -88, -11, 95, -111, -74, -39, -98, -8, - -5, 104, -104, -60, -70, 83, -82, -44, -7, 120, 55, -118, 43, 113, 92, -9, 116, -5, 26, - -128, -99, -28, 11, 32, 69, -122, 61, -117, 48, -101, -53, 52, -95, -10, -79, 105, -34, 35, - 60, 0, 81, -16, 106, -89, 97, -9, -39, 2, 50, -45, 109, 92, 125, -98, -92, -2, -77, - -37, 87, -108, 51, -71, -56, 106, -46, 42, -127, -81, 32, 57, -16, -90, 88, 38, -94, -44, - 31, -59, -36, 48, 50, 69, 102, -76, -53, -123, 57, -35, 61, -127, 29, -115, -69, 108, -118, - 107, -70, -22, -87, 23, 75, 102, -32, 75, 72, -101, -101, -54, 109, 10, 29, -110, -50, -13, - -68, 11, -90, 120, 84, -34, -3, 108, 16, -34, -73, 120, -101, -33, 10, -10, 47, -111, 16, - 114, 16, 12, 44, 108, 41, 82, -113, 71, 15, -85, 2, 70, 82, -88, 35, 39, -22, -113, - -107, 6, 89, -92, 56, 11, 44, -124, -121, -37, 80, -85, 23, 121, -10, -112, -4, -21, 112, - -49, -3, 50, -117, 14, 80, 24, -21, 95, 115, -48, -121, 14, -30, 0, 89, 15, -18, 15, - 12, 49, -88, -89, -56, -94, 40, -61, -29, 23, -85, 98, -82, -30, 92, -97, -42, 99, -117, - -57, 111, -77, 116, 113, 102, -80, 97, -103, 5, -70, 6, 16, -21, 67, 12, 57, 98, 
-59, - -119, 94, -44, -81, 45, -21, 37, 114, -27, -86, -118, 67, -67, 114, 14, -38, -72, 73, 80, - 67, -88, 109, 86, 0, 86, -11, 88, -65, 89, -83, -31, 33, 40, -38, 108, -54, 35, 51, - 126, -126, 83, -118, 32, -7, 21, 50, 65, 109, 104, 97, 100, -44, 109, 7, -41, 64, -91, - 83, 79, 120, -95, 95, -60, 84, -2, 72, 71, -68, -99, 18, 51, -83, 113, -9, -70, -80, - 28, -39, 106, 86, -49, -104, -73, 5, -27, -94, -34, -109, -114, 15, 85, 81, -99, -7, -38, - -123, 97, -80, -68, 9, -65, 24, -70, 11, -81, 105, 55, 34, -59, 100, -75, 53, -28, -16, - 28, 13, 105, 23, -43, -6, 69, 33, 81, 84, -12, 6, -122, 90, -116, 16, 32, 75, 76, - -119, 101, -95, 21, -33, 82, 89, -106, 75, 95, 37, 63, -58, 21, -45, -112, -24, -36, 90, - 29, -100, -95, -102, -95, -39, 26, -80, -36, 125, -10, -65, 100, 124, -37, 18, -109, -128, -17, - -120, -45, 51, -42, 125, -70, -37, -121, -57, 66, 114, 103, -21, -29, -62, -10, 44, -59, 78, - -9, -75, 21, 19, 16, 81, -58, -127, -33, 11, -25, -119, -86, -66, 110, 40, 116, -22, -96, - -37, -66, -66, -70, -13, -41, 123, 33, 82, -44, -73, 48, -52, -107, 52, 126, -65, 63, -95, - 37, 45, 13, 115, -33, 70, -13, 62, -47, -89, -92, 21, 82, 52, 124, 82, 110, 71, -71, - 78, -99, 3, -85, -112, 54, -38, -47, 25, -117, -31, -82, 43, -106, -12, 24, -80, -47, 88, - 77, 21, -42, 37, -46, 101, 1, -59, -27, 7, -12, -78, 89, 67, 124, -53, -99, -20, -97, - 8, 65, 66, -124, 112, -95, -4, -16, -8, 49, -37, 35, -70, -123, 106, -82, 124, -87, -92, - -110, -8, 82, -93, 47, 46, -89, 100, -101, -2, -113, 81, 97, -12, 48, 1, 12, -128, 105, - -30, -31, -105, 123, -51, 78, 108, -51, -55, -35, -44, 74, 114, -80, 17, -117, -76, 117, -31, - 35, 35, 98, 103, -128, -71, 21, 19, 103, 109, -88, 102, 65, 31, -86, -74, -30, -2, 10, - -59, -28, 90, -17, -28, -117, -112, 54, -12, 0, -41, 126, 63, 78, -27, 28, -13, 109, -72, - 97, -63, -70, 102, -113, 52, -123, -29, 120, -120, -95, 56, 126, -21, -93, 13, -1, 114, -100, - -24, -92, -105, 118, -118, 88, 25, 44, 85, 103, -49, -5, 100, 80, 95, 24, 59, -70, 
-73, - -45, -41, 123, 31, -21, -4, -28, 113, 9, 125, -80, 92, -120, 97, 37, 100, 13, 40, -37, - -127, 41, 10, -40, 53, 26, 115, 123, -83, -98, -56, 60, 63, 109, -61, -14, -93, -29, -66, - -44, -79, -115, 71, -89, 23, -52, -78, 49, -85, 94, -56, -52, -52, 123, -67, -105, -18, 29, - -3, -21, -43, 82, -1, 126, 44, -33, 91, 24, 63, -49, -79, 25, -48, -74, -75, 93, -66, - 10, 43, 65, 116, -58, -126}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling/input_tensor.h b/Tests/UnitTest/TestCases/TestData/maxpooling/input_tensor.h new file mode 100644 index 00000000..10559250 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpooling/input_tensor.h @@ -0,0 +1,229 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int8_t maxpooling_input_tensor[4224] = { + -40, -12, 29, -114, -89, -22, 58, 33, -74, -8, -104, -89, 26, -38, 22, 113, -23, 64, 42, + 48, 24, 51, 109, 91, -11, 87, 48, -57, 106, 65, 50, 14, -70, -97, -41, 49, -41, 102, + -38, 116, 25, -4, -76, -87, -95, -114, 113, 74, -80, 118, 10, 25, 54, -107, -109, 120, 46, + -105, 47, -16, 101, -125, -111, -10, -103, 24, -75, -100, -122, -108, -103, -18, 67, -81, -91, 31, + -77, -46, -25, 38, -75, -45, 56, -17, -11, -104, -6, -117, -103, 37, 36, 92, -45, -39, -86, + 17, -41, -3, 82, 63, -124, -6, 33, -89, 107, -123, 103, -104, -59, -9, -94, 42, -6, 8, + -101, 50, 66, 116, 23, 11, -50, 94, -23, 17, 99, -94, 58, -115, -31, 69, -127, -126, 0, + -5, 58, 124, -70, 22, -124, 120, 92, 58, -100, 43, 23, 37, 124, 90, 116, -95, -15, -80, + -73, -29, -41, 81, -108, -95, -35, -58, -66, 101, 46, -110, -22, 9, 64, -62, -91, 110, 43, + -33, -107, 114, -84, -54, -68, -89, 63, 112, -112, 37, 29, -62, 97, -101, 116, 51, 13, -16, + -23, -121, -51, 1, -123, -7, -34, 78, -43, 74, 61, -105, 117, 1, -83, 69, 92, -71, -101, + 88, 58, -30, 9, 43, -71, 3, -83, -8, -74, 34, -44, 
49, 72, -69, 66, -9, -36, 79, + 96, 59, -114, 32, 100, -47, -113, -94, -47, -54, 49, 81, -52, -86, -32, -23, -95, -35, 9, + 39, 45, -91, -84, -66, 60, -84, 81, 96, 23, -15, -21, -125, -62, 40, 101, 78, 93, 8, + -40, -79, -34, 34, 93, -95, -57, -63, 2, 36, -94, -12, -32, -55, -37, 27, -45, 96, 61, + -72, -96, -116, -124, -112, 51, 74, 19, 29, 77, -76, -23, -44, 112, 12, -2, 21, -50, -9, + -45, 43, -20, 84, 25, 71, -75, 124, 84, -23, -101, 102, 112, -29, 28, -102, 32, 43, -70, + -70, 46, -69, 37, 18, -7, -87, -104, 67, -77, -73, -49, -85, 75, -8, 33, 13, 14, 45, + 3, -63, -48, 46, 78, 31, 38, 23, 82, 107, -89, -45, 92, -78, -36, 37, -46, -125, -9, + -89, 100, -107, -21, 59, 1, 38, -43, 59, -121, 93, 121, 45, -117, -61, 84, 107, 125, 108, + -7, 113, -114, 42, -77, -61, -41, -29, 64, 125, 45, 37, 33, 97, 52, 14, -72, 126, -121, + -6, 106, 3, 43, -49, 67, 41, 115, -22, -64, -117, -80, -32, -7, 45, -88, -39, -64, -79, + 66, -60, 9, 96, -123, -80, -31, 109, 77, -60, -67, 69, -80, -122, -114, 80, -125, -108, -88, + -94, -122, 2, 22, 24, -103, -113, -98, -31, 48, 47, -66, 63, 16, 47, 28, -111, -117, -67, + -5, 119, -86, -43, 60, 97, 37, -79, -48, -35, -103, 39, -14, 40, 17, 37, -35, -111, 61, + 110, -78, 24, 58, -103, -64, -8, 115, 53, -61, 117, -42, 0, 116, -47, -53, -58, -15, -58, + 115, 78, -79, -85, 126, 48, -7, 17, -84, -84, 69, 122, 1, 119, -10, -84, -97, 3, 113, + 25, 114, -90, -58, -81, -107, -72, -63, -13, 119, 83, 63, -125, -8, 75, -75, -94, 35, 15, + 70, -74, 26, 5, -40, 84, 7, -24, 27, -108, 89, -58, 93, -123, -99, -113, 89, 25, 56, + 66, 74, 10, -57, 118, -70, -100, 77, -85, -52, -43, 72, -95, 105, -70, -41, -92, -2, -18, + -5, 104, 78, -25, 49, -69, 89, 5, 8, -109, -104, -124, -118, 5, -71, 24, 118, -98, -3, + 90, 60, 1, 95, 125, -15, 112, 109, 119, 64, -42, 47, 111, 46, -22, -35, -4, -99, 57, + -8, 111, 119, 85, -124, 48, 27, 22, 41, -115, 65, 126, 59, 57, -84, -29, -35, 122, -60, + 17, -83, 106, -84, -46, 8, 125, -7, 40, -60, 88, 123, 23, 33, 89, -4, -105, 
104, -82, + 20, -125, -36, -26, 104, -3, 77, 113, -40, -9, -32, 49, 93, 27, -38, -64, 19, -112, -44, + -54, -37, -79, -31, 97, -75, -80, -13, -112, -3, -45, 6, 59, 101, -17, 92, 6, 38, -46, + -20, -11, 104, -33, -98, 64, 76, -53, 0, -115, -108, 114, -100, 45, 106, -99, 73, 87, -57, + -47, -49, 102, 23, 33, 67, -82, 71, 47, 10, -65, 37, 103, 38, -14, 116, 39, 50, 20, + -110, 40, 97, 85, 28, -61, -68, -75, 104, 23, 125, 94, -50, 32, 55, -103, -2, -99, -49, + 5, 115, 53, 120, -27, 28, 105, 123, 43, 73, 61, -5, -93, -30, -13, 11, 5, -105, 55, + -66, 32, 102, 37, -31, 14, 32, 116, 23, 66, -101, 4, 32, 25, -111, 26, 80, 3, 100, + -121, 88, -88, 111, 61, -118, -104, 82, -42, 97, -7, 3, 60, -51, -81, 125, -123, 64, 50, + 85, -99, -115, 82, 26, 24, 73, -23, 117, 82, 93, -44, -111, 108, -61, 116, 89, 82, -47, + -125, 58, 125, 43, 66, 35, -46, -57, -113, -105, -49, 125, 63, -11, 63, 104, -20, 47, 46, + -37, 51, -72, 3, 104, 13, -29, 55, -65, 93, 46, 7, 120, 5, -81, 63, 25, 112, 29, + 57, -76, -29, 48, -113, 27, 38, -56, 64, 125, -2, 68, -41, 123, 111, -101, 86, -61, -115, + -78, -34, -123, 10, -101, 117, -24, 95, -113, 82, -5, 27, 88, -121, -23, 41, -89, 29, -92, + 21, -95, -124, 4, 0, 101, -37, 56, -88, -69, -83, 67, -81, 120, 49, -115, 30, -22, -90, + -5, 0, -15, 23, 19, 92, 55, -112, -68, 70, 13, -86, 110, 38, -7, 41, -104, -104, -21, + 70, -102, -116, -28, 126, 93, -69, -102, 63, -31, 56, -25, -66, -53, 77, -67, -29, 41, -7, + -9, 12, 94, -57, 12, -104, 121, 20, -25, 41, -26, -82, -49, -121, -123, 50, 77, 58, 50, + -85, -97, 95, -34, -13, -16, -53, 77, 105, -123, 93, 124, 82, -117, -99, -111, 24, 45, -30, + -91, -93, 121, -43, -92, -73, -48, 64, 117, -72, -89, -126, -75, -52, 72, 112, -123, 83, -99, + -26, -66, 116, 109, -127, -16, -121, -37, -5, 81, -114, 79, -61, -36, -69, 37, -94, 20, 34, + 124, 55, -6, -10, -72, -103, 91, 18, 62, -105, 98, 32, -53, 88, 80, 82, -80, -31, 59, + -96, 72, -87, 31, -95, -76, 31, -67, -53, -59, 4, -83, 34, -2, -76, -75, 107, 38, -64, + 95, 
-76, -42, -100, 31, 12, 9, 23, 87, -107, 28, -30, 26, 89, -57, 56, -106, -110, -55, + 60, -23, -99, -125, 51, -57, -125, 44, 95, 100, 77, 24, -110, -42, 96, 22, -32, -29, 15, + 111, 87, 69, 53, -100, -114, 61, 69, -98, 9, 84, -23, -79, -86, -88, 117, -48, -71, -91, + -109, 97, 14, 124, 109, 54, -86, 53, 99, -84, -24, 124, -39, 60, -48, -75, 99, -89, -123, + -91, 79, 53, -115, -30, 3, -72, -116, 0, 25, -83, -95, -110, -104, 85, -20, 2, 22, 18, + -109, -38, 103, -76, -77, 118, 48, -118, -99, 82, -58, -26, 59, -25, 121, -54, -15, 83, -87, + -32, -12, 123, 116, -27, -31, -71, -3, 22, -68, 76, 123, 34, 4, 40, -69, 52, -66, 44, + -82, 81, -72, 123, -68, -80, -74, -104, -11, 39, -51, -28, 73, -32, 116, -97, 7, -16, -42, + 94, -35, 74, -89, -62, -109, 72, -98, -105, 97, -85, -4, 29, 122, 109, 59, -77, -24, -118, + -40, 105, -16, -60, 17, 60, -70, -83, 10, -57, 98, 82, 98, 115, 68, -71, -124, 27, 10, + 76, 111, 21, -81, -23, -68, -60, -100, -53, -13, 86, -128, -11, -59, 18, -82, 88, 33, 23, + 63, 126, 88, 126, 47, 93, 18, 42, -39, -120, -1, -40, -13, -43, -37, -57, 96, 9, 102, + 105, -87, 4, 49, 29, 96, 110, -33, -10, 21, -110, -105, -108, -122, 106, 46, -96, -97, 20, + 126, 43, -50, -104, 82, -35, 113, -118, -26, 47, -102, -118, 78, -20, 25, 77, -38, 67, -100, + -115, 18, -21, -29, 11, -34, 43, 10, -8, 109, 75, 110, -79, 69, 87, -57, 65, -24, 83, + 55, 93, 86, -92, -89, -66, 13, -54, -47, -31, -79, -4, 74, 91, 26, -112, -11, -98, 0, + 4, -46, 29, -28, 127, 87, 75, -42, 43, 111, -106, 58, -83, -48, 0, -10, 119, -3, -5, + 44, 16, 81, -79, 2, 79, -76, -17, 39, 119, 84, -104, 121, 49, 0, -103, 92, -66, 21, + 38, 47, 93, 7, -70, 67, -42, -4, -67, 61, 39, 49, -105, 94, 62, 124, -126, -65, 121, + -11, 42, -35, 68, -83, 17, -117, 116, -79, -40, -37, 34, 13, -56, -71, 72, 46, 70, 7, + 69, 56, -10, -92, 88, -96, -106, 46, 102, -85, -45, -41, -16, 23, 94, -10, -110, 58, -36, + -54, -63, 27, -20, -79, 82, -49, 76, -43, 125, 108, 77, -114, 86, -96, -27, -34, -100, 17, + -127, 34, 110, 
81, -46, -49, -127, -34, 88, -94, -10, 36, 59, 97, 126, -94, 28, 93, -2, + 0, 53, -113, -67, -39, 96, 79, -126, -18, 110, 67, -11, -93, -75, 52, 76, -121, 34, 112, + 46, -72, -12, -126, 48, 99, -68, -111, 121, -9, 125, -17, -3, 98, 121, -92, -53, 80, -71, + -74, -109, -45, 95, 97, 79, 127, 94, -3, 43, 50, -61, -21, 102, 42, 15, -75, -95, -87, + 33, 58, 72, 99, -91, -63, 18, 59, -116, 43, 105, -105, 74, 25, -81, 90, -96, -39, -10, + -100, 114, 24, 26, -116, -79, -52, -19, 2, -15, 41, 22, -67, -14, 124, -40, -22, -3, -8, + 74, -90, -32, 22, 40, 48, -122, -98, 32, -72, -23, -8, 3, 30, 25, -67, 76, -8, 38, + 23, -101, 124, 93, -8, 93, 28, 39, -72, -43, -79, -107, 9, 23, -77, -118, 39, 45, -120, + -69, -110, -121, -64, 49, -72, -119, -63, -35, 83, 122, 33, -45, 103, 37, -47, -68, 104, 92, + 26, 31, 4, -62, -94, 99, -41, 43, -78, 126, 24, -117, -127, 76, -32, -87, -70, -37, -115, + -105, 39, 18, -111, 100, -64, -68, -61, 69, -64, -16, -44, 33, -52, -109, -94, 72, 7, 121, + 103, -96, -80, -77, 29, 51, 31, -32, -74, -101, -99, -62, -108, -128, 88, -61, 110, -19, -91, + 88, -94, 16, -7, -28, 32, 32, -50, 87, -68, -37, -77, -82, 33, 125, 12, -114, 53, -105, + 54, 38, -71, -41, -91, -54, 101, 34, 6, 117, 17, 61, -69, 109, 6, 120, 42, -41, -39, + 115, -59, -122, -16, 11, -114, -81, -79, 42, 72, -89, -50, -54, -16, -17, 38, -109, 22, -63, + 30, 33, -93, -18, -61, 45, 64, -56, -48, -106, 44, -27, -53, -51, 64, -107, 85, -39, 52, + -75, -2, -42, 54, 53, 88, 5, 88, 51, -65, 27, 52, 19, 101, -43, -107, 48, 50, -6, + -110, -13, -14, -16, -103, -22, -57, 65, 76, -83, -3, 5, 11, 28, -115, -34, 58, 43, 16, + -51, -87, -115, -108, -57, -127, -85, 106, 66, 52, -71, 116, 27, 56, 78, 115, -45, -17, -18, + -114, -85, -47, 108, -90, 109, 48, -42, -103, 59, 0, -4, -11, -24, 109, -69, -91, -29, 30, + 25, -81, 20, 117, -52, 39, 38, -33, -80, 55, 127, -70, 2, -59, 39, -32, -14, 37, -128, + -51, 21, 37, 18, 52, -45, -95, -24, -87, -57, 66, -111, -112, -15, 114, 32, 30, -52, 116, + 66, 15, -12, 
98, 52, -82, -42, 34, -44, -52, 90, 106, -67, -40, 73, -85, -48, -94, 99, + -66, 83, 52, 127, 7, 86, 16, 99, -57, 36, 25, -77, 41, -119, -1, 7, -93, 50, -96, + -25, 74, 40, -121, 63, 96, -82, -97, -69, 56, 50, -55, 61, 73, 50, -127, 30, -111, 9, + 58, 66, 119, 118, -117, -85, -1, 23, -53, 84, -89, 83, -55, 17, 76, -25, 80, -88, 81, + 116, -75, -115, 99, 85, -83, -78, 35, -127, -121, 12, -97, 16, 43, 94, 93, -97, -85, 53, + -3, -106, 31, -23, -77, 22, 73, -52, 63, -48, 5, 2, -71, 35, -17, 122, 52, -5, -3, + -18, 87, 74, -46, 94, -106, -71, -71, 88, -57, -81, 7, -10, 52, 29, 73, 67, 58, 97, + -127, 31, -91, 89, 94, 102, 41, 123, 42, -108, 23, 99, 51, -7, -112, -14, 19, -81, -33, + -6, 1, -107, -48, 114, -114, 58, -49, 68, -63, 54, -90, -42, 19, -37, -111, -36, -64, -115, + 126, -20, 38, 55, 48, -90, 53, 58, 50, -68, -124, -15, -104, -101, 34, -68, 109, -112, -76, + -117, 118, -57, 12, 36, -74, 7, 2, 116, -98, 98, 6, 52, -27, 34, -12, 14, -78, -111, + -85, 38, 38, 106, -37, 79, -22, -71, -83, -95, 108, 55, 64, -1, -126, -57, 7, 70, 68, + -82, 99, -59, -109, -111, 88, -103, -98, 41, 77, -49, -104, -38, 124, 90, 29, -102, -96, -89, + -77, -71, -113, -69, -99, 8, 119, 12, 73, -5, 40, 49, 109, 18, -81, -75, -71, 17, 9, + 56, -106, 56, 60, -48, 115, 100, -112, 80, 48, 33, 79, 73, 72, -37, 3, -71, -109, 37, + -42, 108, 122, 26, -35, -39, 11, 49, -65, -93, -23, -41, -18, 116, 70, 3, -94, -31, 54, + 17, -27, -40, 18, -100, -118, -102, -94, -90, -49, -126, 117, 59, -60, 61, 110, 102, -43, -8, + -56, -55, -70, -90, 69, 81, 2, 100, -35, -71, 61, 74, 28, 49, -17, -116, 79, -73, -79, + -71, 50, -99, 17, -62, -64, 115, -53, -28, -125, 12, 124, 115, 83, -94, 80, -32, 75, -106, + -46, -107, 119, -95, -52, -83, -83, 17, -118, -118, -1, 106, -64, 33, -5, -9, -25, -64, -37, + 24, 76, -65, -70, -61, -65, 25, 69, 47, 112, -9, 95, 40, 104, -1, -77, 72, 26, -108, + -74, -52, -119, 52, -109, -38, 69, -113, 54, -18, -65, -111, -3, -43, -17, -25, 30, 64, -23, + -19, -5, -110, 61, -101, 34, 65, 
19, -111, -52, 55, -120, 89, -50, 7, 26, -73, 53, 45, + 29, 106, -41, -104, -126, -6, -84, 87, 109, 127, 106, 51, -50, 71, -11, 57, -127, 89, -55, + 119, -47, 21, -120, 89, -54, 93, 111, -33, 52, -16, -87, 37, 123, 10, -25, -111, 109, -16, + 61, 73, 115, 22, -27, -43, 33, 118, -22, 28, -94, 40, -18, -94, 115, -97, 105, 85, 52, + -113, -6, -89, -49, -16, 38, 20, 90, 55, -117, -11, -29, 46, 126, 5, -74, 49, -54, 75, + -5, 47, -79, 77, -126, -23, -100, 93, -94, -87, 10, 92, 98, -79, -99, 46, 85, -43, 55, + 92, 126, 52, -86, 65, -66, -99, 28, 80, -53, 117, 79, -7, 103, -18, 124, 55, -12, -57, + 103, -69, 40, -27, 45, 66, -86, 4, -69, -62, 117, 105, -24, 60, 86, 65, -74, 3, -61, + 29, -121, -72, 119, -115, 70, 63, 21, 12, -119, 1, 58, 93, 0, -54, 35, 121, -76, 8, + 116, 124, 75, -45, -123, 73, -5, 57, 39, 70, -88, 63, 81, 35, -37, 77, -3, -87, 93, + 111, -117, -106, 44, -91, -87, 51, -9, 80, 104, -41, -6, -34, -9, -28, -77, 65, 70, -44, + 68, -80, -58, -54, -109, 42, -32, 44, 67, 76, 125, -56, -117, 89, -20, -31, 115, -125, 90, + 3, -55, -12, 28, -32, 4, -93, 85, 76, -68, -48, 2, 105, -15, 85, 90, -38, -90, 124, + -73, -87, 117, -12, -24, -71, -51, -6, -112, 68, -71, 6, -40, -84, -41, 109, -68, -2, -4, + -9, -77, 14, -33, 58, 123, 55, 30, -113, 3, -80, 71, -6, -91, -43, -83, 112, 117, -100, + -3, 116, 15, 47, -36, -68, 27, -115, 114, -109, -91, -89, 21, -36, 35, 17, -70, 77, 47, + 114, -6, -27, -124, 98, -63, 79, -109, 41, -20, 0, -75, 68, -40, 8, 73, -2, 39, 60, + -49, -67, 110, -41, -100, -87, -17, -39, 125, -77, 17, -18, 79, 114, 8, -42, -81, 58, -106, + 103, -36, -9, 24, -18, 63, -110, 52, 106, 67, 52, 61, 73, 93, 11, -10, 61, -102, -5, + 38, -6, -33, 62, 61, -9, -47, 49, 55, 80, 103, -36, 96, -8, 15, 50, 89, 115, -38, + 88, -19, 52, -70, 23, 49, -8, -107, -123, -113, -19, -21, -67, -53, 118, -117, -31, -26, -53, + 46, 65, -124, -31, 75, 103, 101, -3, 60, -17, -41, -61, -103, 67, 32, -34, -111, -54, -15, + 114, -109, -122, -12, 36, -70, -62, -74, 61, -57, -86, 45, 
-123, 33, -49, -93, 50, -75, -52, + -128, -50, 55, -15, 43, 31, -96, 119, -83, -93, -107, 83, -107, -118, -13, 98, 98, -46, 51, + 61, 52, 60, 115, 104, -115, 4, 23, -82, 73, -74, 38, 42, -33, -104, 106, -64, -37, -74, + -84, 7, -62, -78, -59, 33, 6, -38, -26, -123, -99, -81, -66, -24, 94, -53, 37, 92, 77, + -48, -8, 16, -11, -13, -29, -73, -107, -127, 74, 75, -39, -11, -56, 52, 91, -53, -25, -81, + -117, 7, 92, 83, 38, 17, 34, -112, 50, -79, -3, -76, 3, -55, 53, 113, -43, -103, 108, + 109, -56, 80, 86, -28, -107, 106, 36, 86, 52, -59, 64, -13, 91, -69, -19, 121, 85, -52, + -123, -106, -83, 75, -27, 21, -22, -10, -113, -124, 7, -90, 44, 97, -24, -96, 0, 48, -16, + -5, -111, 108, 43, -32, -38, -57, 64, -54, 26, 71, 49, -39, -108, -87, -82, -84, 6, 47, + -22, 64, 92, 47, 79, 58, 103, -9, -16, 100, -102, -67, -98, 27, -56, 84, -36, 39, -79, + -81, 92, -38, 109, 121, 45, 103, -43, -46, -80, 4, 85, -118, 94, 38, 123, 70, 3, 10, + -16, -13, 34, 36, -65, 111, 1, 92, 108, -49, 65, -7, -127, 11, -54, 63, 56, 44, -45, + 49, 11, 33, 54, 91, 59, 69, 31, -122, -54, 110, -59, 7, -42, -69, -100, 69, -61, -48, + 0, -126, 11, -112, 51, -8, -66, -85, -77, 33, 119, 3, 83, -66, -87, -105, 14, -16, -126, + -35, -84, 61, 113, -116, -104, 54, 9, 73, -24, 62, 50, 25, 24, 54, -15, 89, 16, 8, + 107, 98, 85, -31, 107, -122, 115, -2, -7, 79, 94, -11, -96, 8, 109, 119, 66, 40, 44, + 2, -121, -62, -71, 29, 125, 67, 106, -115, -5, -33, 33, 24, -42, -47, 71, 88, -25, 32, + 42, -79, -108, -42, -51, -21, -98, 16, -59, 39, 94, 94, -126, 111, 56, -15, -98, -34, -23, + -71, -121, -110, -20, -61, -58, -53, 52, -50, -35, 12, -72, 79, -51, 38, -107, 77, -112, -105, + 67, -24, 42, 105, 53, -80, 57, -17, 8, -25, 111, 34, -55, 91, 111, 117, 96, -37, -36, + 52, -81, 24, -89, -109, -95, 126, 83, 13, 115, -44, 47, 15, -56, 113, 68, -52, 88, -53, + 25, -75, 28, -15, -123, 67, -99, -94, 1, 91, -87, -104, 47, -18, 1, -65, -59, -17, -3, + -118, -119, 12, 26, -76, -12, 122, -6, 38, 74, 37, 33, 96, -64, -64, -55, 
-25, -16, -122, + -31, 44, 28, -106, 116, -78, -49, 89, -40, 57, -103, -38, -9, -36, -35, 113, 98, 32, -106, + 9, -91, 6, 18, -66, -93, 49, 39, 81, 22, 7, 117, 11, 40, 76, 31, 39, -108, -118, + 58, 82, 113, 112, 98, -120, -84, 115, 14, -9, -26, 9, 98, 36, 55, 100, -75, -89, -38, + 16, 42, -3, -7, 10, 29, -21, -5, -56, -110, -60, 6, 81, -86, -89, 32, -102, -65, 21, + -80, 91, 82, 106, 48, -41, 124, -67, 66, 85, -23, 26, 109, -79, -110, -88, -64, -62, -12, + -57, 21, 19, -123, -44, -4, -93, -83, -51, 37, 37, -31, 109, -83, -40, 55, -37, -50, -13, + 32, 80, -70, -106, -76, -58, -22, -120, -37, 120, -10, 48, -16, 20, -33, 24, -38, 116, 26, + 82, 63, -117, 101, 111, -60, -17, 30, -56, 1, -111, 18, -94, -108, -38, -54, -53, -2, 32, + -2, 60, 64, -121, -96, 109, -8, 52, -70, 81, 45, -86, 100, -11, -106, 64, 120, 107, 38, + -95, -63, -74, 58, -12, -2, 17, -41, 54, -48, 123, 52, -26, -83, -90, -97, -44, 109, 44, + -119, 109, 115, -45, -70, 54, 48, -75, 91, -70, -19, 107, -70, -100, -84, 66, 3, -88, 59, + -104, -21, -79, 25, -49, -44, 100, -118, 76, -97, -95, -89, 90, -7, 73, -84, -51, -111, 98, + 75, -90, -122, 54, 55, -94, 100, 64, 105, -66, 77, -28, -34, -50, 24, -46, -73, -72, -39, + 124, 69, 93, 65, -73, -72, 75, -98, 110, 126, -102, 90, 87, -48, 41, -73, 101, 6, 38, + 5, 7, 41, -102, -94, -109, 53, 101, -76, 5, -110, 20, 115, -33, -14, -100, -54, -27, -39, + 5, 52, 67, -32, -96, 44, 59, -79, -31, -33, 105, -84, 0, 41, 91, -76, -51, 106, -40, + 99, -27, 36, -119, 14, 9, -60, -90, -127, -27, 1, -15, -55, 35, 116, 119, 63, -3, 114, + -128, 43, -22, 27, -62, -26, 102, 81, -50, -8, -20, -71, -16, -79, 29, 108, 22, 12, 54, + 86, 6, 10, 71, 12, 46, 104, 79, 121, 70, -93, 60, 115, 26, -22, -126, 77, 25, 63, + -66, -93, 111, -73, -106, 8, -108, 53, -59, 95, 11, -58, -63, -93, 9, 95, -101, 51, 53, + 46, 107, -3, -30, -41, 12, -92, -33, 27, 108, -64, 36, -50, -77, 92, 124, -18, 92, 123, + 121, 63, 70, -121, 105, 61, 4, -81, 65, -112, -125, -58, 22, 96, -51, 35, -76, -112, -18, + 
54, 8, 90, -7, -37, 105, -59, -11, 23, 52, -39, -64, 80, 98, -8, 114, 94, 16, 87, + 87, 23, -113, -12, -18, 47, -73, -20, 82, 122, 77, 7, -63, 70, -22, 17, -60, 20, 93, + -65, 107, 67, 74, -68, -64, 19, -117, -119, 17, 4, 33, 76, 45, 65, 99, -89, -27, 63, + 113, 56, 114, 81, -86, -102, -43, -29, -22, -25, 37, 57, -73, -9, 21, 47, 1, 116, 1, + -94, 106, -80, 57, 27, 53, 32, -13, -102, -91, -38, 75, 122, 110, 42, -22, 66, 7, -42, + 87, -103, 47, -93, 66, -65, -53, 24, -9, -10, 49, 88, 102, -74, 72, -98, -77, 25, 111, + -119, 43, 110, 16, -41, -81, -107, -115, 121, -18, 61, -76, 72, 12, 35, -65, 111, 31, -15, + 16, -26, -58, -6, -49, -22, 63, -77, 108, 110, 109, -47, -85, -51, 117, 17, 5, -64, 92, + 46, -25, 0, 27, -24, 5, 37, -42, -117, -3, -75, 56, 6, 36, -122, 97, 0, 10, -66, + 48, 120, 52, 75, 18, 72, -13, 76, 39, 100, -125, 118, -55, 110, -108, 63, -51, 67, 21, + -105, -119, 112, -17, 90, -70, 8, 32, -31, 103, -92, -119, -107, 83, 125, 7, 53, -85, -18, + 88, 6, -57, -43, 68, -22, -63, -45, -47, -112, 69, -68, -51, 74, -76, 104, 52, 0, -41, + -109, -88, -112, -17, -31, -25, 116, -74, -11, 26, -14, 45, -88, -84, -49, -73, 17, -50, -18, + 67, -123, 116, -95, 77, 121, -53, 54, -96, -83, -90, -117, 71, -109, -5, -35, 67, 24, -48, + 12, -28, 66, 77, 1, -79, -120, -76, -111, 116, 116, 72, -10, -25, 97, -73, 101, -63, -87, + -49, 6, 58, 12, 68, 124, 11, 10, 20, -83, -20, 9, -118, 53, -79, 76, 54, -98, -85, + -106, -61, 62, -89, 31, 28, 68, 27, 98, 15, 64, -51, -127, 101, -15, 91, -99, -7, -91, + -23, 43, -2, 91, 119, -12, 87, -26, -49, 72, 18, -1, 125, 14, 20, 77, 89, -90, -78, + -81, 101, 99, -77, -71, -103, -105, 89, 27, -88, -69, 53, 93, -56, 38, 37, 51, 56, 123, + -38, 125, -39, -85, 58, 106, -2, 98, -63, 0, 85, 78, 65, -27, 19, -25, -89, 84, -83, + 20, 11, 71, -57, -110, 108, 113, 72, 106, -125, -2, 6, 4, -16, 12, -31, -28, 18, 92, + -28, -85, -111, -100, 102, -34, -105, 114, -21, 120, 102, 83, 95, 122, 32, -33, 16, -93, -67, + 57, -115, -120, 126, 122, 72, 94, 
-110, -9, -10, -58, 18, 2, -71, -62, 38, -28, 54, -108, + -126, -27, -92, 7, 114, -27, 35, 63, -96, -115, -89, 125, -114, 47, 2, -22, 84, -66, -126, + -47, -57, -45, 4, -101, 21, -110, -90, 35, 82, -24, -120, -56, -50, 36, 112, 7, 71, 34, + -84, -75, -5, -63, 18, 70, 80, 39, -52, -4, -86, 102, -19, -60, -86, 99, 59, -66, 31, + 15, 35, 21, 115, 93, 45, 26, 86, -49, 47, 28, 77, -39, 7, -36, 13, 21, 39, -31, + -94, 28, -84, 74, -75, 118, 74, 90, -98, 86, 14, 118, 68, 66, 58, 114, -63, -118, -51, + -72, -73, -90, 72, 48, 18, 32, 0, 31, 63, 94, -54, -16, -34, 35, 7, 78, -55, 32, + -18, -19, -84, -88, 85, 27, -89, -104, 81, -5, -73, 111, 29, -123, -104, -58, 24, 74, -88, + 78, 121, -21, -9, 85, -68, -72, 96, -109, 17, 48, 24, 70, -26, 94, -89, 46, 34, 26, + 47, -101, 48, -58, -24, -106, 120, -72, -81, -104, 31, -42, -47, -100, -93, -71, -32, -119, -79, + -24, 67, -101, -93, -62, -87, -87, -97, -114, 109, 122, -74, 78, -17, -90, -18, 18, -31, 119, + -119, -99, 62, -23, 85, -81}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling/output.h b/Tests/UnitTest/TestCases/TestData/maxpooling/output.h new file mode 100644 index 00000000..d3d32b9c --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpooling/output.h @@ -0,0 +1,13 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int8_t maxpooling_output[144] = { + 97, 107, 125, 118, 121, 125, 109, 116, 107, 125, 119, 126, 109, 119, 123, 96, 116, 122, 126, 120, 116, + 114, 115, 114, 121, 102, 120, 103, 125, 124, 116, 111, 126, 108, 125, 109, 124, 116, 121, 126, 125, 127, + 124, 116, 127, 122, 109, 121, 118, 124, 93, 76, 93, 80, 84, 124, 123, 117, 122, 116, 100, 94, 126, + 115, 120, 54, 127, 118, 125, 72, 109, 109, 106, 125, 124, 110, 106, 124, 121, 108, 118, 125, 108, 122, + 124, 115, 118, 117, 115, 126, 117, 114, 116, 124, 115, 124, 111, 119, 123, 121, 113, 115, 113, 115, 111, + 105, 108, 122, 124, 126, 114, 124, 122, 116, 113, 116, 119, 120, 111, 122, 111, 124, 102, 110, 108, 110, + 112, 98, 108, 125, 85, 125, 89, 118, 103, 123, 116, 122, 120, 121, 126, 122, 85, 125}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling/output_ref_data.h deleted file mode 100644 index f7a1ee86..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpooling/output_ref_data.h +++ /dev/null @@ -1,13 +0,0 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
-#pragma once -#include - -const int8_t maxpooling_output_ref[144] = { - 124, 104, 108, 125, 120, 110, 118, 113, 124, 117, 125, 123, 113, 110, 85, 123, 94, 114, 123, 122, 122, - 125, 112, 107, 115, 120, 118, 122, 119, 122, 118, 113, 121, 115, 125, 126, 122, 120, 72, 118, 114, 120, - 123, 116, 122, 122, 126, 125, 120, 113, 87, 123, 109, 115, 117, 123, 95, 116, 124, 122, 123, 120, 109, - 91, 104, 121, 122, 103, 82, 98, 113, 120, 118, 114, 115, 122, 126, 104, 110, 126, 124, 122, 122, 115, - 109, 112, 125, 120, 111, 117, 120, 124, 117, 107, 109, 95, 90, 120, 124, 126, 125, 111, 125, 118, 120, - 126, 123, 108, 119, 120, 126, 116, 116, 120, 119, 123, 121, 121, 92, 118, 117, 124, 123, 125, 116, 125, - 102, 99, 123, 114, 126, 123, 126, 113, 124, 125, 105, 124, 123, 100, 126, 116, 105, 106}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling/test_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling/test_data.h index b00cc3f6..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpooling/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpooling/test_data.h @@ -1,5 +1,3 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. #include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_1/config_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_1/config_data.h index ec03d596..a96c9501 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_1/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_1/config_data.h @@ -1,19 +1,20 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
#pragma once -#define MAXPOOLING_1_OUT_CH 3 -#define MAXPOOLING_1_IN_CH 3 +#define MAXPOOLING_1_BATCH_SIZE 1 +#define MAXPOOLING_1_INPUT_N 1 #define MAXPOOLING_1_INPUT_W 9 #define MAXPOOLING_1_INPUT_H 5 -#define MAXPOOLING_1_DST_SIZE 3 -#define MAXPOOLING_1_INPUT_SIZE 135 -#define MAXPOOLING_1_OUT_ACTIVATION_MIN -128 -#define MAXPOOLING_1_OUT_ACTIVATION_MAX 127 -#define MAXPOOLING_1_INPUT_BATCHES 1 -#define MAXPOOLING_1_FILTER_X 9 -#define MAXPOOLING_1_FILTER_Y 5 -#define MAXPOOLING_1_STRIDE_X 1 -#define MAXPOOLING_1_STRIDE_Y 2 -#define MAXPOOLING_1_PAD_X 0 -#define MAXPOOLING_1_PAD_Y 0 +#define MAXPOOLING_1_INPUT_C 3 +#define MAXPOOLING_1_FILTER_W 9 +#define MAXPOOLING_1_FILTER_H 5 +#define MAXPOOLING_1_STRIDE_W 1 +#define MAXPOOLING_1_STRIDE_H 2 +#define MAXPOOLING_1_PAD VALID +#define MAXPOOLING_1_ACTIVATION_MAX 127 +#define MAXPOOLING_1_ACTIVATION_MIN -128 +#define MAXPOOLING_1_OUTPUT_C 3 #define MAXPOOLING_1_OUTPUT_W 1 #define MAXPOOLING_1_OUTPUT_H 1 +#define MAXPOOLING_1_PADDING_H 0 +#define MAXPOOLING_1_PADDING_W 0 diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_1/input_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_1/input_data.h deleted file mode 100644 index 3d1703ba..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_1/input_data.h +++ /dev/null @@ -1,12 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. 
-#pragma once -#include - -const int8_t maxpooling_1_input[135] = { - 78, 64, 103, 10, 4, -53, 75, 11, 108, 122, -73, -83, -121, 70, -7, -47, -9, -100, 18, 4, - -124, 4, -6, -11, -54, -75, -102, 60, -119, 34, -78, 74, -41, -117, -40, 11, -74, -23, 32, -79, - -66, 70, -122, 83, 60, 65, -73, -106, -83, 11, -82, -75, 18, 100, 81, -38, 20, -40, 102, 4, - 121, 44, -26, -120, 126, 120, -107, 54, 16, -97, 30, -59, -97, 4, -121, 76, -22, -21, -124, -109, - 73, 15, -60, 74, 111, 121, -88, -86, 65, 117, 22, -38, -94, -86, 29, -115, -29, -101, -34, -80, - -44, -74, 110, 98, -124, 109, -16, -108, 94, -93, 106, -57, -123, -102, 87, 29, -26, 89, -8, 114, - -72, -7, 56, 0, -48, -41, -33, -28, 26, 89, 121, -103, 9, 98, 58}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_1/input_tensor.h b/Tests/UnitTest/TestCases/TestData/maxpooling_1/input_tensor.h new file mode 100644 index 00000000..e1513394 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_1/input_tensor.h @@ -0,0 +1,13 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int8_t maxpooling_1_input_tensor[135] = { + -77, 75, 50, 17, 65, -126, -70, 109, -76, -52, 89, -11, 54, -120, 8, -29, -120, -91, -6, -52, + 103, -55, -73, -116, -108, -14, -48, -104, 41, -17, 36, -4, -71, 124, 125, 32, -54, 65, 30, -55, + -41, -105, 3, 79, 106, 109, 45, 99, 87, 60, 10, 4, 22, -82, -92, 66, -103, -85, 41, 43, + 15, -88, -3, -89, -116, -49, 5, 43, -86, 118, -87, 60, -101, -19, 45, 46, -28, -114, 114, 126, + 96, 12, 1, -48, -36, -21, -25, 56, 28, 85, -1, 111, 49, 96, 52, -63, -36, -31, -106, 55, + 124, 5, -49, -48, 70, 56, -86, 85, -99, -100, -108, -5, 52, 33, 123, -105, 65, 46, -21, -89, + -50, 99, 64, 108, -53, 3, 103, 14, -39, 18, -17, -119, 117, -105, 84}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_1/output.h b/Tests/UnitTest/TestCases/TestData/maxpooling_1/output.h new file mode 100644 index 00000000..a79c5c20 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_1/output.h @@ -0,0 +1,6 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int8_t maxpooling_1_output[3] = {124, 126, 106}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_1/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_1/output_ref_data.h deleted file mode 100644 index e3200aca..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_1/output_ref_data.h +++ /dev/null @@ -1,5 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. 
-#pragma once -#include - -const int8_t maxpooling_1_output_ref[3] = {122, 126, 120}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_1/test_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_1/test_data.h index 31add6d5..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_1/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_1/test_data.h @@ -1,4 +1,3 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. #include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_2/config_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_2/config_data.h index 1d5f8738..a2abe59f 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_2/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_2/config_data.h @@ -1,19 +1,20 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
#pragma once -#define MAXPOOLING_2_OUT_CH 5 -#define MAXPOOLING_2_IN_CH 5 +#define MAXPOOLING_2_BATCH_SIZE 1 +#define MAXPOOLING_2_INPUT_N 1 #define MAXPOOLING_2_INPUT_W 12 #define MAXPOOLING_2_INPUT_H 1 -#define MAXPOOLING_2_DST_SIZE 60 -#define MAXPOOLING_2_INPUT_SIZE 60 -#define MAXPOOLING_2_OUT_ACTIVATION_MIN -128 -#define MAXPOOLING_2_OUT_ACTIVATION_MAX 127 -#define MAXPOOLING_2_INPUT_BATCHES 1 -#define MAXPOOLING_2_FILTER_X 3 -#define MAXPOOLING_2_FILTER_Y 1 -#define MAXPOOLING_2_STRIDE_X 1 -#define MAXPOOLING_2_STRIDE_Y 2 -#define MAXPOOLING_2_PAD_X 1 -#define MAXPOOLING_2_PAD_Y 0 +#define MAXPOOLING_2_INPUT_C 5 +#define MAXPOOLING_2_FILTER_W 3 +#define MAXPOOLING_2_FILTER_H 1 +#define MAXPOOLING_2_STRIDE_W 1 +#define MAXPOOLING_2_STRIDE_H 2 +#define MAXPOOLING_2_PAD SAME +#define MAXPOOLING_2_ACTIVATION_MAX 127 +#define MAXPOOLING_2_ACTIVATION_MIN -128 +#define MAXPOOLING_2_OUTPUT_C 5 #define MAXPOOLING_2_OUTPUT_W 12 #define MAXPOOLING_2_OUTPUT_H 1 +#define MAXPOOLING_2_PADDING_H 0 +#define MAXPOOLING_2_PADDING_W 1 diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_2/input_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_2/input_data.h deleted file mode 100644 index b9578614..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_2/input_data.h +++ /dev/null @@ -1,8 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. 
-#pragma once -#include - -const int8_t maxpooling_2_input[60] = {75, -52, -42, -30, 56, 64, 106, -36, 120, -3, 34, -105, 69, 75, -39, - 15, 93, -71, 39, 34, -11, 65, 22, 59, 106, 105, 45, -116, -75, 123, - -65, 75, -61, 13, -25, -123, 59, 110, -65, 86, -108, -107, -17, 38, 27, - -1, -115, -123, 75, -75, 68, 52, 12, -35, 116, -68, 22, 15, 76, -81}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_2/input_tensor.h b/Tests/UnitTest/TestCases/TestData/maxpooling_2/input_tensor.h new file mode 100644 index 00000000..1a3b7984 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_2/input_tensor.h @@ -0,0 +1,9 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int8_t maxpooling_2_input_tensor[60] = { + 64, 30, -83, 7, -56, 108, 104, -115, -54, 104, 37, 103, -102, 3, -71, 123, -82, 67, -42, 114, + -39, 27, -76, 72, -23, 94, -51, -22, 87, -66, -87, 11, -89, 81, -109, -108, 88, 17, 32, 59, + 107, 18, -125, 50, -40, 60, 77, 3, -33, -63, 37, -44, 23, 33, -86, 5, -70, 79, -101, 9}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_2/output.h b/Tests/UnitTest/TestCases/TestData/maxpooling_2/output.h new file mode 100644 index 00000000..8a3f16cc --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_2/output.h @@ -0,0 +1,9 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int8_t maxpooling_2_output[60] = {108, 104, -83, 7, 104, 108, 104, -83, 7, 104, 123, 104, 67, 3, 114, + 123, 103, 67, 72, 114, 123, 27, 67, 87, 114, 94, 27, -22, 87, -23, + 94, 88, 17, 87, 59, 107, 88, 17, 81, 59, 107, 88, 17, 50, 59, + 107, 77, 23, 50, -40, 60, 77, 79, 33, 9, 37, -44, 79, 33, 9}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_2/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_2/output_ref_data.h deleted file mode 100644 index 17bccf5e..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_2/output_ref_data.h +++ /dev/null @@ -1,8 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. -#pragma once -#include - -const int8_t maxpooling_2_output_ref[60] = {75, 106, -36, 120, 56, 75, 106, 69, 120, 56, 64, 106, 69, 120, 34, - 34, 93, 69, 75, 106, 105, 93, 22, 59, 123, 105, 75, 22, 59, 123, - 105, 75, 110, 13, 123, -65, 75, 110, 38, 86, -1, 59, 110, 75, 86, - 68, 52, 12, 75, 116, 68, 52, 15, 76, 116, 68, 52, 15, 76, 116}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_2/test_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_2/test_data.h index 31add6d5..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_2/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_2/test_data.h @@ -1,4 +1,3 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. #include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_3/config_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_3/config_data.h index a879d548..c3f67c5e 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_3/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_3/config_data.h @@ -1,20 +1,20 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). 
-// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. #pragma once -#define MAXPOOLING_3_OUT_CH 2 -#define MAXPOOLING_3_IN_CH 2 +#define MAXPOOLING_3_BATCH_SIZE 1 +#define MAXPOOLING_3_INPUT_N 1 #define MAXPOOLING_3_INPUT_W 9 #define MAXPOOLING_3_INPUT_H 1 -#define MAXPOOLING_3_DST_SIZE 30 -#define MAXPOOLING_3_INPUT_SIZE 18 -#define MAXPOOLING_3_OUT_ACTIVATION_MIN -128 -#define MAXPOOLING_3_OUT_ACTIVATION_MAX 127 -#define MAXPOOLING_3_INPUT_BATCHES 3 -#define MAXPOOLING_3_FILTER_X 1 -#define MAXPOOLING_3_FILTER_Y 1 -#define MAXPOOLING_3_STRIDE_X 2 -#define MAXPOOLING_3_STRIDE_Y 1 -#define MAXPOOLING_3_PAD_X 0 -#define MAXPOOLING_3_PAD_Y 0 +#define MAXPOOLING_3_INPUT_C 2 +#define MAXPOOLING_3_FILTER_W 1 +#define MAXPOOLING_3_FILTER_H 1 +#define MAXPOOLING_3_STRIDE_W 2 +#define MAXPOOLING_3_STRIDE_H 1 +#define MAXPOOLING_3_PAD VALID +#define MAXPOOLING_3_ACTIVATION_MAX 127 +#define MAXPOOLING_3_ACTIVATION_MIN -128 +#define MAXPOOLING_3_OUTPUT_C 2 #define MAXPOOLING_3_OUTPUT_W 5 #define MAXPOOLING_3_OUTPUT_H 1 +#define MAXPOOLING_3_PADDING_H 0 +#define MAXPOOLING_3_PADDING_W 0 diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_3/input_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_3/input_data.h deleted file mode 100644 index 17026874..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_3/input_data.h +++ /dev/null @@ -1,9 +0,0 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
-#pragma once -#include - -const int8_t maxpooling_3_input[54] = {7, -57, 106, 28, 40, 106, 70, 36, 71, -96, -69, -43, 75, -96, - -38, 38, -117, -104, -108, -72, -79, 57, 0, 72, -31, 83, 109, -3, - -23, -59, -3, -65, -35, -116, -56, -46, -120, 41, 117, 92, -122, 4, - 30, -16, -22, 118, 84, 67, -44, -76, 92, 120, 48, 125}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_3/input_tensor.h b/Tests/UnitTest/TestCases/TestData/maxpooling_3/input_tensor.h new file mode 100644 index 00000000..8b28c52d --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_3/input_tensor.h @@ -0,0 +1,7 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int8_t maxpooling_3_input_tensor[18] = + {14, -113, -75, -4, 8, 5, -103, 28, 112, -33, -40, -91, 124, -14, -39, -64, 114, -32}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_3/output.h b/Tests/UnitTest/TestCases/TestData/maxpooling_3/output.h new file mode 100644 index 00000000..6c0e7f15 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_3/output.h @@ -0,0 +1,6 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int8_t maxpooling_3_output[10] = {14, -113, 8, 5, 112, -33, 124, -14, 114, -32}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_3/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_3/output_ref_data.h deleted file mode 100644 index 07558d83..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_3/output_ref_data.h +++ /dev/null @@ -1,8 +0,0 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
-#pragma once -#include - -const int8_t maxpooling_3_output_ref[30] = {7, -57, 40, 106, 71, -96, 75, -96, -117, -104, - -108, -72, 0, 72, 109, -3, -3, -65, -56, -46, - -120, 41, -122, 4, -22, 118, -44, -76, 48, 125}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_3/test_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_3/test_data.h index b00cc3f6..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_3/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_3/test_data.h @@ -1,5 +1,3 @@ -// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). -// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. #include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_4/config_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_4/config_data.h index c4ad05c0..c809f15b 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_4/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_4/config_data.h @@ -1,19 +1,20 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
#pragma once -#define MAXPOOLING_4_OUT_CH 2 -#define MAXPOOLING_4_IN_CH 2 +#define MAXPOOLING_4_BATCH_SIZE 1 +#define MAXPOOLING_4_INPUT_N 1 #define MAXPOOLING_4_INPUT_W 1 #define MAXPOOLING_4_INPUT_H 20 -#define MAXPOOLING_4_DST_SIZE 14 -#define MAXPOOLING_4_INPUT_SIZE 40 -#define MAXPOOLING_4_OUT_ACTIVATION_MIN -128 -#define MAXPOOLING_4_OUT_ACTIVATION_MAX 127 -#define MAXPOOLING_4_INPUT_BATCHES 1 -#define MAXPOOLING_4_FILTER_X 1 -#define MAXPOOLING_4_FILTER_Y 3 -#define MAXPOOLING_4_STRIDE_X 1 -#define MAXPOOLING_4_STRIDE_Y 3 -#define MAXPOOLING_4_PAD_X 0 -#define MAXPOOLING_4_PAD_Y 0 +#define MAXPOOLING_4_INPUT_C 2 +#define MAXPOOLING_4_FILTER_W 1 +#define MAXPOOLING_4_FILTER_H 3 +#define MAXPOOLING_4_STRIDE_W 1 +#define MAXPOOLING_4_STRIDE_H 3 +#define MAXPOOLING_4_PAD SAME +#define MAXPOOLING_4_ACTIVATION_MAX 127 +#define MAXPOOLING_4_ACTIVATION_MIN -128 +#define MAXPOOLING_4_OUTPUT_C 2 #define MAXPOOLING_4_OUTPUT_W 1 #define MAXPOOLING_4_OUTPUT_H 7 +#define MAXPOOLING_4_PADDING_H 0 +#define MAXPOOLING_4_PADDING_W 0 diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_4/input_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_4/input_data.h deleted file mode 100644 index 6ae7d49c..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_4/input_data.h +++ /dev/null @@ -1,7 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. 
-#pragma once -#include - -const int8_t maxpooling_4_input[40] = {-117, -127, -44, 5, 13, 5, 26, -115, -33, -102, 91, 45, 68, 52, - 60, 93, 96, -73, -29, -46, -128, 62, -108, 20, 67, 84, 109, -67, - -70, -99, -10, -60, -55, -9, 56, -60, -74, -52, -126, 14}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_4/input_tensor.h b/Tests/UnitTest/TestCases/TestData/maxpooling_4/input_tensor.h new file mode 100644 index 00000000..898f2bae --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_4/input_tensor.h @@ -0,0 +1,8 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int8_t maxpooling_4_input_tensor[40] = {-24, 61, 69, 14, -64, 27, -31, -49, 18, 96, -82, 4, 67, 93, + 64, 74, 24, 70, 17, 26, -40, -69, 35, -58, -47, 93, 103, 61, + 10, 50, 56, 94, -90, 1, -123, 115, -87, 28, 78, -35}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_4/output.h b/Tests/UnitTest/TestCases/TestData/maxpooling_4/output.h new file mode 100644 index 00000000..1853bd76 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_4/output.h @@ -0,0 +1,6 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int8_t maxpooling_4_output[14] = {69, 61, 18, 96, 67, 93, 35, 26, 103, 93, 56, 115, 78, 28}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_4/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_4/output_ref_data.h deleted file mode 100644 index 5a2ee35f..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_4/output_ref_data.h +++ /dev/null @@ -1,5 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. 
-#pragma once -#include - -const int8_t maxpooling_4_output_ref[14] = {13, 5, 91, 45, 96, 93, -29, 62, 109, 84, 56, -9, -74, 14}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_4/test_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_4/test_data.h index 31add6d5..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_4/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_4/test_data.h @@ -1,4 +1,3 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. #include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_5/config_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_5/config_data.h index 7af33c3c..dcc0c4f8 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_5/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_5/config_data.h @@ -1,19 +1,20 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
#pragma once -#define MAXPOOLING_5_OUT_CH 20 -#define MAXPOOLING_5_IN_CH 20 -#define MAXPOOLING_5_INPUT_W 1 -#define MAXPOOLING_5_INPUT_H 1 -#define MAXPOOLING_5_DST_SIZE 20 -#define MAXPOOLING_5_INPUT_SIZE 20 -#define MAXPOOLING_5_OUT_ACTIVATION_MIN -128 -#define MAXPOOLING_5_OUT_ACTIVATION_MAX 127 -#define MAXPOOLING_5_INPUT_BATCHES 1 -#define MAXPOOLING_5_FILTER_X 1 -#define MAXPOOLING_5_FILTER_Y 1 -#define MAXPOOLING_5_STRIDE_X 1 -#define MAXPOOLING_5_STRIDE_Y 1 -#define MAXPOOLING_5_PAD_X 0 -#define MAXPOOLING_5_PAD_Y 0 -#define MAXPOOLING_5_OUTPUT_W 1 -#define MAXPOOLING_5_OUTPUT_H 1 +#define MAXPOOLING_5_BATCH_SIZE 1 +#define MAXPOOLING_5_INPUT_N 1 +#define MAXPOOLING_5_INPUT_W 3 +#define MAXPOOLING_5_INPUT_H 3 +#define MAXPOOLING_5_INPUT_C 20 +#define MAXPOOLING_5_FILTER_W 1 +#define MAXPOOLING_5_FILTER_H 1 +#define MAXPOOLING_5_STRIDE_W 1 +#define MAXPOOLING_5_STRIDE_H 1 +#define MAXPOOLING_5_PAD SAME +#define MAXPOOLING_5_ACTIVATION_MAX 127 +#define MAXPOOLING_5_ACTIVATION_MIN -128 +#define MAXPOOLING_5_OUTPUT_C 20 +#define MAXPOOLING_5_OUTPUT_W 3 +#define MAXPOOLING_5_OUTPUT_H 3 +#define MAXPOOLING_5_PADDING_H 0 +#define MAXPOOLING_5_PADDING_W 0 diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_5/input_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_5/input_data.h deleted file mode 100644 index 7f6078e2..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_5/input_data.h +++ /dev/null @@ -1,6 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. 
-#pragma once -#include - -const int8_t maxpooling_5_input[20] = {27, 31, 96, 63, -97, -42, -20, 95, 39, -91, - -25, -37, 84, 5, 101, -57, 103, 34, 38, -4}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_5/input_tensor.h b/Tests/UnitTest/TestCases/TestData/maxpooling_5/input_tensor.h new file mode 100644 index 00000000..cf61ab8e --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_5/input_tensor.h @@ -0,0 +1,15 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int8_t maxpooling_5_input_tensor[180] = { + 53, 46, -125, 68, -62, 125, -9, -103, 32, 86, -57, 88, -89, -59, -89, -74, 77, 56, 15, -101, + 27, -55, -48, -29, 99, -43, -83, 45, 15, 2, 113, -76, -46, -98, -81, 92, -89, -76, 6, -81, + 7, -116, -107, 98, -67, 23, -78, 111, 9, -77, -29, 118, 31, -16, 48, -113, -120, 59, 56, 72, + 70, -9, -6, -92, 45, -85, -52, 28, -51, 86, -20, -44, -125, 49, 16, 10, 65, -32, 63, -82, + 53, 41, 46, -54, -60, 21, -83, -70, -123, -55, 83, -106, 22, 123, -74, 37, -94, -75, 62, 121, + -87, -125, 118, -38, -102, 32, 77, -16, 23, 109, -114, 110, 24, 75, 118, -13, 46, 99, -64, -26, + 30, 35, 102, 82, 1, 6, 73, 1, 47, 26, 31, -119, 97, -97, -24, -8, -22, 119, -80, 39, + 18, 83, 33, -42, -117, -110, 12, 59, -105, 48, -2, -92, 100, -67, 113, -5, 9, 120, 8, -36, + -48, -125, 122, 82, -37, -102, 34, -45, -38, 86, 18, 52, -25, 7, -47, -1, 7, 107, 94, -107}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_5/output.h b/Tests/UnitTest/TestCases/TestData/maxpooling_5/output.h new file mode 100644 index 00000000..691a9a77 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_5/output.h @@ -0,0 +1,15 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int8_t maxpooling_5_output[180] = { + 53, 46, -125, 68, -62, 125, -9, -103, 32, 86, -57, 88, -89, -59, -89, -74, 77, 56, 15, -101, + 27, -55, -48, -29, 99, -43, -83, 45, 15, 2, 113, -76, -46, -98, -81, 92, -89, -76, 6, -81, + 7, -116, -107, 98, -67, 23, -78, 111, 9, -77, -29, 118, 31, -16, 48, -113, -120, 59, 56, 72, + 70, -9, -6, -92, 45, -85, -52, 28, -51, 86, -20, -44, -125, 49, 16, 10, 65, -32, 63, -82, + 53, 41, 46, -54, -60, 21, -83, -70, -123, -55, 83, -106, 22, 123, -74, 37, -94, -75, 62, 121, + -87, -125, 118, -38, -102, 32, 77, -16, 23, 109, -114, 110, 24, 75, 118, -13, 46, 99, -64, -26, + 30, 35, 102, 82, 1, 6, 73, 1, 47, 26, 31, -119, 97, -97, -24, -8, -22, 119, -80, 39, + 18, 83, 33, -42, -117, -110, 12, 59, -105, 48, -2, -92, 100, -67, 113, -5, 9, 120, 8, -36, + -48, -125, 122, 82, -37, -102, 34, -45, -38, 86, 18, 52, -25, 7, -47, -1, 7, 107, 94, -107}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_5/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_5/output_ref_data.h deleted file mode 100644 index 6d7a1f83..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_5/output_ref_data.h +++ /dev/null @@ -1,6 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. -#pragma once -#include - -const int8_t maxpooling_5_output_ref[20] = {27, 31, 96, 63, -97, -42, -20, 95, 39, -91, - -25, -37, 84, 5, 101, -57, 103, 34, 38, -4}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_5/test_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_5/test_data.h index 31add6d5..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_5/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_5/test_data.h @@ -1,4 +1,3 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. 
#include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_6/config_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_6/config_data.h index 314b2680..c8b0b58f 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_6/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_6/config_data.h @@ -1,19 +1,20 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. #pragma once -#define MAXPOOLING_6_OUT_CH 17 -#define MAXPOOLING_6_IN_CH 17 +#define MAXPOOLING_6_BATCH_SIZE 1 +#define MAXPOOLING_6_INPUT_N 1 #define MAXPOOLING_6_INPUT_W 1 #define MAXPOOLING_6_INPUT_H 5 -#define MAXPOOLING_6_DST_SIZE 34 -#define MAXPOOLING_6_INPUT_SIZE 85 -#define MAXPOOLING_6_OUT_ACTIVATION_MIN -128 -#define MAXPOOLING_6_OUT_ACTIVATION_MAX 127 -#define MAXPOOLING_6_INPUT_BATCHES 1 -#define MAXPOOLING_6_FILTER_X 3 -#define MAXPOOLING_6_FILTER_Y 4 -#define MAXPOOLING_6_STRIDE_X 1 -#define MAXPOOLING_6_STRIDE_Y 3 -#define MAXPOOLING_6_PAD_X 1 -#define MAXPOOLING_6_PAD_Y 1 +#define MAXPOOLING_6_INPUT_C 17 +#define MAXPOOLING_6_FILTER_W 3 +#define MAXPOOLING_6_FILTER_H 4 +#define MAXPOOLING_6_STRIDE_W 1 +#define MAXPOOLING_6_STRIDE_H 3 +#define MAXPOOLING_6_PAD SAME +#define MAXPOOLING_6_ACTIVATION_MAX 127 +#define MAXPOOLING_6_ACTIVATION_MIN -128 +#define MAXPOOLING_6_OUTPUT_C 17 #define MAXPOOLING_6_OUTPUT_W 1 #define MAXPOOLING_6_OUTPUT_H 2 +#define MAXPOOLING_6_PADDING_H 1 +#define MAXPOOLING_6_PADDING_W 1 diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_6/input_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_6/input_data.h deleted file mode 100644 index c579deff..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_6/input_data.h 
+++ /dev/null @@ -1,9 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. -#pragma once -#include - -const int8_t maxpooling_6_input[85] = { - 67, 103, -18, -15, 91, 47, -41, -79, -80, 49, -92, -71, 18, -110, 54, -95, 60, 72, 10, 85, -43, 16, - 96, -64, -49, -10, 49, 46, 80, 63, -52, 117, -24, -5, 39, -76, 30, -55, -85, 63, -54, 85, 80, 111, - -107, 43, 115, -58, 28, -14, -113, 75, -41, 65, -90, -94, -89, 71, -90, -18, -59, -124, 12, 51, -68, 47, - 118, 29, 55, -117, 29, 23, 41, 94, 76, -118, 43, 108, 36, 2, -27, 123, 125, 17, -45}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_6/input_tensor.h b/Tests/UnitTest/TestCases/TestData/maxpooling_6/input_tensor.h new file mode 100644 index 00000000..cbe0a972 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_6/input_tensor.h @@ -0,0 +1,10 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int8_t maxpooling_6_input_tensor[85] = { + -49, 64, -13, 48, -23, -105, -74, 61, -92, 97, -9, 105, 61, -89, -5, 95, -80, -42, -68, -10, -80, 63, + 99, 45, -103, -72, -55, 118, -30, -31, -25, -74, -51, 29, -25, -33, -115, 43, 91, -36, -1, 98, -91, 4, + 80, -20, 10, -17, -114, 7, 40, -55, 13, -45, -35, 1, -93, -42, -12, 50, 109, -6, 127, -26, -63, -107, + -70, -39, -46, 9, 83, 88, -43, 49, -13, -39, -96, -45, 44, -63, 86, 119, 56, -38, 66}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_6/output.h b/Tests/UnitTest/TestCases/TestData/maxpooling_6/output.h new file mode 100644 index 00000000..e9ba08e1 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_6/output.h @@ -0,0 +1,7 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int8_t maxpooling_6_output[34] = {-25, 64, -10, 48, 91, 99, 45, 98, -72, 97, 118, 105, 61, -17, -5, 95, 40, + -25, 13, 83, 88, 91, 49, -1, 98, 50, 109, 80, 127, 86, 119, 56, 7, 66}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_6/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_6/output_ref_data.h deleted file mode 100644 index 3207ac9a..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_6/output_ref_data.h +++ /dev/null @@ -1,6 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. -#pragma once -#include - -const int8_t maxpooling_6_output_ref[34] = {72, 103, 85, -15, 91, 96, -41, 85, 80, 111, 46, 80, 115, -52, 117, -14, 60, - 75, -41, 65, 23, 41, 94, 76, 85, 80, 111, 36, 43, 115, 123, 125, 118, 29}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_6/test_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_6/test_data.h index 31add6d5..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_6/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_6/test_data.h @@ -1,4 +1,3 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. #include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_7/config_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_7/config_data.h index 5bffacde..8b74825a 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_7/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_7/config_data.h @@ -1,19 +1,20 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
#pragma once -#define MAXPOOLING_7_OUT_CH 1 -#define MAXPOOLING_7_IN_CH 1 +#define MAXPOOLING_7_BATCH_SIZE 1 +#define MAXPOOLING_7_INPUT_N 1 #define MAXPOOLING_7_INPUT_W 4 #define MAXPOOLING_7_INPUT_H 2 -#define MAXPOOLING_7_DST_SIZE 2 -#define MAXPOOLING_7_INPUT_SIZE 8 -#define MAXPOOLING_7_OUT_ACTIVATION_MIN 0 -#define MAXPOOLING_7_OUT_ACTIVATION_MAX 6 -#define MAXPOOLING_7_INPUT_BATCHES 1 -#define MAXPOOLING_7_FILTER_X 2 -#define MAXPOOLING_7_FILTER_Y 2 -#define MAXPOOLING_7_STRIDE_X 2 -#define MAXPOOLING_7_STRIDE_Y 2 -#define MAXPOOLING_7_PAD_X 0 -#define MAXPOOLING_7_PAD_Y 0 +#define MAXPOOLING_7_INPUT_C 1 +#define MAXPOOLING_7_FILTER_W 2 +#define MAXPOOLING_7_FILTER_H 2 +#define MAXPOOLING_7_STRIDE_W 2 +#define MAXPOOLING_7_STRIDE_H 2 +#define MAXPOOLING_7_PAD VALID +#define MAXPOOLING_7_ACTIVATION_MAX 6 +#define MAXPOOLING_7_ACTIVATION_MIN 0 +#define MAXPOOLING_7_OUTPUT_C 1 #define MAXPOOLING_7_OUTPUT_W 2 #define MAXPOOLING_7_OUTPUT_H 1 +#define MAXPOOLING_7_PADDING_H 0 +#define MAXPOOLING_7_PADDING_W 0 diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_7/input_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_7/input_data.h deleted file mode 100644 index dd5a91f1..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_7/input_data.h +++ /dev/null @@ -1,5 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. -#pragma once -#include - -const int8_t maxpooling_7_input[8] = {90, 1, -36, -51, -106, -28, -54, 67}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_7/input_tensor.h b/Tests/UnitTest/TestCases/TestData/maxpooling_7/input_tensor.h new file mode 100644 index 00000000..dbc99b23 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_7/input_tensor.h @@ -0,0 +1,6 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. 
+#pragma once +#include + +const int8_t maxpooling_7_input_tensor[8] = {-86, -99, -105, -75, -36, 22, 41, -73}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_7/output.h b/Tests/UnitTest/TestCases/TestData/maxpooling_7/output.h new file mode 100644 index 00000000..1a44b7c4 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_7/output.h @@ -0,0 +1,6 @@ +// Generated by generate_test_data.py using tensorflow version 2.17.0 (Keras version 3.5.0). +// Interpreter from tensorflow version 2.17.0 and revision v2.17.0-rc1-2-gad6d8cc177d. +#pragma once +#include + +const int8_t maxpooling_7_output[2] = {6, 6}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_7/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_7/output_ref_data.h deleted file mode 100644 index 289e451b..00000000 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_7/output_ref_data.h +++ /dev/null @@ -1,5 +0,0 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. -#pragma once -#include - -const int8_t maxpooling_7_output_ref[2] = {6, 6}; diff --git a/Tests/UnitTest/TestCases/TestData/maxpooling_7/test_data.h b/Tests/UnitTest/TestCases/TestData/maxpooling_7/test_data.h index 31add6d5..0e46bdee 100644 --- a/Tests/UnitTest/TestCases/TestData/maxpooling_7/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/maxpooling_7/test_data.h @@ -1,4 +1,3 @@ -// Generated by generate_test_data.py using TFL version 2.6.0 as reference. 
#include "config_data.h" -#include "input_data.h" -#include "output_ref_data.h" +#include "input_tensor.h" +#include "output.h" diff --git a/Tests/UnitTest/TestCases/test_arm_avgpool_s16/test_arm_avgpool_s16.c b/Tests/UnitTest/TestCases/test_arm_avgpool_s16/test_arm_avgpool_s16.c index b41a675a..392b4841 100644 --- a/Tests/UnitTest/TestCases/test_arm_avgpool_s16/test_arm_avgpool_s16.c +++ b/Tests/UnitTest/TestCases/test_arm_avgpool_s16/test_arm_avgpool_s16.c @@ -1,5 +1,5 @@ /* - * SPDX-FileCopyrightText: Copyright 2010-2023 Arm Limited and/or its affiliates + * SPDX-FileCopyrightText: Copyright 2010-2024 Arm Limited and/or its affiliates * * SPDX-License-Identifier: Apache-2.0 * @@ -28,7 +28,8 @@ void avgpooling_int16_arm_avgpool_s16(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int16_t output[AVGPOOLING_INT16_DST_SIZE] = {0}; + int16_t output[AVGPOOLING_INT16_OUTPUT_C * AVGPOOLING_INT16_OUTPUT_W * AVGPOOLING_INT16_OUTPUT_H * + AVGPOOLING_INT16_BATCH_SIZE] = {0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -36,27 +37,27 @@ void avgpooling_int16_arm_avgpool_s16(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int16_t *input_data = avgpooling_int16_input; + const int16_t *input_data = avgpooling_int16_input_tensor; - input_dims.n = AVGPOOLING_INT16_INPUT_BATCHES; + input_dims.n = AVGPOOLING_INT16_BATCH_SIZE; input_dims.w = AVGPOOLING_INT16_INPUT_W; input_dims.h = AVGPOOLING_INT16_INPUT_H; - input_dims.c = AVGPOOLING_INT16_IN_CH; - filter_dims.w = AVGPOOLING_INT16_FILTER_X; - filter_dims.h = AVGPOOLING_INT16_FILTER_Y; + input_dims.c = AVGPOOLING_INT16_INPUT_C; + filter_dims.w = AVGPOOLING_INT16_FILTER_W; + filter_dims.h = AVGPOOLING_INT16_FILTER_H; output_dims.w = AVGPOOLING_INT16_OUTPUT_W; output_dims.h = AVGPOOLING_INT16_OUTPUT_H; - output_dims.c = AVGPOOLING_INT16_OUT_CH; + output_dims.c = AVGPOOLING_INT16_INPUT_C; - pool_params.padding.w = AVGPOOLING_INT16_PAD_X; - pool_params.padding.h = 
AVGPOOLING_INT16_PAD_Y; - pool_params.stride.w = AVGPOOLING_INT16_STRIDE_X; - pool_params.stride.h = AVGPOOLING_INT16_STRIDE_Y; + pool_params.padding.w = AVGPOOLING_INT16_PADDING_W; + pool_params.padding.h = AVGPOOLING_INT16_PADDING_H; + pool_params.stride.w = AVGPOOLING_INT16_STRIDE_W; + pool_params.stride.h = AVGPOOLING_INT16_STRIDE_H; - pool_params.activation.min = AVGPOOLING_INT16_OUT_ACTIVATION_MIN; - pool_params.activation.max = AVGPOOLING_INT16_OUT_ACTIVATION_MAX; + pool_params.activation.min = AVGPOOLING_INT16_ACTIVATION_MIN; + pool_params.activation.max = AVGPOOLING_INT16_ACTIVATION_MAX; - ctx.size = arm_avgpool_s16_get_buffer_size(AVGPOOLING_INT16_OUTPUT_W, AVGPOOLING_INT16_IN_CH); + ctx.size = arm_avgpool_s16_get_buffer_size(AVGPOOLING_INT16_OUTPUT_W, AVGPOOLING_INT16_INPUT_C); ctx.buf = malloc(ctx.size); arm_cmsis_nn_status result = @@ -69,13 +70,17 @@ void avgpooling_int16_arm_avgpool_s16(void) free(ctx.buf); } TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate_s16(output, avgpooling_int16_output_ref, AVGPOOLING_INT16_DST_SIZE)); + TEST_ASSERT_TRUE(validate_s16(output, + avgpooling_int16_output, + AVGPOOLING_INT16_OUTPUT_C * AVGPOOLING_INT16_OUTPUT_W * AVGPOOLING_INT16_OUTPUT_H * + AVGPOOLING_INT16_BATCH_SIZE)); } void avgpooling_int16_1_arm_avgpool_s16(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int16_t output[AVGPOOLING_INT16_1_DST_SIZE] = {0}; + int16_t output[AVGPOOLING_INT16_1_OUTPUT_C * AVGPOOLING_INT16_1_OUTPUT_W * AVGPOOLING_INT16_1_OUTPUT_H * + AVGPOOLING_INT16_1_BATCH_SIZE] = {0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -83,27 +88,27 @@ void avgpooling_int16_1_arm_avgpool_s16(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int16_t *input_data = avgpooling_int16_1_input; + const int16_t *input_data = avgpooling_int16_1_input_tensor; - input_dims.n = AVGPOOLING_INT16_1_INPUT_BATCHES; + input_dims.n = AVGPOOLING_INT16_1_BATCH_SIZE; input_dims.w = 
AVGPOOLING_INT16_1_INPUT_W; input_dims.h = AVGPOOLING_INT16_1_INPUT_H; - input_dims.c = AVGPOOLING_INT16_1_IN_CH; - filter_dims.w = AVGPOOLING_INT16_1_FILTER_X; - filter_dims.h = AVGPOOLING_INT16_1_FILTER_Y; + input_dims.c = AVGPOOLING_INT16_1_INPUT_C; + filter_dims.w = AVGPOOLING_INT16_1_FILTER_W; + filter_dims.h = AVGPOOLING_INT16_1_FILTER_H; output_dims.w = AVGPOOLING_INT16_1_OUTPUT_W; output_dims.h = AVGPOOLING_INT16_1_OUTPUT_H; - output_dims.c = AVGPOOLING_INT16_1_OUT_CH; + output_dims.c = AVGPOOLING_INT16_1_INPUT_C; - pool_params.padding.w = AVGPOOLING_INT16_1_PAD_X; - pool_params.padding.h = AVGPOOLING_INT16_1_PAD_Y; - pool_params.stride.w = AVGPOOLING_INT16_1_STRIDE_X; - pool_params.stride.h = AVGPOOLING_INT16_1_STRIDE_Y; + pool_params.padding.w = AVGPOOLING_INT16_1_PADDING_W; + pool_params.padding.h = AVGPOOLING_INT16_1_PADDING_H; + pool_params.stride.w = AVGPOOLING_INT16_1_STRIDE_W; + pool_params.stride.h = AVGPOOLING_INT16_1_STRIDE_H; - pool_params.activation.min = AVGPOOLING_INT16_1_OUT_ACTIVATION_MIN; - pool_params.activation.max = AVGPOOLING_INT16_1_OUT_ACTIVATION_MAX; + pool_params.activation.min = AVGPOOLING_INT16_1_ACTIVATION_MIN; + pool_params.activation.max = AVGPOOLING_INT16_1_ACTIVATION_MAX; - ctx.size = arm_avgpool_s16_get_buffer_size(AVGPOOLING_INT16_1_OUTPUT_W, AVGPOOLING_INT16_1_IN_CH); + ctx.size = arm_avgpool_s16_get_buffer_size(AVGPOOLING_INT16_1_OUTPUT_W, AVGPOOLING_INT16_1_INPUT_C); ctx.buf = malloc(ctx.size); arm_cmsis_nn_status result = @@ -115,13 +120,17 @@ void avgpooling_int16_1_arm_avgpool_s16(void) free(ctx.buf); } TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate_s16(output, avgpooling_int16_1_output_ref, AVGPOOLING_INT16_1_DST_SIZE)); + TEST_ASSERT_TRUE(validate_s16(output, + avgpooling_int16_1_output, + AVGPOOLING_INT16_1_OUTPUT_C * AVGPOOLING_INT16_1_OUTPUT_W * + AVGPOOLING_INT16_1_OUTPUT_H * AVGPOOLING_INT16_1_BATCH_SIZE)); } void avgpooling_int16_2_arm_avgpool_s16(void) { const arm_cmsis_nn_status expected = 
ARM_CMSIS_NN_SUCCESS; - int16_t output[AVGPOOLING_INT16_2_DST_SIZE] = {0}; + int16_t output[AVGPOOLING_INT16_2_OUTPUT_C * AVGPOOLING_INT16_2_OUTPUT_W * AVGPOOLING_INT16_2_OUTPUT_H * + AVGPOOLING_INT16_2_BATCH_SIZE] = {0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -129,27 +138,27 @@ void avgpooling_int16_2_arm_avgpool_s16(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int16_t *input_data = avgpooling_int16_2_input; + const int16_t *input_data = avgpooling_int16_2_input_tensor; - input_dims.n = AVGPOOLING_INT16_2_INPUT_BATCHES; + input_dims.n = AVGPOOLING_INT16_2_BATCH_SIZE; input_dims.w = AVGPOOLING_INT16_2_INPUT_W; input_dims.h = AVGPOOLING_INT16_2_INPUT_H; - input_dims.c = AVGPOOLING_INT16_2_IN_CH; - filter_dims.w = AVGPOOLING_INT16_2_FILTER_X; - filter_dims.h = AVGPOOLING_INT16_2_FILTER_Y; + input_dims.c = AVGPOOLING_INT16_2_INPUT_C; + filter_dims.w = AVGPOOLING_INT16_2_FILTER_W; + filter_dims.h = AVGPOOLING_INT16_2_FILTER_H; output_dims.w = AVGPOOLING_INT16_2_OUTPUT_W; output_dims.h = AVGPOOLING_INT16_2_OUTPUT_H; - output_dims.c = AVGPOOLING_INT16_2_OUT_CH; + output_dims.c = AVGPOOLING_INT16_2_INPUT_C; - pool_params.padding.w = AVGPOOLING_INT16_2_PAD_X; - pool_params.padding.h = AVGPOOLING_INT16_2_PAD_Y; - pool_params.stride.w = AVGPOOLING_INT16_2_STRIDE_X; - pool_params.stride.h = AVGPOOLING_INT16_2_STRIDE_Y; + pool_params.padding.w = AVGPOOLING_INT16_2_PADDING_W; + pool_params.padding.h = AVGPOOLING_INT16_2_PADDING_H; + pool_params.stride.w = AVGPOOLING_INT16_2_STRIDE_W; + pool_params.stride.h = AVGPOOLING_INT16_2_STRIDE_H; - pool_params.activation.min = AVGPOOLING_INT16_2_OUT_ACTIVATION_MIN; - pool_params.activation.max = AVGPOOLING_INT16_2_OUT_ACTIVATION_MAX; + pool_params.activation.min = AVGPOOLING_INT16_2_ACTIVATION_MIN; + pool_params.activation.max = AVGPOOLING_INT16_2_ACTIVATION_MAX; - ctx.size = arm_avgpool_s16_get_buffer_size(AVGPOOLING_INT16_2_OUTPUT_W, AVGPOOLING_INT16_2_IN_CH); + ctx.size = 
arm_avgpool_s16_get_buffer_size(AVGPOOLING_INT16_2_OUTPUT_W, AVGPOOLING_INT16_2_INPUT_C); ctx.buf = malloc(ctx.size); arm_cmsis_nn_status result = @@ -161,13 +170,17 @@ void avgpooling_int16_2_arm_avgpool_s16(void) free(ctx.buf); } TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate_s16(output, avgpooling_int16_2_output_ref, AVGPOOLING_INT16_2_DST_SIZE)); + TEST_ASSERT_TRUE(validate_s16(output, + avgpooling_int16_2_output, + AVGPOOLING_INT16_2_OUTPUT_C * AVGPOOLING_INT16_2_OUTPUT_W * + AVGPOOLING_INT16_2_OUTPUT_H * AVGPOOLING_INT16_2_BATCH_SIZE)); } void avgpooling_int16_3_arm_avgpool_s16(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int16_t output[AVGPOOLING_INT16_3_DST_SIZE] = {0}; + int16_t output[AVGPOOLING_INT16_3_OUTPUT_C * AVGPOOLING_INT16_3_OUTPUT_W * AVGPOOLING_INT16_3_OUTPUT_H * + AVGPOOLING_INT16_3_BATCH_SIZE] = {0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -175,27 +188,27 @@ void avgpooling_int16_3_arm_avgpool_s16(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int16_t *input_data = avgpooling_int16_3_input; + const int16_t *input_data = avgpooling_int16_3_input_tensor; - input_dims.n = AVGPOOLING_INT16_3_INPUT_BATCHES; + input_dims.n = AVGPOOLING_INT16_3_BATCH_SIZE; input_dims.w = AVGPOOLING_INT16_3_INPUT_W; input_dims.h = AVGPOOLING_INT16_3_INPUT_H; - input_dims.c = AVGPOOLING_INT16_3_IN_CH; - filter_dims.w = AVGPOOLING_INT16_3_FILTER_X; - filter_dims.h = AVGPOOLING_INT16_3_FILTER_Y; + input_dims.c = AVGPOOLING_INT16_3_INPUT_C; + filter_dims.w = AVGPOOLING_INT16_3_FILTER_W; + filter_dims.h = AVGPOOLING_INT16_3_FILTER_H; output_dims.w = AVGPOOLING_INT16_3_OUTPUT_W; output_dims.h = AVGPOOLING_INT16_3_OUTPUT_H; - output_dims.c = AVGPOOLING_INT16_3_OUT_CH; + output_dims.c = AVGPOOLING_INT16_3_INPUT_C; - pool_params.padding.w = AVGPOOLING_INT16_3_PAD_X; - pool_params.padding.h = AVGPOOLING_INT16_3_PAD_Y; - pool_params.stride.w = AVGPOOLING_INT16_3_STRIDE_X; - pool_params.stride.h = 
AVGPOOLING_INT16_3_STRIDE_Y; + pool_params.padding.w = AVGPOOLING_INT16_3_PADDING_W; + pool_params.padding.h = AVGPOOLING_INT16_3_PADDING_H; + pool_params.stride.w = AVGPOOLING_INT16_3_STRIDE_W; + pool_params.stride.h = AVGPOOLING_INT16_3_STRIDE_H; - pool_params.activation.min = AVGPOOLING_INT16_3_OUT_ACTIVATION_MIN; - pool_params.activation.max = AVGPOOLING_INT16_3_OUT_ACTIVATION_MAX; + pool_params.activation.min = AVGPOOLING_INT16_3_ACTIVATION_MIN; + pool_params.activation.max = AVGPOOLING_INT16_3_ACTIVATION_MAX; - ctx.size = arm_avgpool_s16_get_buffer_size(AVGPOOLING_INT16_3_OUTPUT_W, AVGPOOLING_INT16_3_IN_CH); + ctx.size = arm_avgpool_s16_get_buffer_size(AVGPOOLING_INT16_3_OUTPUT_W, AVGPOOLING_INT16_3_INPUT_C); ctx.buf = malloc(ctx.size); arm_cmsis_nn_status result = @@ -207,15 +220,18 @@ void avgpooling_int16_3_arm_avgpool_s16(void) free(ctx.buf); } TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate_s16(output, avgpooling_int16_3_output_ref, AVGPOOLING_INT16_3_DST_SIZE)); + TEST_ASSERT_TRUE(validate_s16(output, + avgpooling_int16_3_output, + AVGPOOLING_INT16_3_OUTPUT_C * AVGPOOLING_INT16_3_OUTPUT_W * + AVGPOOLING_INT16_3_OUTPUT_H * AVGPOOLING_INT16_3_BATCH_SIZE)); } void buffer_size_mve_arm_avgpool_s16(void) { #if defined(ARM_MATH_MVEI) - const int32_t buf_size = arm_avgpool_s16_get_buffer_size(AVGPOOLING_INT16_3_OUTPUT_W, AVGPOOLING_INT16_3_IN_CH); + const int32_t buf_size = arm_avgpool_s16_get_buffer_size(AVGPOOLING_INT16_3_OUTPUT_W, AVGPOOLING_INT16_3_INPUT_C); const int32_t mve_buf_size = - arm_avgpool_s16_get_buffer_size_mve(AVGPOOLING_INT16_3_OUTPUT_W, AVGPOOLING_INT16_3_IN_CH); + arm_avgpool_s16_get_buffer_size_mve(AVGPOOLING_INT16_3_OUTPUT_W, AVGPOOLING_INT16_3_INPUT_C); TEST_ASSERT_EQUAL(buf_size, mve_buf_size); #endif @@ -224,9 +240,9 @@ void buffer_size_mve_arm_avgpool_s16(void) void buffer_size_dsp_arm_avgpool_s16(void) { #if defined(ARM_MATH_DSP) && !defined(ARM_MATH_MVEI) - const int32_t buf_size = 
arm_avgpool_s16_get_buffer_size(AVGPOOLING_INT16_3_OUTPUT_W, AVGPOOLING_INT16_3_IN_CH); + const int32_t buf_size = arm_avgpool_s16_get_buffer_size(AVGPOOLING_INT16_3_OUTPUT_W, AVGPOOLING_INT16_3_INPUT_C); const int32_t dsp_buf_size = - arm_avgpool_s16_get_buffer_size_dsp(AVGPOOLING_INT16_3_OUTPUT_W, AVGPOOLING_INT16_3_IN_CH); + arm_avgpool_s16_get_buffer_size_dsp(AVGPOOLING_INT16_3_OUTPUT_W, AVGPOOLING_INT16_3_INPUT_C); TEST_ASSERT_EQUAL(buf_size, dsp_buf_size); #endif @@ -235,7 +251,8 @@ void buffer_size_dsp_arm_avgpool_s16(void) void avgpooling_int16_param_fail_arm_avgpool_s16(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_ARG_ERROR; - int16_t output[AVGPOOLING_INT16_3_DST_SIZE] = {0}; + int16_t output[AVGPOOLING_INT16_3_OUTPUT_C * AVGPOOLING_INT16_3_OUTPUT_W * AVGPOOLING_INT16_3_OUTPUT_H * + AVGPOOLING_INT16_3_BATCH_SIZE] = {0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -243,27 +260,27 @@ void avgpooling_int16_param_fail_arm_avgpool_s16(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int16_t *input_data = avgpooling_int16_3_input; + const int16_t *input_data = avgpooling_int16_3_input_tensor; input_dims.n = 0; input_dims.w = AVGPOOLING_INT16_3_INPUT_W; input_dims.h = AVGPOOLING_INT16_3_INPUT_H; - input_dims.c = AVGPOOLING_INT16_3_IN_CH; - filter_dims.w = AVGPOOLING_INT16_3_FILTER_X; - filter_dims.h = AVGPOOLING_INT16_3_FILTER_Y; + input_dims.c = AVGPOOLING_INT16_3_INPUT_C; + filter_dims.w = AVGPOOLING_INT16_3_FILTER_W; + filter_dims.h = AVGPOOLING_INT16_3_FILTER_H; output_dims.w = AVGPOOLING_INT16_3_OUTPUT_W; output_dims.h = AVGPOOLING_INT16_3_OUTPUT_H; - output_dims.c = AVGPOOLING_INT16_3_OUT_CH; + output_dims.c = AVGPOOLING_INT16_3_INPUT_C; - pool_params.padding.w = AVGPOOLING_INT16_3_PAD_X; - pool_params.padding.h = AVGPOOLING_INT16_3_PAD_Y; - pool_params.stride.w = AVGPOOLING_INT16_3_STRIDE_X; - pool_params.stride.h = AVGPOOLING_INT16_3_STRIDE_Y; + pool_params.padding.w = AVGPOOLING_INT16_3_PADDING_W; + 
pool_params.padding.h = AVGPOOLING_INT16_3_PADDING_H; + pool_params.stride.w = AVGPOOLING_INT16_3_STRIDE_W; + pool_params.stride.h = AVGPOOLING_INT16_3_STRIDE_H; - pool_params.activation.min = AVGPOOLING_INT16_3_OUT_ACTIVATION_MIN; - pool_params.activation.max = AVGPOOLING_INT16_3_OUT_ACTIVATION_MAX; + pool_params.activation.min = AVGPOOLING_INT16_3_ACTIVATION_MIN; + pool_params.activation.max = AVGPOOLING_INT16_3_ACTIVATION_MAX; - ctx.size = arm_avgpool_s16_get_buffer_size(AVGPOOLING_INT16_3_OUTPUT_W, AVGPOOLING_INT16_3_IN_CH); + ctx.size = arm_avgpool_s16_get_buffer_size(AVGPOOLING_INT16_3_OUTPUT_W, AVGPOOLING_INT16_3_INPUT_C); ctx.buf = malloc(ctx.size); arm_cmsis_nn_status result = diff --git a/Tests/UnitTest/TestCases/test_arm_avgpool_s8/test_arm_avgpool_s8.c b/Tests/UnitTest/TestCases/test_arm_avgpool_s8/test_arm_avgpool_s8.c index 29951231..9bbc05fc 100644 --- a/Tests/UnitTest/TestCases/test_arm_avgpool_s8/test_arm_avgpool_s8.c +++ b/Tests/UnitTest/TestCases/test_arm_avgpool_s8/test_arm_avgpool_s8.c @@ -1,5 +1,5 @@ /* - * SPDX-FileCopyrightText: Copyright 2010-2023 Arm Limited and/or its affiliates + * SPDX-FileCopyrightText: Copyright 2010-2024 Arm Limited and/or its affiliates * * SPDX-License-Identifier: Apache-2.0 * @@ -30,7 +30,7 @@ void avgpooling_arm_avgpool_s8(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int8_t output[AVGPOOLING_DST_SIZE] = {0}; + int8_t output[AVGPOOLING_OUTPUT_W * AVGPOOLING_OUTPUT_H * AVGPOOLING_BATCH_SIZE * AVGPOOLING_OUTPUT_C] = {0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -38,27 +38,27 @@ void avgpooling_arm_avgpool_s8(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int8_t *input_data = avgpooling_input; + const int8_t *input_data = avgpooling_input_tensor; - input_dims.n = AVGPOOLING_INPUT_BATCHES; + input_dims.n = AVGPOOLING_BATCH_SIZE; input_dims.w = AVGPOOLING_INPUT_W; input_dims.h = AVGPOOLING_INPUT_H; - input_dims.c = AVGPOOLING_IN_CH; - filter_dims.w = 
AVGPOOLING_FILTER_X; - filter_dims.h = AVGPOOLING_FILTER_Y; + input_dims.c = AVGPOOLING_INPUT_C; + filter_dims.w = AVGPOOLING_FILTER_W; + filter_dims.h = AVGPOOLING_FILTER_H; output_dims.w = AVGPOOLING_OUTPUT_W; output_dims.h = AVGPOOLING_OUTPUT_H; - output_dims.c = AVGPOOLING_OUT_CH; + output_dims.c = AVGPOOLING_OUTPUT_C; - pool_params.padding.w = AVGPOOLING_PAD_X; - pool_params.padding.h = AVGPOOLING_PAD_Y; - pool_params.stride.w = AVGPOOLING_STRIDE_X; - pool_params.stride.h = AVGPOOLING_STRIDE_Y; + pool_params.padding.w = AVGPOOLING_PADDING_W; + pool_params.padding.h = AVGPOOLING_PADDING_H; + pool_params.stride.w = AVGPOOLING_STRIDE_W; + pool_params.stride.h = AVGPOOLING_STRIDE_H; - pool_params.activation.min = AVGPOOLING_OUT_ACTIVATION_MIN; - pool_params.activation.max = AVGPOOLING_OUT_ACTIVATION_MAX; + pool_params.activation.min = AVGPOOLING_ACTIVATION_MIN; + pool_params.activation.max = AVGPOOLING_ACTIVATION_MAX; - ctx.size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_OUTPUT_W, AVGPOOLING_IN_CH); + ctx.size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_OUTPUT_W, AVGPOOLING_INPUT_C); ctx.buf = malloc(ctx.size); arm_cmsis_nn_status result = @@ -71,13 +71,16 @@ void avgpooling_arm_avgpool_s8(void) free(ctx.buf); } TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate(output, avgpooling_output_ref, AVGPOOLING_DST_SIZE)); + TEST_ASSERT_TRUE(validate(output, + avgpooling_output, + AVGPOOLING_OUTPUT_W * AVGPOOLING_OUTPUT_H * AVGPOOLING_BATCH_SIZE * AVGPOOLING_OUTPUT_C)); } void avgpooling_1_arm_avgpool_s8(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int8_t output[AVGPOOLING_1_DST_SIZE] = {0}; + int8_t output[AVGPOOLING_1_OUTPUT_W * AVGPOOLING_1_OUTPUT_H * AVGPOOLING_1_BATCH_SIZE * AVGPOOLING_1_OUTPUT_C] = { + 0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -85,27 +88,27 @@ void avgpooling_1_arm_avgpool_s8(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int8_t *input_data = avgpooling_1_input; + const 
int8_t *input_data = avgpooling_1_input_tensor; - input_dims.n = AVGPOOLING_1_INPUT_BATCHES; + input_dims.n = AVGPOOLING_1_BATCH_SIZE; input_dims.w = AVGPOOLING_1_INPUT_W; input_dims.h = AVGPOOLING_1_INPUT_H; - input_dims.c = AVGPOOLING_1_IN_CH; - filter_dims.w = AVGPOOLING_1_FILTER_X; - filter_dims.h = AVGPOOLING_1_FILTER_Y; + input_dims.c = AVGPOOLING_1_INPUT_C; + filter_dims.w = AVGPOOLING_1_FILTER_W; + filter_dims.h = AVGPOOLING_1_FILTER_H; output_dims.w = AVGPOOLING_1_OUTPUT_W; output_dims.h = AVGPOOLING_1_OUTPUT_H; - output_dims.c = AVGPOOLING_1_OUT_CH; + output_dims.c = AVGPOOLING_1_OUTPUT_C; - pool_params.padding.w = AVGPOOLING_1_PAD_X; - pool_params.padding.h = AVGPOOLING_1_PAD_Y; - pool_params.stride.w = AVGPOOLING_1_STRIDE_X; - pool_params.stride.h = AVGPOOLING_1_STRIDE_Y; + pool_params.padding.w = AVGPOOLING_1_PADDING_W; + pool_params.padding.h = AVGPOOLING_1_PADDING_H; + pool_params.stride.w = AVGPOOLING_1_STRIDE_W; + pool_params.stride.h = AVGPOOLING_1_STRIDE_H; - pool_params.activation.min = AVGPOOLING_1_OUT_ACTIVATION_MIN; - pool_params.activation.max = AVGPOOLING_1_OUT_ACTIVATION_MAX; + pool_params.activation.min = AVGPOOLING_1_ACTIVATION_MIN; + pool_params.activation.max = AVGPOOLING_1_ACTIVATION_MAX; - ctx.size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_1_OUTPUT_W, AVGPOOLING_1_IN_CH); + ctx.size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_1_OUTPUT_W, AVGPOOLING_1_INPUT_C); ctx.buf = malloc(ctx.size); arm_cmsis_nn_status result = @@ -117,13 +120,17 @@ void avgpooling_1_arm_avgpool_s8(void) free(ctx.buf); } TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate(output, avgpooling_1_output_ref, AVGPOOLING_1_DST_SIZE)); + TEST_ASSERT_TRUE( + validate(output, + avgpooling_1_output, + AVGPOOLING_1_OUTPUT_W * AVGPOOLING_1_OUTPUT_H * AVGPOOLING_1_BATCH_SIZE * AVGPOOLING_1_OUTPUT_C)); } void avgpooling_2_arm_avgpool_s8(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int8_t output[AVGPOOLING_2_DST_SIZE] = {0}; + int8_t 
output[AVGPOOLING_2_OUTPUT_W * AVGPOOLING_2_OUTPUT_H * AVGPOOLING_2_BATCH_SIZE * AVGPOOLING_2_OUTPUT_C] = { + 0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -131,27 +138,27 @@ void avgpooling_2_arm_avgpool_s8(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int8_t *input_data = avgpooling_2_input; + const int8_t *input_data = avgpooling_2_input_tensor; - input_dims.n = AVGPOOLING_2_INPUT_BATCHES; + input_dims.n = AVGPOOLING_2_BATCH_SIZE; input_dims.w = AVGPOOLING_2_INPUT_W; input_dims.h = AVGPOOLING_2_INPUT_H; - input_dims.c = AVGPOOLING_2_IN_CH; - filter_dims.w = AVGPOOLING_2_FILTER_X; - filter_dims.h = AVGPOOLING_2_FILTER_Y; + input_dims.c = AVGPOOLING_2_INPUT_C; + filter_dims.w = AVGPOOLING_2_FILTER_W; + filter_dims.h = AVGPOOLING_2_FILTER_H; output_dims.w = AVGPOOLING_2_OUTPUT_W; output_dims.h = AVGPOOLING_2_OUTPUT_H; - output_dims.c = AVGPOOLING_2_OUT_CH; + output_dims.c = AVGPOOLING_2_OUTPUT_C; - pool_params.padding.w = AVGPOOLING_2_PAD_X; - pool_params.padding.h = AVGPOOLING_2_PAD_Y; - pool_params.stride.w = AVGPOOLING_2_STRIDE_X; - pool_params.stride.h = AVGPOOLING_2_STRIDE_Y; + pool_params.padding.w = AVGPOOLING_2_PADDING_W; + pool_params.padding.h = AVGPOOLING_2_PADDING_H; + pool_params.stride.w = AVGPOOLING_2_STRIDE_W; + pool_params.stride.h = AVGPOOLING_2_STRIDE_H; - pool_params.activation.min = AVGPOOLING_2_OUT_ACTIVATION_MIN; - pool_params.activation.max = AVGPOOLING_2_OUT_ACTIVATION_MAX; + pool_params.activation.min = AVGPOOLING_2_ACTIVATION_MIN; + pool_params.activation.max = AVGPOOLING_2_ACTIVATION_MAX; - ctx.size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_2_OUTPUT_W, AVGPOOLING_2_IN_CH); + ctx.size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_2_OUTPUT_W, AVGPOOLING_2_INPUT_C); ctx.buf = malloc(ctx.size); arm_cmsis_nn_status result = @@ -163,13 +170,17 @@ void avgpooling_2_arm_avgpool_s8(void) free(ctx.buf); } TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate(output, avgpooling_2_output_ref, 
AVGPOOLING_2_DST_SIZE)); + TEST_ASSERT_TRUE( + validate(output, + avgpooling_2_output, + AVGPOOLING_2_OUTPUT_W * AVGPOOLING_2_OUTPUT_H * AVGPOOLING_2_BATCH_SIZE * AVGPOOLING_2_OUTPUT_C)); } void avgpooling_3_arm_avgpool_s8(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int8_t output[AVGPOOLING_3_DST_SIZE] = {0}; + int8_t output[AVGPOOLING_3_OUTPUT_W * AVGPOOLING_3_OUTPUT_H * AVGPOOLING_3_BATCH_SIZE * AVGPOOLING_3_OUTPUT_C] = { + 0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -177,27 +188,27 @@ void avgpooling_3_arm_avgpool_s8(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int8_t *input_data = avgpooling_3_input; + const int8_t *input_data = avgpooling_3_input_tensor; - input_dims.n = AVGPOOLING_3_INPUT_BATCHES; + input_dims.n = AVGPOOLING_3_BATCH_SIZE; input_dims.w = AVGPOOLING_3_INPUT_W; input_dims.h = AVGPOOLING_3_INPUT_H; - input_dims.c = AVGPOOLING_3_IN_CH; - filter_dims.w = AVGPOOLING_3_FILTER_X; - filter_dims.h = AVGPOOLING_3_FILTER_Y; + input_dims.c = AVGPOOLING_3_INPUT_C; + filter_dims.w = AVGPOOLING_3_FILTER_W; + filter_dims.h = AVGPOOLING_3_FILTER_H; output_dims.w = AVGPOOLING_3_OUTPUT_W; output_dims.h = AVGPOOLING_3_OUTPUT_H; - output_dims.c = AVGPOOLING_3_OUT_CH; + output_dims.c = AVGPOOLING_3_OUTPUT_C; - pool_params.padding.w = AVGPOOLING_3_PAD_X; - pool_params.padding.h = AVGPOOLING_3_PAD_Y; - pool_params.stride.w = AVGPOOLING_3_STRIDE_X; - pool_params.stride.h = AVGPOOLING_3_STRIDE_Y; + pool_params.padding.w = AVGPOOLING_3_PADDING_W; + pool_params.padding.h = AVGPOOLING_3_PADDING_H; + pool_params.stride.w = AVGPOOLING_3_STRIDE_W; + pool_params.stride.h = AVGPOOLING_3_STRIDE_H; - pool_params.activation.min = AVGPOOLING_3_OUT_ACTIVATION_MIN; - pool_params.activation.max = AVGPOOLING_3_OUT_ACTIVATION_MAX; + pool_params.activation.min = AVGPOOLING_3_ACTIVATION_MIN; + pool_params.activation.max = AVGPOOLING_3_ACTIVATION_MAX; - ctx.size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_3_OUTPUT_W, 
AVGPOOLING_3_IN_CH); + ctx.size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_3_OUTPUT_W, AVGPOOLING_3_INPUT_C); ctx.buf = malloc(ctx.size); arm_cmsis_nn_status result = @@ -209,13 +220,17 @@ void avgpooling_3_arm_avgpool_s8(void) free(ctx.buf); } TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate(output, avgpooling_3_output_ref, AVGPOOLING_3_DST_SIZE)); + TEST_ASSERT_TRUE( + validate(output, + avgpooling_3_output, + AVGPOOLING_3_OUTPUT_W * AVGPOOLING_3_OUTPUT_H * AVGPOOLING_3_BATCH_SIZE * AVGPOOLING_3_OUTPUT_C)); } void avgpooling_4_arm_avgpool_s8(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int8_t output[AVGPOOLING_4_DST_SIZE] = {0}; + int8_t output[AVGPOOLING_4_OUTPUT_W * AVGPOOLING_4_OUTPUT_H * AVGPOOLING_4_BATCH_SIZE * AVGPOOLING_4_OUTPUT_C] = { + 0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -223,27 +238,27 @@ void avgpooling_4_arm_avgpool_s8(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int8_t *input_data = avgpooling_4_input; + const int8_t *input_data = avgpooling_4_input_tensor; - input_dims.n = AVGPOOLING_4_INPUT_BATCHES; + input_dims.n = AVGPOOLING_4_BATCH_SIZE; input_dims.w = AVGPOOLING_4_INPUT_W; input_dims.h = AVGPOOLING_4_INPUT_H; - input_dims.c = AVGPOOLING_4_IN_CH; - filter_dims.w = AVGPOOLING_4_FILTER_X; - filter_dims.h = AVGPOOLING_4_FILTER_Y; + input_dims.c = AVGPOOLING_4_INPUT_C; + filter_dims.w = AVGPOOLING_4_FILTER_W; + filter_dims.h = AVGPOOLING_4_FILTER_H; output_dims.w = AVGPOOLING_4_OUTPUT_W; output_dims.h = AVGPOOLING_4_OUTPUT_H; - output_dims.c = AVGPOOLING_4_OUT_CH; + output_dims.c = AVGPOOLING_4_OUTPUT_C; - pool_params.padding.w = AVGPOOLING_4_PAD_X; - pool_params.padding.h = AVGPOOLING_4_PAD_Y; - pool_params.stride.w = AVGPOOLING_4_STRIDE_X; - pool_params.stride.h = AVGPOOLING_4_STRIDE_Y; + pool_params.padding.w = AVGPOOLING_4_PADDING_W; + pool_params.padding.h = AVGPOOLING_4_PADDING_H; + pool_params.stride.w = AVGPOOLING_4_STRIDE_W; + pool_params.stride.h = 
AVGPOOLING_4_STRIDE_H; - pool_params.activation.min = AVGPOOLING_4_OUT_ACTIVATION_MIN; - pool_params.activation.max = AVGPOOLING_4_OUT_ACTIVATION_MAX; + pool_params.activation.min = AVGPOOLING_4_ACTIVATION_MIN; + pool_params.activation.max = AVGPOOLING_4_ACTIVATION_MAX; - ctx.size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_4_OUTPUT_W, AVGPOOLING_4_IN_CH); + ctx.size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_4_OUTPUT_W, AVGPOOLING_4_INPUT_C); ctx.buf = malloc(ctx.size); arm_cmsis_nn_status result = @@ -255,13 +270,17 @@ void avgpooling_4_arm_avgpool_s8(void) free(ctx.buf); } TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate(output, avgpooling_4_output_ref, AVGPOOLING_4_DST_SIZE)); + TEST_ASSERT_TRUE( + validate(output, + avgpooling_4_output, + AVGPOOLING_4_OUTPUT_W * AVGPOOLING_4_OUTPUT_H * AVGPOOLING_4_BATCH_SIZE * AVGPOOLING_4_OUTPUT_C)); } void avgpooling_5_arm_avgpool_s8(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int8_t output[AVGPOOLING_5_DST_SIZE] = {0}; + int8_t output[AVGPOOLING_5_OUTPUT_W * AVGPOOLING_5_OUTPUT_H * AVGPOOLING_5_BATCH_SIZE * AVGPOOLING_5_OUTPUT_C] = { + 0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -269,27 +288,27 @@ void avgpooling_5_arm_avgpool_s8(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int8_t *input_data = avgpooling_5_input; + const int8_t *input_data = avgpooling_5_input_tensor; - input_dims.n = AVGPOOLING_5_INPUT_BATCHES; + input_dims.n = AVGPOOLING_5_BATCH_SIZE; input_dims.w = AVGPOOLING_5_INPUT_W; input_dims.h = AVGPOOLING_5_INPUT_H; - input_dims.c = AVGPOOLING_5_IN_CH; - filter_dims.w = AVGPOOLING_5_FILTER_X; - filter_dims.h = AVGPOOLING_5_FILTER_Y; + input_dims.c = AVGPOOLING_5_INPUT_C; + filter_dims.w = AVGPOOLING_5_FILTER_W; + filter_dims.h = AVGPOOLING_5_FILTER_H; output_dims.w = AVGPOOLING_5_OUTPUT_W; output_dims.h = AVGPOOLING_5_OUTPUT_H; - output_dims.c = AVGPOOLING_5_OUT_CH; + output_dims.c = AVGPOOLING_5_OUTPUT_C; - pool_params.padding.w 
= AVGPOOLING_5_PAD_X; - pool_params.padding.h = AVGPOOLING_5_PAD_Y; - pool_params.stride.w = AVGPOOLING_5_STRIDE_X; - pool_params.stride.h = AVGPOOLING_5_STRIDE_Y; + pool_params.padding.w = AVGPOOLING_5_PADDING_W; + pool_params.padding.h = AVGPOOLING_5_PADDING_H; + pool_params.stride.w = AVGPOOLING_5_STRIDE_W; + pool_params.stride.h = AVGPOOLING_5_STRIDE_H; - pool_params.activation.min = AVGPOOLING_5_OUT_ACTIVATION_MIN; - pool_params.activation.max = AVGPOOLING_5_OUT_ACTIVATION_MAX; + pool_params.activation.min = AVGPOOLING_5_ACTIVATION_MIN; + pool_params.activation.max = AVGPOOLING_5_ACTIVATION_MAX; - ctx.size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_5_OUTPUT_W, AVGPOOLING_5_IN_CH); + ctx.size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_5_OUTPUT_W, AVGPOOLING_5_INPUT_C); ctx.buf = malloc(ctx.size); arm_cmsis_nn_status result = @@ -301,14 +320,17 @@ void avgpooling_5_arm_avgpool_s8(void) free(ctx.buf); } TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate(output, avgpooling_5_output_ref, AVGPOOLING_5_DST_SIZE)); + TEST_ASSERT_TRUE( + validate(output, + avgpooling_5_output, + AVGPOOLING_5_OUTPUT_W * AVGPOOLING_5_OUTPUT_H * AVGPOOLING_5_BATCH_SIZE * AVGPOOLING_5_OUTPUT_C)); } void buffer_size_mve_arm_avgpool_s8(void) { #if defined(ARM_MATH_MVEI) - const int32_t buf_size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_5_OUTPUT_W, AVGPOOLING_5_IN_CH); - const int32_t mve_buf_size = arm_avgpool_s8_get_buffer_size_mve(AVGPOOLING_5_OUTPUT_W, AVGPOOLING_5_IN_CH); + const int32_t buf_size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_5_OUTPUT_W, AVGPOOLING_5_INPUT_C); + const int32_t mve_buf_size = arm_avgpool_s8_get_buffer_size_mve(AVGPOOLING_5_OUTPUT_W, AVGPOOLING_5_INPUT_C); TEST_ASSERT_EQUAL(buf_size, mve_buf_size); #endif @@ -317,8 +339,8 @@ void buffer_size_mve_arm_avgpool_s8(void) void buffer_size_dsp_arm_avgpool_s8(void) { #if defined(ARM_MATH_DSP) && !defined(ARM_MATH_MVEI) - const int32_t buf_size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_5_OUTPUT_W, 
AVGPOOLING_5_IN_CH); - const int32_t dsp_buf_size = arm_avgpool_s8_get_buffer_size_dsp(AVGPOOLING_5_OUTPUT_W, AVGPOOLING_5_IN_CH); + const int32_t buf_size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_5_OUTPUT_W, AVGPOOLING_5_INPUT_C); + const int32_t dsp_buf_size = arm_avgpool_s8_get_buffer_size_dsp(AVGPOOLING_5_OUTPUT_W, AVGPOOLING_5_INPUT_C); TEST_ASSERT_EQUAL(buf_size, dsp_buf_size); #endif @@ -327,7 +349,8 @@ void buffer_size_dsp_arm_avgpool_s8(void) void avgpooling_param_fail_arm_avgpool_s8(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_ARG_ERROR; - int8_t output[AVGPOOLING_5_DST_SIZE] = {0}; + int8_t output[AVGPOOLING_5_OUTPUT_W * AVGPOOLING_5_OUTPUT_H * AVGPOOLING_5_BATCH_SIZE * AVGPOOLING_5_OUTPUT_C] = { + 0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -335,27 +358,27 @@ void avgpooling_param_fail_arm_avgpool_s8(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int8_t *input_data = avgpooling_5_input; + const int8_t *input_data = avgpooling_5_input_tensor; - input_dims.n = -AVGPOOLING_5_INPUT_BATCHES; + input_dims.n = -AVGPOOLING_5_BATCH_SIZE; input_dims.w = AVGPOOLING_5_INPUT_W; input_dims.h = AVGPOOLING_5_INPUT_H; - input_dims.c = AVGPOOLING_5_IN_CH; - filter_dims.w = AVGPOOLING_5_FILTER_X; - filter_dims.h = AVGPOOLING_5_FILTER_Y; + input_dims.c = AVGPOOLING_5_INPUT_C; + filter_dims.w = AVGPOOLING_5_FILTER_W; + filter_dims.h = AVGPOOLING_5_FILTER_H; output_dims.w = AVGPOOLING_5_OUTPUT_W; output_dims.h = AVGPOOLING_5_OUTPUT_H; - output_dims.c = AVGPOOLING_5_OUT_CH; + output_dims.c = AVGPOOLING_5_OUTPUT_C; - pool_params.padding.w = AVGPOOLING_5_PAD_X; - pool_params.padding.h = AVGPOOLING_5_PAD_Y; - pool_params.stride.w = AVGPOOLING_5_STRIDE_X; - pool_params.stride.h = AVGPOOLING_5_STRIDE_Y; + pool_params.padding.w = AVGPOOLING_5_PADDING_W; + pool_params.padding.h = AVGPOOLING_5_PADDING_H; + pool_params.stride.w = AVGPOOLING_5_STRIDE_W; + pool_params.stride.h = AVGPOOLING_5_STRIDE_H; - pool_params.activation.min = 
AVGPOOLING_5_OUT_ACTIVATION_MIN; - pool_params.activation.max = AVGPOOLING_5_OUT_ACTIVATION_MAX; + pool_params.activation.min = AVGPOOLING_5_ACTIVATION_MIN; + pool_params.activation.max = AVGPOOLING_5_ACTIVATION_MAX; - ctx.size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_5_OUTPUT_W, AVGPOOLING_5_IN_CH); + ctx.size = arm_avgpool_s8_get_buffer_size(AVGPOOLING_5_OUTPUT_W, AVGPOOLING_5_INPUT_C); ctx.buf = malloc(ctx.size); arm_cmsis_nn_status result = diff --git a/Tests/UnitTest/TestCases/test_arm_max_pool_s16/test_arm_max_pool_s16.c b/Tests/UnitTest/TestCases/test_arm_max_pool_s16/test_arm_max_pool_s16.c index 9bb72651..c6f34972 100644 --- a/Tests/UnitTest/TestCases/test_arm_max_pool_s16/test_arm_max_pool_s16.c +++ b/Tests/UnitTest/TestCases/test_arm_max_pool_s16/test_arm_max_pool_s16.c @@ -1,5 +1,5 @@ /* - * SPDX-FileCopyrightText: Copyright 2022-2023 Arm Limited and/or its affiliates + * SPDX-FileCopyrightText: Copyright 2022-2024 Arm Limited and/or its affiliates * * SPDX-License-Identifier: Apache-2.0 * @@ -29,7 +29,8 @@ void maxpool_int16_arm_max_pool_s16(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int16_t output[MAXPOOL_INT16_DST_SIZE] = {0}; + int16_t output[MAXPOOL_INT16_OUTPUT_W * MAXPOOL_INT16_OUTPUT_H * MAXPOOL_INT16_INPUT_C * MAXPOOL_INT16_BATCH_SIZE] = + {0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -37,25 +38,25 @@ void maxpool_int16_arm_max_pool_s16(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int16_t *input_data = maxpool_int16_input; + const int16_t *input_data = maxpool_int16_input_tensor; - input_dims.n = MAXPOOL_INT16_INPUT_BATCHES; + input_dims.n = MAXPOOL_INT16_BATCH_SIZE; input_dims.w = MAXPOOL_INT16_INPUT_W; input_dims.h = MAXPOOL_INT16_INPUT_H; - input_dims.c = MAXPOOL_INT16_IN_CH; - filter_dims.w = MAXPOOL_INT16_FILTER_X; - filter_dims.h = MAXPOOL_INT16_FILTER_Y; + input_dims.c = MAXPOOL_INT16_INPUT_C; + filter_dims.w = MAXPOOL_INT16_FILTER_W; + filter_dims.h = 
MAXPOOL_INT16_FILTER_H; output_dims.w = MAXPOOL_INT16_OUTPUT_W; output_dims.h = MAXPOOL_INT16_OUTPUT_H; - output_dims.c = MAXPOOL_INT16_OUT_CH; + output_dims.c = MAXPOOL_INT16_INPUT_C; - pool_params.padding.w = MAXPOOL_INT16_PAD_X; - pool_params.padding.h = MAXPOOL_INT16_PAD_Y; - pool_params.stride.w = MAXPOOL_INT16_STRIDE_X; - pool_params.stride.h = MAXPOOL_INT16_STRIDE_Y; + pool_params.padding.w = MAXPOOL_INT16_PADDING_W; + pool_params.padding.h = MAXPOOL_INT16_PADDING_H; + pool_params.stride.w = MAXPOOL_INT16_STRIDE_W; + pool_params.stride.h = MAXPOOL_INT16_STRIDE_H; - pool_params.activation.min = MAXPOOL_INT16_OUT_ACTIVATION_MIN; - pool_params.activation.max = MAXPOOL_INT16_OUT_ACTIVATION_MAX; + pool_params.activation.min = MAXPOOL_INT16_ACTIVATION_MIN; + pool_params.activation.max = MAXPOOL_INT16_ACTIVATION_MAX; for (int i = 0; i < REPEAT_NUM; i++) { @@ -63,14 +64,18 @@ void maxpool_int16_arm_max_pool_s16(void) arm_max_pool_s16(&ctx, &pool_params, &input_dims, input_data, &filter_dims, &output_dims, output); TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate_s16(output, maxpool_int16_output_ref, MAXPOOL_INT16_DST_SIZE)); + TEST_ASSERT_TRUE(validate_s16(output, + maxpool_int16_output, + MAXPOOL_INT16_OUTPUT_W * MAXPOOL_INT16_OUTPUT_H * MAXPOOL_INT16_INPUT_C * + MAXPOOL_INT16_BATCH_SIZE)); } } void maxpool_int16_1_arm_max_pool_s16(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int16_t output[MAXPOOL_INT16_1_DST_SIZE] = {0}; + int16_t output[MAXPOOL_INT16_1_OUTPUT_W * MAXPOOL_INT16_1_OUTPUT_H * MAXPOOL_INT16_1_INPUT_C * + MAXPOOL_INT16_1_BATCH_SIZE] = {0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -78,25 +83,25 @@ void maxpool_int16_1_arm_max_pool_s16(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int16_t *input_data = maxpool_int16_1_input; + const int16_t *input_data = maxpool_int16_1_input_tensor; - input_dims.n = MAXPOOL_INT16_1_INPUT_BATCHES; + input_dims.n = 
MAXPOOL_INT16_1_BATCH_SIZE; input_dims.w = MAXPOOL_INT16_1_INPUT_W; input_dims.h = MAXPOOL_INT16_1_INPUT_H; - input_dims.c = MAXPOOL_INT16_1_IN_CH; - filter_dims.w = MAXPOOL_INT16_1_FILTER_X; - filter_dims.h = MAXPOOL_INT16_1_FILTER_Y; + input_dims.c = MAXPOOL_INT16_1_INPUT_C; + filter_dims.w = MAXPOOL_INT16_1_FILTER_W; + filter_dims.h = MAXPOOL_INT16_1_FILTER_H; output_dims.w = MAXPOOL_INT16_1_OUTPUT_W; output_dims.h = MAXPOOL_INT16_1_OUTPUT_H; - output_dims.c = MAXPOOL_INT16_1_OUT_CH; + output_dims.c = MAXPOOL_INT16_1_INPUT_C; - pool_params.padding.w = MAXPOOL_INT16_1_PAD_X; - pool_params.padding.h = MAXPOOL_INT16_1_PAD_Y; - pool_params.stride.w = MAXPOOL_INT16_1_STRIDE_X; - pool_params.stride.h = MAXPOOL_INT16_1_STRIDE_Y; + pool_params.padding.w = MAXPOOL_INT16_1_PADDING_W; + pool_params.padding.h = MAXPOOL_INT16_1_PADDING_H; + pool_params.stride.w = MAXPOOL_INT16_1_STRIDE_W; + pool_params.stride.h = MAXPOOL_INT16_1_STRIDE_H; - pool_params.activation.min = MAXPOOL_INT16_1_OUT_ACTIVATION_MIN; - pool_params.activation.max = MAXPOOL_INT16_1_OUT_ACTIVATION_MAX; + pool_params.activation.min = MAXPOOL_INT16_1_ACTIVATION_MIN; + pool_params.activation.max = MAXPOOL_INT16_1_ACTIVATION_MAX; for (int i = 0; i < REPEAT_NUM; i++) { @@ -104,14 +109,18 @@ void maxpool_int16_1_arm_max_pool_s16(void) arm_max_pool_s16(&ctx, &pool_params, &input_dims, input_data, &filter_dims, &output_dims, output); TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate_s16(output, maxpool_int16_1_output_ref, MAXPOOL_INT16_1_DST_SIZE)); + TEST_ASSERT_TRUE(validate_s16(output, + maxpool_int16_1_output, + MAXPOOL_INT16_1_OUTPUT_W * MAXPOOL_INT16_1_OUTPUT_H * MAXPOOL_INT16_1_INPUT_C * + MAXPOOL_INT16_1_BATCH_SIZE)); } } void maxpool_int16_2_arm_max_pool_s16(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int16_t output[MAXPOOL_INT16_2_DST_SIZE] = {0}; + int16_t output[MAXPOOL_INT16_2_OUTPUT_W * MAXPOOL_INT16_2_OUTPUT_H * MAXPOOL_INT16_2_INPUT_C * + 
MAXPOOL_INT16_2_BATCH_SIZE] = {0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -119,25 +128,25 @@ void maxpool_int16_2_arm_max_pool_s16(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int16_t *input_data = maxpool_int16_2_input; + const int16_t *input_data = maxpool_int16_2_input_tensor; - input_dims.n = MAXPOOL_INT16_2_INPUT_BATCHES; + input_dims.n = MAXPOOL_INT16_2_BATCH_SIZE; input_dims.w = MAXPOOL_INT16_2_INPUT_W; input_dims.h = MAXPOOL_INT16_2_INPUT_H; - input_dims.c = MAXPOOL_INT16_2_IN_CH; - filter_dims.w = MAXPOOL_INT16_2_FILTER_X; - filter_dims.h = MAXPOOL_INT16_2_FILTER_Y; + input_dims.c = MAXPOOL_INT16_2_INPUT_C; + filter_dims.w = MAXPOOL_INT16_2_FILTER_W; + filter_dims.h = MAXPOOL_INT16_2_FILTER_H; output_dims.w = MAXPOOL_INT16_2_OUTPUT_W; output_dims.h = MAXPOOL_INT16_2_OUTPUT_H; - output_dims.c = MAXPOOL_INT16_2_OUT_CH; + output_dims.c = MAXPOOL_INT16_2_INPUT_C; - pool_params.padding.w = MAXPOOL_INT16_2_PAD_X; - pool_params.padding.h = MAXPOOL_INT16_2_PAD_Y; - pool_params.stride.w = MAXPOOL_INT16_2_STRIDE_X; - pool_params.stride.h = MAXPOOL_INT16_2_STRIDE_Y; + pool_params.padding.w = MAXPOOL_INT16_2_PADDING_W; + pool_params.padding.h = MAXPOOL_INT16_2_PADDING_H; + pool_params.stride.w = MAXPOOL_INT16_2_STRIDE_W; + pool_params.stride.h = MAXPOOL_INT16_2_STRIDE_H; - pool_params.activation.min = MAXPOOL_INT16_2_OUT_ACTIVATION_MIN; - pool_params.activation.max = MAXPOOL_INT16_2_OUT_ACTIVATION_MAX; + pool_params.activation.min = MAXPOOL_INT16_2_ACTIVATION_MIN; + pool_params.activation.max = MAXPOOL_INT16_2_ACTIVATION_MAX; for (int i = 0; i < REPEAT_NUM; i++) { @@ -145,14 +154,18 @@ void maxpool_int16_2_arm_max_pool_s16(void) arm_max_pool_s16(&ctx, &pool_params, &input_dims, input_data, &filter_dims, &output_dims, output); TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate_s16(output, maxpool_int16_2_output_ref, MAXPOOL_INT16_2_DST_SIZE)); + TEST_ASSERT_TRUE(validate_s16(output, + maxpool_int16_2_output, + 
MAXPOOL_INT16_2_OUTPUT_W * MAXPOOL_INT16_2_OUTPUT_H * MAXPOOL_INT16_2_INPUT_C * + MAXPOOL_INT16_2_BATCH_SIZE)); } } void maxpool_int16_param_fail_arm_max_pool_s16(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_ARG_ERROR; - int16_t output[MAXPOOL_INT16_2_DST_SIZE] = {0}; + int16_t output[MAXPOOL_INT16_2_OUTPUT_W * MAXPOOL_INT16_2_OUTPUT_H * MAXPOOL_INT16_2_INPUT_C * + MAXPOOL_INT16_2_BATCH_SIZE] = {0}; cmsis_nn_context ctx = {}; cmsis_nn_pool_params pool_params; @@ -160,25 +173,25 @@ void maxpool_int16_param_fail_arm_max_pool_s16(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int16_t *input_data = maxpool_int16_2_input; + const int16_t *input_data = maxpool_int16_2_input_tensor; - input_dims.n = -MAXPOOL_INT16_2_INPUT_BATCHES; + input_dims.n = -MAXPOOL_INT16_2_BATCH_SIZE; input_dims.w = MAXPOOL_INT16_2_INPUT_W; input_dims.h = MAXPOOL_INT16_2_INPUT_H; - input_dims.c = MAXPOOL_INT16_2_IN_CH; - filter_dims.w = MAXPOOL_INT16_2_FILTER_X; - filter_dims.h = MAXPOOL_INT16_2_FILTER_Y; + input_dims.c = MAXPOOL_INT16_2_INPUT_C; + filter_dims.w = MAXPOOL_INT16_2_FILTER_W; + filter_dims.h = MAXPOOL_INT16_2_FILTER_H; output_dims.w = MAXPOOL_INT16_2_OUTPUT_W; output_dims.h = MAXPOOL_INT16_2_OUTPUT_H; - output_dims.c = MAXPOOL_INT16_2_OUT_CH; + output_dims.c = MAXPOOL_INT16_2_INPUT_C; - pool_params.padding.w = MAXPOOL_INT16_2_PAD_X; - pool_params.padding.h = MAXPOOL_INT16_2_PAD_Y; - pool_params.stride.w = MAXPOOL_INT16_2_STRIDE_X; - pool_params.stride.h = MAXPOOL_INT16_2_STRIDE_Y; + pool_params.padding.w = MAXPOOL_INT16_2_PADDING_W; + pool_params.padding.h = MAXPOOL_INT16_2_PADDING_H; + pool_params.stride.w = MAXPOOL_INT16_2_STRIDE_W; + pool_params.stride.h = MAXPOOL_INT16_2_STRIDE_H; - pool_params.activation.min = MAXPOOL_INT16_2_OUT_ACTIVATION_MIN; - pool_params.activation.max = MAXPOOL_INT16_2_OUT_ACTIVATION_MAX; + pool_params.activation.min = MAXPOOL_INT16_2_ACTIVATION_MIN; + pool_params.activation.max = MAXPOOL_INT16_2_ACTIVATION_MAX; 
arm_cmsis_nn_status result = arm_max_pool_s16(&ctx, &pool_params, &input_dims, input_data, &filter_dims, &output_dims, output); diff --git a/Tests/UnitTest/TestCases/test_arm_max_pool_s8/test_arm_max_pool_s8.c b/Tests/UnitTest/TestCases/test_arm_max_pool_s8/test_arm_max_pool_s8.c index 535c8652..55ddbcf0 100644 --- a/Tests/UnitTest/TestCases/test_arm_max_pool_s8/test_arm_max_pool_s8.c +++ b/Tests/UnitTest/TestCases/test_arm_max_pool_s8/test_arm_max_pool_s8.c @@ -1,5 +1,5 @@ /* - * SPDX-FileCopyrightText: Copyright 2010-2023 Arm Limited and/or its affiliates + * SPDX-FileCopyrightText: Copyright 2010-2024 Arm Limited and/or its affiliates * * SPDX-License-Identifier: Apache-2.0 * @@ -34,7 +34,7 @@ void maxpooling_arm_max_pool_s8(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int8_t output[MAXPOOLING_DST_SIZE] = {0}; + int8_t output[MAXPOOLING_OUTPUT_W * MAXPOOLING_OUTPUT_H * MAXPOOLING_INPUT_C * MAXPOOLING_BATCH_SIZE] = {0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -42,25 +42,25 @@ void maxpooling_arm_max_pool_s8(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int8_t *input_data = maxpooling_input; + const int8_t *input_data = maxpooling_input_tensor; - input_dims.n = MAXPOOLING_INPUT_BATCHES; + input_dims.n = MAXPOOLING_BATCH_SIZE; input_dims.w = MAXPOOLING_INPUT_W; input_dims.h = MAXPOOLING_INPUT_H; - input_dims.c = MAXPOOLING_IN_CH; - filter_dims.w = MAXPOOLING_FILTER_X; - filter_dims.h = MAXPOOLING_FILTER_Y; + input_dims.c = MAXPOOLING_INPUT_C; + filter_dims.w = MAXPOOLING_FILTER_W; + filter_dims.h = MAXPOOLING_FILTER_H; output_dims.w = MAXPOOLING_OUTPUT_W; output_dims.h = MAXPOOLING_OUTPUT_H; - output_dims.c = MAXPOOLING_OUT_CH; + output_dims.c = MAXPOOLING_INPUT_C; - pool_params.padding.w = MAXPOOLING_PAD_X; - pool_params.padding.h = MAXPOOLING_PAD_Y; - pool_params.stride.w = MAXPOOLING_STRIDE_X; - pool_params.stride.h = MAXPOOLING_STRIDE_Y; + pool_params.padding.w = MAXPOOLING_PADDING_W; + 
pool_params.padding.h = MAXPOOLING_PADDING_H; + pool_params.stride.w = MAXPOOLING_STRIDE_W; + pool_params.stride.h = MAXPOOLING_STRIDE_H; - pool_params.activation.min = MAXPOOLING_OUT_ACTIVATION_MIN; - pool_params.activation.max = MAXPOOLING_OUT_ACTIVATION_MAX; + pool_params.activation.min = MAXPOOLING_ACTIVATION_MIN; + pool_params.activation.max = MAXPOOLING_ACTIVATION_MAX; for (int i = 0; i < REPEAT_NUM; i++) { @@ -68,14 +68,17 @@ void maxpooling_arm_max_pool_s8(void) arm_max_pool_s8(&ctx, &pool_params, &input_dims, input_data, &filter_dims, &output_dims, output); TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate(output, maxpooling_output_ref, MAXPOOLING_DST_SIZE)); + TEST_ASSERT_TRUE( + validate(output, + maxpooling_output, + MAXPOOLING_OUTPUT_W * MAXPOOLING_OUTPUT_H * MAXPOOLING_INPUT_C * MAXPOOLING_BATCH_SIZE)); } } void maxpooling_1_arm_max_pool_s8(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int8_t output[MAXPOOLING_1_DST_SIZE] = {0}; + int8_t output[MAXPOOLING_1_OUTPUT_W * MAXPOOLING_1_OUTPUT_H * MAXPOOLING_1_INPUT_C * MAXPOOLING_1_BATCH_SIZE] = {0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -83,25 +86,25 @@ void maxpooling_1_arm_max_pool_s8(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int8_t *input_data = maxpooling_1_input; + const int8_t *input_data = maxpooling_1_input_tensor; - input_dims.n = MAXPOOLING_1_INPUT_BATCHES; + input_dims.n = MAXPOOLING_1_BATCH_SIZE; input_dims.w = MAXPOOLING_1_INPUT_W; input_dims.h = MAXPOOLING_1_INPUT_H; - input_dims.c = MAXPOOLING_1_IN_CH; - filter_dims.w = MAXPOOLING_1_FILTER_X; - filter_dims.h = MAXPOOLING_1_FILTER_Y; + input_dims.c = MAXPOOLING_1_INPUT_C; + filter_dims.w = MAXPOOLING_1_FILTER_W; + filter_dims.h = MAXPOOLING_1_FILTER_H; output_dims.w = MAXPOOLING_1_OUTPUT_W; output_dims.h = MAXPOOLING_1_OUTPUT_H; - output_dims.c = MAXPOOLING_1_OUT_CH; + output_dims.c = MAXPOOLING_1_INPUT_C; - pool_params.padding.w = MAXPOOLING_1_PAD_X; - 
pool_params.padding.h = MAXPOOLING_1_PAD_Y; - pool_params.stride.w = MAXPOOLING_1_STRIDE_X; - pool_params.stride.h = MAXPOOLING_1_STRIDE_Y; + pool_params.padding.w = MAXPOOLING_1_PADDING_W; + pool_params.padding.h = MAXPOOLING_1_PADDING_H; + pool_params.stride.w = MAXPOOLING_1_STRIDE_W; + pool_params.stride.h = MAXPOOLING_1_STRIDE_H; - pool_params.activation.min = MAXPOOLING_1_OUT_ACTIVATION_MIN; - pool_params.activation.max = MAXPOOLING_1_OUT_ACTIVATION_MAX; + pool_params.activation.min = MAXPOOLING_1_ACTIVATION_MIN; + pool_params.activation.max = MAXPOOLING_1_ACTIVATION_MAX; for (int i = 0; i < REPEAT_NUM; i++) { @@ -109,14 +112,17 @@ void maxpooling_1_arm_max_pool_s8(void) arm_max_pool_s8(&ctx, &pool_params, &input_dims, input_data, &filter_dims, &output_dims, output); TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate(output, maxpooling_1_output_ref, MAXPOOLING_1_DST_SIZE)); + TEST_ASSERT_TRUE( + validate(output, + maxpooling_1_output, + MAXPOOLING_1_OUTPUT_W * MAXPOOLING_1_OUTPUT_H * MAXPOOLING_1_INPUT_C * MAXPOOLING_1_BATCH_SIZE)); } } void maxpooling_2_arm_max_pool_s8(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int8_t output[MAXPOOLING_2_DST_SIZE] = {0}; + int8_t output[MAXPOOLING_2_OUTPUT_W * MAXPOOLING_2_OUTPUT_H * MAXPOOLING_2_INPUT_C * MAXPOOLING_2_BATCH_SIZE] = {0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -124,25 +130,25 @@ void maxpooling_2_arm_max_pool_s8(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int8_t *input_data = maxpooling_2_input; + const int8_t *input_data = maxpooling_2_input_tensor; - input_dims.n = MAXPOOLING_2_INPUT_BATCHES; + input_dims.n = MAXPOOLING_2_BATCH_SIZE; input_dims.w = MAXPOOLING_2_INPUT_W; input_dims.h = MAXPOOLING_2_INPUT_H; - input_dims.c = MAXPOOLING_2_IN_CH; - filter_dims.w = MAXPOOLING_2_FILTER_X; - filter_dims.h = MAXPOOLING_2_FILTER_Y; + input_dims.c = MAXPOOLING_2_INPUT_C; + filter_dims.w = MAXPOOLING_2_FILTER_W; + filter_dims.h = 
MAXPOOLING_2_FILTER_H; output_dims.w = MAXPOOLING_2_OUTPUT_W; output_dims.h = MAXPOOLING_2_OUTPUT_H; - output_dims.c = MAXPOOLING_2_OUT_CH; + output_dims.c = MAXPOOLING_2_INPUT_C; - pool_params.padding.w = MAXPOOLING_2_PAD_X; - pool_params.padding.h = MAXPOOLING_2_PAD_Y; - pool_params.stride.w = MAXPOOLING_2_STRIDE_X; - pool_params.stride.h = MAXPOOLING_2_STRIDE_Y; + pool_params.padding.w = MAXPOOLING_2_PADDING_W; + pool_params.padding.h = MAXPOOLING_2_PADDING_H; + pool_params.stride.w = MAXPOOLING_2_STRIDE_W; + pool_params.stride.h = MAXPOOLING_2_STRIDE_H; - pool_params.activation.min = MAXPOOLING_2_OUT_ACTIVATION_MIN; - pool_params.activation.max = MAXPOOLING_2_OUT_ACTIVATION_MAX; + pool_params.activation.min = MAXPOOLING_2_ACTIVATION_MIN; + pool_params.activation.max = MAXPOOLING_2_ACTIVATION_MAX; for (int i = 0; i < REPEAT_NUM; i++) { @@ -150,14 +156,17 @@ void maxpooling_2_arm_max_pool_s8(void) arm_max_pool_s8(&ctx, &pool_params, &input_dims, input_data, &filter_dims, &output_dims, output); TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate(output, maxpooling_2_output_ref, MAXPOOLING_2_DST_SIZE)); + TEST_ASSERT_TRUE( + validate(output, + maxpooling_2_output, + MAXPOOLING_2_OUTPUT_W * MAXPOOLING_2_OUTPUT_H * MAXPOOLING_2_INPUT_C * MAXPOOLING_2_BATCH_SIZE)); } } void maxpooling_3_arm_max_pool_s8(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int8_t output[MAXPOOLING_3_DST_SIZE] = {0}; + int8_t output[MAXPOOLING_3_OUTPUT_W * MAXPOOLING_3_OUTPUT_H * MAXPOOLING_3_INPUT_C * MAXPOOLING_3_BATCH_SIZE] = {0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -165,25 +174,25 @@ void maxpooling_3_arm_max_pool_s8(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int8_t *input_data = maxpooling_3_input; + const int8_t *input_data = maxpooling_3_input_tensor; - input_dims.n = MAXPOOLING_3_INPUT_BATCHES; + input_dims.n = MAXPOOLING_3_BATCH_SIZE; input_dims.w = MAXPOOLING_3_INPUT_W; input_dims.h = 
MAXPOOLING_3_INPUT_H; - input_dims.c = MAXPOOLING_3_IN_CH; - filter_dims.w = MAXPOOLING_3_FILTER_X; - filter_dims.h = MAXPOOLING_3_FILTER_Y; + input_dims.c = MAXPOOLING_3_INPUT_C; + filter_dims.w = MAXPOOLING_3_FILTER_W; + filter_dims.h = MAXPOOLING_3_FILTER_H; output_dims.w = MAXPOOLING_3_OUTPUT_W; output_dims.h = MAXPOOLING_3_OUTPUT_H; - output_dims.c = MAXPOOLING_3_OUT_CH; + output_dims.c = MAXPOOLING_3_INPUT_C; - pool_params.padding.w = MAXPOOLING_3_PAD_X; - pool_params.padding.h = MAXPOOLING_3_PAD_Y; - pool_params.stride.w = MAXPOOLING_3_STRIDE_X; - pool_params.stride.h = MAXPOOLING_3_STRIDE_Y; + pool_params.padding.w = MAXPOOLING_3_PADDING_W; + pool_params.padding.h = MAXPOOLING_3_PADDING_H; + pool_params.stride.w = MAXPOOLING_3_STRIDE_W; + pool_params.stride.h = MAXPOOLING_3_STRIDE_H; - pool_params.activation.min = MAXPOOLING_3_OUT_ACTIVATION_MIN; - pool_params.activation.max = MAXPOOLING_3_OUT_ACTIVATION_MAX; + pool_params.activation.min = MAXPOOLING_3_ACTIVATION_MIN; + pool_params.activation.max = MAXPOOLING_3_ACTIVATION_MAX; for (int i = 0; i < REPEAT_NUM; i++) { @@ -191,14 +200,17 @@ void maxpooling_3_arm_max_pool_s8(void) arm_max_pool_s8(&ctx, &pool_params, &input_dims, input_data, &filter_dims, &output_dims, output); TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate(output, maxpooling_3_output_ref, MAXPOOLING_3_DST_SIZE)); + TEST_ASSERT_TRUE( + validate(output, + maxpooling_3_output, + MAXPOOLING_3_OUTPUT_W * MAXPOOLING_3_OUTPUT_H * MAXPOOLING_3_INPUT_C * MAXPOOLING_3_BATCH_SIZE)); } } void maxpooling_4_arm_max_pool_s8(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int8_t output[MAXPOOLING_4_DST_SIZE] = {0}; + int8_t output[MAXPOOLING_4_OUTPUT_W * MAXPOOLING_4_OUTPUT_H * MAXPOOLING_4_INPUT_C * MAXPOOLING_4_BATCH_SIZE] = {0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -206,25 +218,25 @@ void maxpooling_4_arm_max_pool_s8(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int8_t 
*input_data = maxpooling_4_input; + const int8_t *input_data = maxpooling_4_input_tensor; - input_dims.n = MAXPOOLING_4_INPUT_BATCHES; + input_dims.n = MAXPOOLING_4_BATCH_SIZE; input_dims.w = MAXPOOLING_4_INPUT_W; input_dims.h = MAXPOOLING_4_INPUT_H; - input_dims.c = MAXPOOLING_4_IN_CH; - filter_dims.w = MAXPOOLING_4_FILTER_X; - filter_dims.h = MAXPOOLING_4_FILTER_Y; + input_dims.c = MAXPOOLING_4_INPUT_C; + filter_dims.w = MAXPOOLING_4_FILTER_W; + filter_dims.h = MAXPOOLING_4_FILTER_H; output_dims.w = MAXPOOLING_4_OUTPUT_W; output_dims.h = MAXPOOLING_4_OUTPUT_H; - output_dims.c = MAXPOOLING_4_OUT_CH; + output_dims.c = MAXPOOLING_4_INPUT_C; - pool_params.padding.w = MAXPOOLING_4_PAD_X; - pool_params.padding.h = MAXPOOLING_4_PAD_Y; - pool_params.stride.w = MAXPOOLING_4_STRIDE_X; - pool_params.stride.h = MAXPOOLING_4_STRIDE_Y; + pool_params.padding.w = MAXPOOLING_4_PADDING_W; + pool_params.padding.h = MAXPOOLING_4_PADDING_H; + pool_params.stride.w = MAXPOOLING_4_STRIDE_W; + pool_params.stride.h = MAXPOOLING_4_STRIDE_H; - pool_params.activation.min = MAXPOOLING_4_OUT_ACTIVATION_MIN; - pool_params.activation.max = MAXPOOLING_4_OUT_ACTIVATION_MAX; + pool_params.activation.min = MAXPOOLING_4_ACTIVATION_MIN; + pool_params.activation.max = MAXPOOLING_4_ACTIVATION_MAX; for (int i = 0; i < REPEAT_NUM; i++) { @@ -232,14 +244,17 @@ void maxpooling_4_arm_max_pool_s8(void) arm_max_pool_s8(&ctx, &pool_params, &input_dims, input_data, &filter_dims, &output_dims, output); TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate(output, maxpooling_4_output_ref, MAXPOOLING_4_DST_SIZE)); + TEST_ASSERT_TRUE( + validate(output, + maxpooling_4_output, + MAXPOOLING_4_OUTPUT_W * MAXPOOLING_4_OUTPUT_H * MAXPOOLING_4_INPUT_C * MAXPOOLING_4_BATCH_SIZE)); } } void maxpooling_5_arm_max_pool_s8(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int8_t output[MAXPOOLING_5_DST_SIZE] = {0}; + int8_t output[MAXPOOLING_5_OUTPUT_W * MAXPOOLING_5_OUTPUT_H * MAXPOOLING_5_INPUT_C 
* MAXPOOLING_5_BATCH_SIZE] = {0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -247,25 +262,25 @@ void maxpooling_5_arm_max_pool_s8(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int8_t *input_data = maxpooling_5_input; + const int8_t *input_data = maxpooling_5_input_tensor; - input_dims.n = MAXPOOLING_5_INPUT_BATCHES; + input_dims.n = MAXPOOLING_5_BATCH_SIZE; input_dims.w = MAXPOOLING_5_INPUT_W; input_dims.h = MAXPOOLING_5_INPUT_H; - input_dims.c = MAXPOOLING_5_IN_CH; - filter_dims.w = MAXPOOLING_5_FILTER_X; - filter_dims.h = MAXPOOLING_5_FILTER_Y; + input_dims.c = MAXPOOLING_5_INPUT_C; + filter_dims.w = MAXPOOLING_5_FILTER_W; + filter_dims.h = MAXPOOLING_5_FILTER_H; output_dims.w = MAXPOOLING_5_OUTPUT_W; output_dims.h = MAXPOOLING_5_OUTPUT_H; - output_dims.c = MAXPOOLING_5_OUT_CH; + output_dims.c = MAXPOOLING_5_INPUT_C; - pool_params.padding.w = MAXPOOLING_5_PAD_X; - pool_params.padding.h = MAXPOOLING_5_PAD_Y; - pool_params.stride.w = MAXPOOLING_5_STRIDE_X; - pool_params.stride.h = MAXPOOLING_5_STRIDE_Y; + pool_params.padding.w = MAXPOOLING_5_PADDING_W; + pool_params.padding.h = MAXPOOLING_5_PADDING_H; + pool_params.stride.w = MAXPOOLING_5_STRIDE_W; + pool_params.stride.h = MAXPOOLING_5_STRIDE_H; - pool_params.activation.min = MAXPOOLING_5_OUT_ACTIVATION_MIN; - pool_params.activation.max = MAXPOOLING_5_OUT_ACTIVATION_MAX; + pool_params.activation.min = MAXPOOLING_5_ACTIVATION_MIN; + pool_params.activation.max = MAXPOOLING_5_ACTIVATION_MAX; for (int i = 0; i < REPEAT_NUM; i++) { @@ -273,14 +288,17 @@ void maxpooling_5_arm_max_pool_s8(void) arm_max_pool_s8(&ctx, &pool_params, &input_dims, input_data, &filter_dims, &output_dims, output); TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate(output, maxpooling_5_output_ref, MAXPOOLING_5_DST_SIZE)); + TEST_ASSERT_TRUE( + validate(output, + maxpooling_5_output, + MAXPOOLING_5_OUTPUT_W * MAXPOOLING_5_OUTPUT_H * MAXPOOLING_5_INPUT_C * MAXPOOLING_5_BATCH_SIZE)); } } void
maxpooling_6_arm_max_pool_s8(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int8_t output[MAXPOOLING_6_DST_SIZE] = {0}; + int8_t output[MAXPOOLING_6_OUTPUT_W * MAXPOOLING_6_OUTPUT_H * MAXPOOLING_6_INPUT_C * MAXPOOLING_6_BATCH_SIZE] = {0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -288,25 +306,25 @@ void maxpooling_6_arm_max_pool_s8(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int8_t *input_data = maxpooling_6_input; + const int8_t *input_data = maxpooling_6_input_tensor; - input_dims.n = MAXPOOLING_6_INPUT_BATCHES; + input_dims.n = MAXPOOLING_6_BATCH_SIZE; input_dims.w = MAXPOOLING_6_INPUT_W; input_dims.h = MAXPOOLING_6_INPUT_H; - input_dims.c = MAXPOOLING_6_IN_CH; - filter_dims.w = MAXPOOLING_6_FILTER_X; - filter_dims.h = MAXPOOLING_6_FILTER_Y; + input_dims.c = MAXPOOLING_6_INPUT_C; + filter_dims.w = MAXPOOLING_6_FILTER_W; + filter_dims.h = MAXPOOLING_6_FILTER_H; output_dims.w = MAXPOOLING_6_OUTPUT_W; output_dims.h = MAXPOOLING_6_OUTPUT_H; - output_dims.c = MAXPOOLING_6_OUT_CH; + output_dims.c = MAXPOOLING_6_INPUT_C; - pool_params.padding.w = MAXPOOLING_6_PAD_X; - pool_params.padding.h = MAXPOOLING_6_PAD_Y; - pool_params.stride.w = MAXPOOLING_6_STRIDE_X; - pool_params.stride.h = MAXPOOLING_6_STRIDE_Y; + pool_params.padding.w = MAXPOOLING_6_PADDING_W; + pool_params.padding.h = MAXPOOLING_6_PADDING_H; + pool_params.stride.w = MAXPOOLING_6_STRIDE_W; + pool_params.stride.h = MAXPOOLING_6_STRIDE_H; - pool_params.activation.min = MAXPOOLING_6_OUT_ACTIVATION_MIN; - pool_params.activation.max = MAXPOOLING_6_OUT_ACTIVATION_MAX; + pool_params.activation.min = MAXPOOLING_6_ACTIVATION_MIN; + pool_params.activation.max = MAXPOOLING_6_ACTIVATION_MAX; for (int i = 0; i < REPEAT_NUM; i++) { @@ -314,14 +332,17 @@ void maxpooling_6_arm_max_pool_s8(void) arm_max_pool_s8(&ctx, &pool_params, &input_dims, input_data, &filter_dims, &output_dims, output); TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate(output, 
maxpooling_6_output_ref, MAXPOOLING_6_DST_SIZE)); + TEST_ASSERT_TRUE( + validate(output, + maxpooling_6_output, + MAXPOOLING_6_OUTPUT_W * MAXPOOLING_6_OUTPUT_H * MAXPOOLING_6_INPUT_C * MAXPOOLING_6_BATCH_SIZE)); } } void maxpooling_7_arm_max_pool_s8(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; - int8_t output[MAXPOOLING_7_DST_SIZE] = {0}; + int8_t output[MAXPOOLING_7_OUTPUT_W * MAXPOOLING_7_OUTPUT_H * MAXPOOLING_7_INPUT_C * MAXPOOLING_7_BATCH_SIZE] = {0}; cmsis_nn_context ctx; cmsis_nn_pool_params pool_params; @@ -329,25 +350,25 @@ void maxpooling_7_arm_max_pool_s8(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int8_t *input_data = maxpooling_7_input; + const int8_t *input_data = maxpooling_7_input_tensor; - input_dims.n = MAXPOOLING_7_INPUT_BATCHES; + input_dims.n = MAXPOOLING_7_BATCH_SIZE; input_dims.w = MAXPOOLING_7_INPUT_W; input_dims.h = MAXPOOLING_7_INPUT_H; - input_dims.c = MAXPOOLING_7_IN_CH; - filter_dims.w = MAXPOOLING_7_FILTER_X; - filter_dims.h = MAXPOOLING_7_FILTER_Y; + input_dims.c = MAXPOOLING_7_INPUT_C; + filter_dims.w = MAXPOOLING_7_FILTER_W; + filter_dims.h = MAXPOOLING_7_FILTER_H; output_dims.w = MAXPOOLING_7_OUTPUT_W; output_dims.h = MAXPOOLING_7_OUTPUT_H; - output_dims.c = MAXPOOLING_7_OUT_CH; + output_dims.c = MAXPOOLING_7_INPUT_C; - pool_params.padding.w = MAXPOOLING_7_PAD_X; - pool_params.padding.h = MAXPOOLING_7_PAD_Y; - pool_params.stride.w = MAXPOOLING_7_STRIDE_X; - pool_params.stride.h = MAXPOOLING_7_STRIDE_Y; + pool_params.padding.w = MAXPOOLING_7_PADDING_W; + pool_params.padding.h = MAXPOOLING_7_PADDING_H; + pool_params.stride.w = MAXPOOLING_7_STRIDE_W; + pool_params.stride.h = MAXPOOLING_7_STRIDE_H; - pool_params.activation.min = MAXPOOLING_7_OUT_ACTIVATION_MIN; - pool_params.activation.max = MAXPOOLING_7_OUT_ACTIVATION_MAX; + pool_params.activation.min = MAXPOOLING_7_ACTIVATION_MIN; + pool_params.activation.max = MAXPOOLING_7_ACTIVATION_MAX; for (int i = 0; i < REPEAT_NUM; i++) { @@ -355,14 
+376,17 @@ void maxpooling_7_arm_max_pool_s8(void) arm_max_pool_s8(&ctx, &pool_params, &input_dims, input_data, &filter_dims, &output_dims, output); TEST_ASSERT_EQUAL(expected, result); - TEST_ASSERT_TRUE(validate(output, maxpooling_7_output_ref, MAXPOOLING_7_DST_SIZE)); + TEST_ASSERT_TRUE( + validate(output, + maxpooling_7_output, + MAXPOOLING_7_OUTPUT_W * MAXPOOLING_7_OUTPUT_H * MAXPOOLING_7_INPUT_C * MAXPOOLING_7_BATCH_SIZE)); } } void maxpooling_param_fail_arm_max_pool_s8(void) { const arm_cmsis_nn_status expected = ARM_CMSIS_NN_ARG_ERROR; - int8_t output[MAXPOOLING_7_DST_SIZE] = {0}; + int8_t output[MAXPOOLING_7_OUTPUT_W * MAXPOOLING_7_OUTPUT_H * MAXPOOLING_7_INPUT_C * MAXPOOLING_7_BATCH_SIZE] = {0}; cmsis_nn_context ctx = {}; cmsis_nn_pool_params pool_params; @@ -370,25 +394,25 @@ void maxpooling_param_fail_arm_max_pool_s8(void) cmsis_nn_dims filter_dims; cmsis_nn_dims output_dims; - const int8_t *input_data = maxpooling_7_input; + const int8_t *input_data = maxpooling_7_input_tensor; input_dims.n = -3; input_dims.w = MAXPOOLING_7_INPUT_W; input_dims.h = MAXPOOLING_7_INPUT_H; - input_dims.c = MAXPOOLING_7_IN_CH; - filter_dims.w = MAXPOOLING_7_FILTER_X; - filter_dims.h = MAXPOOLING_7_FILTER_Y; + input_dims.c = MAXPOOLING_7_INPUT_C; + filter_dims.w = MAXPOOLING_7_FILTER_W; + filter_dims.h = MAXPOOLING_7_FILTER_H; output_dims.w = MAXPOOLING_7_OUTPUT_W; output_dims.h = MAXPOOLING_7_OUTPUT_H; - output_dims.c = MAXPOOLING_7_OUT_CH; + output_dims.c = MAXPOOLING_7_INPUT_C; - pool_params.padding.w = MAXPOOLING_7_PAD_X; - pool_params.padding.h = MAXPOOLING_7_PAD_Y; - pool_params.stride.w = MAXPOOLING_7_STRIDE_X; - pool_params.stride.h = MAXPOOLING_7_STRIDE_Y; + pool_params.padding.w = MAXPOOLING_7_PADDING_W; + pool_params.padding.h = MAXPOOLING_7_PADDING_H; + pool_params.stride.w = MAXPOOLING_7_STRIDE_W; + pool_params.stride.h = MAXPOOLING_7_STRIDE_H; - pool_params.activation.min = MAXPOOLING_7_OUT_ACTIVATION_MIN; - pool_params.activation.max = 
MAXPOOLING_7_OUT_ACTIVATION_MAX; + pool_params.activation.min = MAXPOOLING_7_ACTIVATION_MIN; + pool_params.activation.max = MAXPOOLING_7_ACTIVATION_MAX; arm_cmsis_nn_status result = arm_max_pool_s8(&ctx, &pool_params, &input_dims, input_data, &filter_dims, &output_dims, output);