From d32e7ae0f75b6bcd83a6bb92a715cf03bfdeb324 Mon Sep 17 00:00:00 2001
From: liujinnan <1823192871@qq.com>
Date: Wed, 1 Feb 2023 03:17:02 +0000
Subject: [PATCH 1/2] Submit add_n categorical test script and json

---
 api/tests/add_n.py                    | 18 ++----
 api/tests/categorical.py              | 47 ++++++++++++++
 api/tests_v2/configs/add_n.json       | 91 +++++++++++++++++++++++++++
 api/tests_v2/configs/categorical.json | 10 +++
 4 files changed, 154 insertions(+), 12 deletions(-)
 create mode 100644 api/tests/categorical.py
 create mode 100644 api/tests_v2/configs/categorical.json

diff --git a/api/tests/add_n.py b/api/tests/add_n.py
index 536c66467e..e79f91265f 100644
--- a/api/tests/add_n.py
+++ b/api/tests/add_n.py
@@ -34,22 +34,16 @@ def build_graph(self, config):
 @benchmark_registry.register("add_n")
 class TorchAddN(PytorchOpBenchmarkBase):
     def build_graph(self, config):
-        input_list = []
-        input_0 = self.variable(
-            name='input_' + str(0),
-            shape=config.inputs_shape[0],
-            dtype=config.inputs_dtype[0])
-        result = input_0
-        input_list.append(input_0)
-        for i in range(1, len(config.inputs_shape)):
+        inputs = []
+        for i in range(len(config.inputs_shape)):
             input_i = self.variable(
                 name='input_' + str(i),
                 shape=config.inputs_shape[i],
                 dtype=config.inputs_dtype[i])
-            result = torch.add(result, input_i)
-            input_list.append(input_i)
-
-        self.feed_list = input_list
+            inputs.append(input_i)
+        stacked = torch.stack(inputs, dim=0)
+        result = torch.sum(stacked, dim=0)
+        self.feed_list = inputs
         self.fetch_list = [result]
 
 
diff --git a/api/tests/categorical.py b/api/tests/categorical.py
new file mode 100644
index 0000000000..0ddf00c2f5
--- /dev/null
+++ b/api/tests/categorical.py
@@ -0,0 +1,47 @@
+from common_import import *
+
+
+@benchmark_registry.register("categorical")
+class CategoricalConfig(APIConfig):
+    def __init__(self):
+        super(CategoricalConfig, self).__init__("categorical")
+        self.feed_spec = {"range": [-5, -0.1]}
+
+
+@benchmark_registry.register("categorical")
+class PaddleCategorical(PaddleOpBenchmarkBase):
+    def build_graph(self, config):
+        logits = self.variable(
+            name="logits",
+            shape=config.logits_shape,
+            dtype=config.logits_dtype)
+        result = paddle.distribution.Categorical(logits)
+        counts = result.sample([100])
+        self.feed_list = [logits]
+        self.fetch_list = [counts]
+
+
+@benchmark_registry.register("categorical")
+class TorchCategorical(PytorchOpBenchmarkBase):
+    def build_graph(self, config):
+        logits = self.variable(
+            name="logits",
+            shape=config.logits_shape,
+            dtype=config.logits_dtype)
+        result = torch.distributions.categorical.Categorical(
+            logits=logits)
+        counts = result.sample([100])
+        self.feed_list = [logits]
+        self.fetch_list = [counts]
+
+
+@benchmark_registry.register("categorical")
+class TFCategorical(TensorflowOpBenchmarkBase):
+    def build_graph(self, config):
+        logits = self.variable(
+            name='logits',
+            shape=config.logits_shape,
+            dtype=config.logits_dtype)
+        counts = tf.random.categorical(logits, 100)
+        self.feed_list = [logits]
+        self.fetch_list = [counts]
diff --git a/api/tests_v2/configs/add_n.json b/api/tests_v2/configs/add_n.json
index c599994a99..097afd6b0e 100644
--- a/api/tests_v2/configs/add_n.json
+++ b/api/tests_v2/configs/add_n.json
@@ -89,4 +89,95 @@
       "type": "list"
     }
   }
+}, {
+  "op": "add_n",
+  "param_info": {
+    "inputs": {
+      "inputs0": {
+        "dtype": "float16",
+        "shape": "[1L]",
+        "type": "Variable"
+      },
+      "inputs1": {
+        "dtype": "float16",
+        "shape": "[1L]",
+        "type": "Variable"
+      },
+      "type": "list"
+    }
+  }
+}, {
+  "op": "add_n",
+  "param_info": {
"inputs": { + "inputs0": { + "dtype": "float16", + "shape": "[1L]", + "type": "Variable" + }, + "inputs1": { + "dtype": "float16", + "shape": "[1L]", + "type": "Variable" + }, + "inputs2": { + "dtype": "float16", + "shape": "[1L]", + "type": "Variable" + }, + "inputs3": { + "dtype": "float16", + "shape": "[1L]", + "type": "Variable" + }, + "inputs4": { + "dtype": "float16", + "shape": "[1L]", + "type": "Variable" + }, + "inputs5": { + "dtype": "float16", + "shape": "[1L]", + "type": "Variable" + }, + "inputs6": { + "dtype": "float16", + "shape": "[1L]", + "type": "Variable" + }, + "inputs7": { + "dtype": "float16", + "shape": "[1L]", + "type": "Variable" + }, + "type": "list" + } + } +}, { + "op": "add_n", + "param_info": { + "inputs": { + "inputs0": { + "dtype": "float16", + "shape": "[-1L, 256L]", + "type": "Variable" + }, + "inputs1": { + "dtype": "float16", + "shape": "[-1L, 256L]", + "type": "Variable" + }, + "inputs2": { + "dtype": "float16", + "shape": "[-1L, 256L]", + "type": "Variable" + }, + "inputs3": { + "dtype": "float16", + "shape": "[-1L, 256L]", + "type": "Variable" + }, + "type": "list" + } + } }] diff --git a/api/tests_v2/configs/categorical.json b/api/tests_v2/configs/categorical.json new file mode 100644 index 0000000000..8b8d4d905c --- /dev/null +++ b/api/tests_v2/configs/categorical.json @@ -0,0 +1,10 @@ +[{ + "op": "categorical", + "param_info": { + "logits": { + "dtype": "float32", + "shape": "[524288L, 23L]", + "type": "Variable" + } + } +}] \ No newline at end of file From 4df16eeceb2beb2809577fa1c1728db9dd2f0bad Mon Sep 17 00:00:00 2001 From: liujinnan <1823192871@qq.com> Date: Wed, 1 Feb 2023 10:56:16 +0000 Subject: [PATCH 2/2] change test case in add_n.json --- api/tests_v2/configs/add_n.json | 41 +++------------------------------ 1 file changed, 3 insertions(+), 38 deletions(-) diff --git a/api/tests_v2/configs/add_n.json b/api/tests_v2/configs/add_n.json index 097afd6b0e..36de4f2e97 100644 --- a/api/tests_v2/configs/add_n.json +++ b/api/tests_v2/configs/add_n.json @@ -95,12 +95,12 @@ "inputs": { "inputs0": { "dtype": "float16", - "shape": "[1L]", + "shape": "[-1L, 256L]", "type": "Variable" }, "inputs1": { "dtype": "float16", - "shape": "[1L]", + "shape": "[-1L, 256L]", "type": "Variable" }, "type": "list" @@ -112,42 +112,7 @@ "inputs": { "inputs0": { "dtype": "float16", - "shape": "[1L]", - "type": "Variable" - }, - "inputs1": { - "dtype": "float16", - "shape": "[1L]", - "type": "Variable" - }, - "inputs2": { - "dtype": "float16", - "shape": "[1L]", - "type": "Variable" - }, - "inputs3": { - "dtype": "float16", - "shape": "[1L]", - "type": "Variable" - }, - "inputs4": { - "dtype": "float16", - "shape": "[1L]", - "type": "Variable" - }, - "inputs5": { - "dtype": "float16", - "shape": "[1L]", - "type": "Variable" - }, - "inputs6": { - "dtype": "float16", - "shape": "[1L]", - "type": "Variable" - }, - "inputs7": { - "dtype": "float16", - "shape": "[1L]", + "shape": "[-1L, 256L]", "type": "Variable" }, "type": "list"