From 81d1251577f864810c6458d20d748d71a422511e Mon Sep 17 00:00:00 2001 From: gichan2-jang Date: Thu, 25 May 2023 15:43:31 +0900 Subject: [PATCH] [Service] Register pipeline description - Register pipeline description - Add unittest to register pipeline Signed-off-by: gichan2-jang --- c/include/ml-api-service.h | 20 +- c/src/meson.build | 15 +- c/src/ml-api-remote-service.c | 400 ++++++++++++++++++ c/src/ml-api-service-common.c | 4 + c/src/ml-api-service-private.h | 23 +- meson.build | 1 + packaging/machine-learning-api.spec | 6 + tests/capi/meson.build | 12 + tests/capi/unittest_capi_remote_service.cc | 304 +++++++++++++ .../unittest_capi_service_agent_client.cc | 2 +- 10 files changed, 779 insertions(+), 8 deletions(-) create mode 100644 c/src/ml-api-remote-service.c create mode 100644 tests/capi/unittest_capi_remote_service.cc diff --git a/c/include/ml-api-service.h b/c/include/ml-api-service.h index 14e71160..f6ae3889 100644 --- a/c/include/ml-api-service.h +++ b/c/include/ml-api-service.h @@ -224,6 +224,22 @@ int ml_service_query_create (ml_option_h option, ml_service_h *handle); */ int ml_service_query_request (ml_service_h handle, const ml_tensors_data_h input, ml_tensors_data_h *output); +/** + * @brief Creates ml remote service handle with given ml-option handle. + * @details The caller should set one of "remote_sender" and "remote_receiver" as a service type in @a ml_option. + * @remarks The @a handle should be destroyed using ml_service_destroy(). + * @param[in] option The option used for creating query service. + * @param[out] handle Newly created query service handle is returned. + * @return @c 0 on Success. Otherwise a negative error value. + * @retval #ML_ERROR_NONE Successful. + * @retval #ML_ERROR_NOT_SUPPORTED Not supported. + * @retval #ML_ERROR_INVALID_PARAMETER Fail. The parameter is invalid. + * @retval #ML_ERROR_OUT_OF_MEMORY Failed to allocate required memory. + * @retval #ML_ERROR_STREAMS_PIPE Failed to launch the pipeline. 
+ * @retval #ML_ERROR_TRY_AGAIN The pipeline is not ready yet. + */ +int ml_remote_service_create (ml_option_h option, ml_service_h *handle); + /** * @todo DRAFT. API name should be determined later. * @brief Register new information, such as neural network models or pipeline descriptions, on a remote server. @@ -280,7 +296,7 @@ int ml_service_query_request (ml_service_h handle, const ml_tensors_data_h input * ml_option_set (client_option_h, "dest_host", dest_host, g_free); * * // Create query service. - * ml_service_query_create (client_option_h, &client_h); + * ml_remote_service_create (client_option_h, &client_h); * * ml_option_h query_option_h = NULL; * ml_option_create (&query_option_h); @@ -302,7 +318,7 @@ int ml_service_query_request (ml_service_h handle, const ml_tensors_data_h input * * @endcode */ -int ml_service_query_register (ml_service_h handle, ml_option_h option, void *data); +int ml_remote_service_register (ml_service_h handle, ml_option_h option, void *data, size_t data_len); /** * @brief Registers new information of a neural network model. diff --git a/c/src/meson.build b/c/src/meson.build index c2f29679..d53759d2 100644 --- a/c/src/meson.build +++ b/c/src/meson.build @@ -12,6 +12,9 @@ endif nns_capi_single_srcs = files('ml-api-inference-single.c') nns_capi_pipeline_srcs = files('ml-api-inference-pipeline.c') nns_capi_service_srcs = files('ml-api-service-common.c','ml-api-service-agent-client.c', 'ml-api-service-query-client.c') +if nnstreamer_edge_dep.found() + nns_capi_service_srcs += files('ml-api-remote-service.c') +endif # Build ML-API Common Lib First. 
nns_capi_common_shared_lib = shared_library ('capi-ml-common', @@ -97,12 +100,16 @@ nns_capi_dep = declare_dependency(link_with: nns_capi_lib, include_directories: nns_capi_include ) - # Service API if get_option('enable-ml-service') + ml_service_deps = [nns_capi_dep, ml_agentd_deps] + if nnstreamer_edge_dep.found() + ml_service_deps += nnstreamer_edge_dep + endif + nns_capi_service_shared_lib = shared_library ('capi-ml-service', nns_capi_service_srcs, - dependencies: [nns_capi_dep, ml_agentd_deps], + dependencies: ml_service_deps, include_directories: [nns_capi_include, ml_agentd_incs], install: true, install_dir: api_install_libdir, @@ -112,7 +119,7 @@ if get_option('enable-ml-service') nns_capi_service_static_lib = static_library ('capi-ml-service', nns_capi_service_srcs, - dependencies: [nns_capi_dep, ml_agentd_deps], + dependencies: ml_service_deps, include_directories: [nns_capi_include, ml_agentd_incs], install: true, link_with: ml_agentd_lib, @@ -125,7 +132,7 @@ if get_option('enable-ml-service') endif nns_capi_service_dep = declare_dependency(link_with: nns_capi_service_lib, - dependencies: [nns_capi_dep, ml_agentd_deps], + dependencies: ml_service_deps, include_directories: nns_capi_include ) endif diff --git a/c/src/ml-api-remote-service.c b/c/src/ml-api-remote-service.c new file mode 100644 index 00000000..c81c31d5 --- /dev/null +++ b/c/src/ml-api-remote-service.c @@ -0,0 +1,400 @@ +/* SPDX-License-Identifier: Apache-2.0 */ +/** + * Copyright (c) 2023 Samsung Electronics Co., Ltd. All Rights Reserved. + * + * @file ml-api-remote-service.c + * @date 26 Jun 2023 + * @brief ml-remote-service of NNStreamer/Service C-API + * @see https://github.com/nnstreamer/nnstreamer + * @author Gichan Jang + * @bug No known bugs except for NYI items + */ + +#include +#include +#include +#include +#include + +#include "ml-api-internal.h" +#include "ml-api-service.h" +#include "ml-api-service-private.h" + +/** + * @brief Data struct for options. 
+ */ +typedef struct +{ + gchar *host; + guint port; + gchar *topic; + gchar *dest_host; + guint dest_port; + nns_edge_connect_type_e conn_type; + nns_edge_node_type_e node_type; +} edge_info_s; + +/** + * @brief Get ml-service node type from ml_option. + */ +static nns_edge_node_type_e +_mlrs_get_node_type (const gchar * value) +{ + nns_edge_node_type_e node_type; + + if (g_ascii_strcasecmp (value, "remote_sender") == 0) { + node_type = NNS_EDGE_NODE_TYPE_PUB; + } else if (g_ascii_strcasecmp (value, "remote_receiver") == 0) { + node_type = NNS_EDGE_NODE_TYPE_SUB; + } else { + _ml_error_report ("Invalid node type: %s, Please check ml_option.", value); + node_type = NNS_EDGE_NODE_TYPE_UNKNOWN; + } + return node_type; +} + +/** + * @brief Get nnstreamer-edge connection type + */ +static nns_edge_connect_type_e +_mlrs_get_conn_type (const gchar * value) +{ + nns_edge_connect_type_e conn_type; + + if (0 == g_ascii_strcasecmp (value, "TCP")) + conn_type = NNS_EDGE_CONNECT_TYPE_TCP; + else if (0 == g_ascii_strcasecmp (value, "HYBRID")) + conn_type = NNS_EDGE_CONNECT_TYPE_HYBRID; + else if (0 == g_ascii_strcasecmp (value, "MQTT")) + conn_type = NNS_EDGE_CONNECT_TYPE_MQTT; + else if (0 == g_ascii_strcasecmp (value, "AITT")) + conn_type = NNS_EDGE_CONNECT_TYPE_AITT; + else + conn_type = NNS_EDGE_CONNECT_TYPE_UNKNOWN; + + return conn_type; +} + +/** + * @brief Get edge info from ml_option. 
+ */
+static void
+_mlrs_get_edge_info (ml_option_h option, edge_info_s * edge_info)
+{
+  ml_option_s *_option = (ml_option_s *) option;
+  GHashTableIter iter;
+  gchar *key;
+  ml_option_value_s *_option_value;
+
+  g_hash_table_iter_init (&iter, _option->option_table);
+
+  while (g_hash_table_iter_next (&iter, (gpointer *) & key,
+          (gpointer *) & _option_value)) {
+    if (0 == g_ascii_strcasecmp (key, "host")) {
+      edge_info->host = g_strdup (_option_value->value);
+    } else if (0 == g_ascii_strcasecmp (key, "port")) {
+      /* NOTE(review): assumes the caller stored a guint via ml_option_set. */
+      edge_info->port = *((guint *) _option_value->value);
+    } else if (0 == g_ascii_strcasecmp (key, "dest-host")) {
+      edge_info->dest_host = g_strdup (_option_value->value);
+    } else if (0 == g_ascii_strcasecmp (key, "dest-port")) {
+      edge_info->dest_port = *((guint *) _option_value->value);
+    } else if (0 == g_ascii_strcasecmp (key, "connect-type")) {
+      edge_info->conn_type = _mlrs_get_conn_type (_option_value->value);
+    } else if (0 == g_ascii_strcasecmp (key, "topic")) {
+      edge_info->topic = g_strdup (_option_value->value);
+    } else if (0 == g_ascii_strcasecmp (key, "node-type")) {
+      edge_info->node_type = _mlrs_get_node_type (_option_value->value);
+    } else {
+      _ml_logd ("Ignore unknown key for edge info: %s", key);
+    }
+  }
+}
+
+/**
+ * @brief Set nns-edge info (HOST/PORT/TOPIC/DEST_HOST/DEST_PORT) from edge_info.
+ */
+static void
+_mlrs_set_edge_info (edge_info_s * edge_info, nns_edge_h edge_h)
+{
+  /* Large enough for any 32-bit unsigned value (10 digits + NUL).
+     A 6-byte buffer with unbounded sprintf overflows for values > 99999. */
+  char port[16];
+
+  nns_edge_set_info (edge_h, "HOST", edge_info->host);
+  g_snprintf (port, sizeof (port), "%u", edge_info->port);
+  nns_edge_set_info (edge_h, "PORT", port);
+
+  if (edge_info->topic)
+    nns_edge_set_info (edge_h, "TOPIC", edge_info->topic);
+
+  nns_edge_set_info (edge_h, "DEST_HOST", edge_info->dest_host);
+  g_snprintf (port, sizeof (port), "%u", edge_info->dest_port);
+  nns_edge_set_info (edge_h, "DEST_PORT", port);
+}
+
+/**
+ * @brief Release edge info.
+ */ +static void +_mlrs_release_edge_info (edge_info_s * edge_info) +{ + g_free (edge_info->dest_host); + g_free (edge_info->host); + g_free (edge_info->topic); +} + +/** + * @brief Get ml remote service type from ml_option. + */ +static ml_remote_service_type_e +_mlrs_get_service_type (gchar * service_str) +{ + ml_remote_service_type_e service_type = ML_REMOTE_SERVICE_TYPE_UNKNOWN; + + if (g_ascii_strcasecmp (service_str, "model_raw") == 0) { + service_type = ML_REMOTE_SERVICE_TYPE_MODEL_RAW; + } else if (g_ascii_strcasecmp (service_str, "model_url") == 0) { + service_type = ML_REMOTE_SERVICE_TYPE_MODEL_URL; + } else if (g_ascii_strcasecmp (service_str, "pipeline_raw") == 0) { + service_type = ML_REMOTE_SERVICE_TYPE_PIPELINE_RAW; + } else if (g_ascii_strcasecmp (service_str, "pipeline_url") == 0) { + service_type = ML_REMOTE_SERVICE_TYPE_PIPELINE_URL; + } else { + _ml_error_report ("Invalid service type: %s, Please check service type.", + service_str); + service_type = ML_REMOTE_SERVICE_TYPE_UNKNOWN; + } + return service_type; +} + +/** + * @brief Process ml remote service + */ +static void +_mlrs_process_remote_service (nns_edge_data_h data_h) +{ + void *data; + nns_size_t data_len; + gchar *service_str = NULL; + gchar *service_key = NULL; + ml_remote_service_type_e service_type; + + nns_edge_data_get (data_h, 0, &data, &data_len); + + nns_edge_data_get_info (data_h, "service-type", &service_str); + service_type = _mlrs_get_service_type (service_str); + nns_edge_data_get_info (data_h, "service-key", &service_key); + + switch (service_type) { + case ML_REMOTE_SERVICE_TYPE_MODEL_URL: + /** @todo Download the model file from given URL */ + case ML_REMOTE_SERVICE_TYPE_MODEL_RAW: + /** @todo Save model file to given path and register the model */ + break; + case ML_REMOTE_SERVICE_TYPE_PIPELINE_URL: + /** @todo Download the pipeline description from given URL */ + case ML_REMOTE_SERVICE_TYPE_PIPELINE_RAW: + ml_service_set_pipeline (service_key, (gchar *) data); + 
break; + default: + _ml_error_report + ("Unknown service type or not supported yet. Service num: %d", + service_type); + break; + } +} + +/** + * @brief Edge event callback. + */ +static int +_mlrs_edge_event_cb (nns_edge_event_h event_h, void *user_data) +{ + nns_edge_event_e event = NNS_EDGE_EVENT_UNKNOWN; + nns_edge_data_h data_h = NULL; + + int ret; + + ret = nns_edge_event_get_type (event_h, &event); + if (NNS_EDGE_ERROR_NONE != ret) + return ret; + + switch (event) { + case NNS_EDGE_EVENT_NEW_DATA_RECEIVED:{ + nns_edge_event_parse_new_data (event_h, &data_h); + _mlrs_process_remote_service (data_h); + break; + } + default: + break; + } + + if (data_h) + nns_edge_data_destroy (data_h); + + return NNS_EDGE_ERROR_NONE; +} + +/** + * @brief Create edge handle. + */ +static int +_mlrs_create_edge_handle (nns_edge_h * edge_h, edge_info_s * edge_info) +{ + int ret = 0; + ret = nns_edge_create_handle (edge_info->topic, edge_info->conn_type, + edge_info->node_type, edge_h); + + if (NNS_EDGE_ERROR_NONE != ret) { + _ml_error_report ("nns_edge_create_handle failed."); + return ret; + } + + ret = nns_edge_set_event_callback (*edge_h, _mlrs_edge_event_cb, NULL); + if (NNS_EDGE_ERROR_NONE != ret) { + _ml_error_report ("nns_edge_set_event_callback failed."); + nns_edge_release_handle (*edge_h); + return ret; + } + + _mlrs_set_edge_info (edge_info, *edge_h); + + ret = nns_edge_start (*edge_h); + if (NNS_EDGE_ERROR_NONE != ret) { + _ml_error_report ("nns_edge_start failed."); + nns_edge_release_handle (*edge_h); + return ret; + } + + if (edge_info->node_type == NNS_EDGE_NODE_TYPE_SUB) { + ret = nns_edge_connect (*edge_h, edge_info->dest_host, + edge_info->dest_port); + if (NNS_EDGE_ERROR_NONE != ret) { + _ml_error_report ("nns_edge_connect failed."); + nns_edge_release_handle (*edge_h); + } + } + + return ret; +} + +/** + * @brief Creates ml-service handle with given ml-option handle. 
+ */ +int +ml_remote_service_create (ml_option_h option, ml_service_h * handle) +{ + ml_service_s *mls; + _ml_remote_service_s *remote_s; + nns_edge_h edge_h = NULL; + edge_info_s *edge_info = NULL; + int ret = ML_ERROR_NONE; + + check_feature_state (ML_FEATURE_SERVICE); + check_feature_state (ML_FEATURE_INFERENCE); + + if (!option) { + _ml_error_report_return (ML_ERROR_INVALID_PARAMETER, + "The parameter, 'option' is NULL. It should be a valid ml_option_h, which should be created by ml_option_create()."); + } + + if (!handle) { + _ml_error_report_return (ML_ERROR_INVALID_PARAMETER, + "The parameter, 'handle' (ml_service_h), is NULL. It should be a valid ml_service_h."); + } + + edge_info = g_new0 (edge_info_s, 1); + edge_info->topic = NULL; + edge_info->host = NULL; + edge_info->port = 0; + edge_info->dest_host = NULL; + edge_info->dest_port = 0; + edge_info->conn_type = NNS_EDGE_CONNECT_TYPE_UNKNOWN; + + _mlrs_get_edge_info (option, edge_info); + + ret = _mlrs_create_edge_handle (&edge_h, edge_info); + if (ML_ERROR_NONE != ret) { + g_free (edge_info); + return ret; + } + + remote_s = g_new0 (_ml_remote_service_s, 1); + remote_s->edge_h = edge_h; + remote_s->node_type = edge_info->node_type; + + mls = g_new0 (ml_service_s, 1); + mls->type = ML_SERVICE_TYPE_REMOTE; + mls->priv = remote_s; + + *handle = mls; + + _mlrs_release_edge_info (edge_info); + g_free (edge_info); + + return ret; +} + +/** + * @brief Register new information, such as neural network models or pipeline descriptions, on a remote server. 
+ * @param[in] handle The remote service handle created by ml_remote_service_create().
+ * @param[in] option The option that carries "service-type" and "service-key".
+ * @param[in] data The raw data (e.g. a pipeline description) to register.
+ * @param[in] data_len The byte length of @a data. It should be greater than 0.
+ * @return @c 0 on success. Otherwise a negative error value.
+*/
+int
+ml_remote_service_register (ml_service_h handle, ml_option_h option, void *data,
+    size_t data_len)
+{
+  ml_service_s *mls = (ml_service_s *) handle;
+  _ml_remote_service_s *remote_s = NULL;
+  gchar *service_key = NULL;
+  nns_edge_data_h data_h = NULL;
+  int ret = NNS_EDGE_ERROR_NONE;
+  gchar *service_str = NULL;
+
+  check_feature_state (ML_FEATURE_SERVICE);
+  check_feature_state (ML_FEATURE_INFERENCE);
+
+  if (!handle) {
+    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
+        "The parameter, 'handle' is NULL. It should be a valid ml_service_h.");
+  }
+
+  if (!option) {
+    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
+        "The parameter, 'option' is NULL. It should be a valid ml_option_h, which should be created by ml_option_create().");
+  }
+
+  if (!data) {
+    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
+        "The parameter, 'data' is NULL. It should be a valid pointer.");
+  }
+
+  if (data_len <= 0) {
+    _ml_error_report_return (ML_ERROR_INVALID_PARAMETER,
+        "The parameter, 'data_len' should be greater than 0.");
+  }
+
+  remote_s = (_ml_remote_service_s *) mls->priv;
+
+  ret = nns_edge_data_create (&data_h);
+  if (NNS_EDGE_ERROR_NONE != ret) {
+    _ml_error_report ("Failed to create an edge data.");
+    return ret;
+  }
+
+  ml_option_get (option, "service-type", (void **) &service_str);
+  nns_edge_data_set_info (data_h, "service-type", service_str);
+  ml_option_get (option, "service-key", (void **) &service_key);
+  nns_edge_data_set_info (data_h, "service-key", service_key);
+
+  ret = nns_edge_data_add (data_h, data, data_len, NULL);
+  if (NNS_EDGE_ERROR_NONE != ret) {
+    _ml_error_report ("Failed to add the data to the edge data.");
+    /* Bail out here; data_h is destroyed and must not be passed to send. */
+    nns_edge_data_destroy (data_h);
+    return ret;
+  }
+
+  ret = nns_edge_send (remote_s->edge_h, data_h);
+  if (NNS_EDGE_ERROR_NONE != ret) {
+    _ml_error_report
+        ("Failed to publish the data to register the remote service.");
+  }
+
+  /* This function still owns data_h after nns_edge_send; release it in all
+     cases to avoid a leak. (NOTE(review): confirm against nnstreamer-edge
+     ownership rules for nns_edge_send.) */
+  nns_edge_data_destroy (data_h);
+
+  return ret;
+}
diff --git a/c/src/ml-api-service-common.c
b/c/src/ml-api-service-common.c index e696ff35..71dcabd0 100644 --- a/c/src/ml-api-service-common.c +++ b/c/src/ml-api-service-common.c @@ -66,6 +66,10 @@ ml_service_destroy (ml_service_h h) g_async_queue_unref (query->out_data_queue); g_free (query); + } else if (ML_SERVICE_TYPE_REMOTE == mls->type) { + _ml_remote_service_s *mlrs = (_ml_remote_service_s *) mls->priv; + nns_edge_release_handle (mlrs->edge_h); + g_free (mlrs); } else { _ml_error_report_return (ML_ERROR_INVALID_PARAMETER, "Invalid type of ml_service_h."); diff --git a/c/src/ml-api-service-private.h b/c/src/ml-api-service-private.h index 5ff4b0dc..fa8b8fd3 100644 --- a/c/src/ml-api-service-private.h +++ b/c/src/ml-api-service-private.h @@ -19,6 +19,7 @@ #include "pipeline-dbus.h" #include "model-dbus.h" +#include "nnstreamer-edge.h" #ifdef __cplusplus extern "C" { @@ -28,12 +29,23 @@ typedef enum { ML_SERVICE_TYPE_UNKNOWN = 0, ML_SERVICE_TYPE_SERVER_PIPELINE, ML_SERVICE_TYPE_CLIENT_QUERY, + ML_SERVICE_TYPE_REMOTE, ML_SERVICE_TYPE_MAX } ml_service_type_e; +typedef enum { + ML_REMOTE_SERVICE_TYPE_UNKNOWN = 0, + ML_REMOTE_SERVICE_TYPE_MODEL_RAW, + ML_REMOTE_SERVICE_TYPE_MODEL_URL, + ML_REMOTE_SERVICE_TYPE_PIPELINE_RAW, + ML_REMOTE_SERVICE_TYPE_PIPELINE_URL, + + ML_REMOTE_SERVICE_TYPE_MAX +} ml_remote_service_type_e; + /** - * @brief Structure for ml_service_h + * @brief Structure for ml_remote_service_h */ typedef struct { @@ -65,6 +77,15 @@ typedef struct GAsyncQueue *out_data_queue; } _ml_service_query_s; +/** + * @brief Structure for ml_remote_service + */ +typedef struct +{ + nns_edge_h edge_h; + nns_edge_node_type_e node_type; +} _ml_remote_service_s; + #ifdef __cplusplus } #endif /* __cplusplus */ diff --git a/meson.build b/meson.build index 24311792..c3f1d0c3 100644 --- a/meson.build +++ b/meson.build @@ -31,6 +31,7 @@ gst_app_dep = dependency('gstreamer-app-1.0') nnstreamer_internal_dep = dependency('nnstreamer-internal') nnstreamer_single_dep = dependency('nnstreamer-single') nnstreamer_dep = 
dependency('nnstreamer') +nnstreamer_edge_dep = dependency('nnstreamer-edge', required: false) if get_option('enable-ml-service') libsystemd_dep = dependency('libsystemd') diff --git a/packaging/machine-learning-api.spec b/packaging/machine-learning-api.spec index 695be357..cacd82b9 100644 --- a/packaging/machine-learning-api.spec +++ b/packaging/machine-learning-api.spec @@ -14,6 +14,7 @@ %define tensorflow2_gpu_delegate_support 1 %define nnfw_support 1 %define armnn_support 0 +%define nnstreamer_edge_support 1 %define release_test 0 %define test_script $(pwd)/packaging/run_unittests.sh @@ -162,6 +163,10 @@ BuildRequires: pkgconfig(capi-appfw-package-manager) BuildRequires: pkgconfig(capi-appfw-app-common) %endif +%if 0%{?nnstreamer_edge_support} +BuildRequires: nnstreamer-edge-devel +%endif + %description Tizen ML(Machine Learning) native API for NNStreamer. You can construct a data stream pipeline with neural networks easily. @@ -384,6 +389,7 @@ export MLAPI_BUILD_ROOT_PATH=$(pwd)/%{builddir} # Run test %if 0%{?unit_test} +bash %{test_script} ./tests/capi/unittest_capi_remote_service bash %{test_script} ./tests/capi/unittest_capi_inference_single bash %{test_script} ./tests/capi/unittest_capi_inference bash %{test_script} ./tests/capi/unittest_capi_datatype_consistency diff --git a/tests/capi/meson.build b/tests/capi/meson.build index 2394b4c7..36be7d58 100644 --- a/tests/capi/meson.build +++ b/tests/capi/meson.build @@ -41,6 +41,18 @@ if get_option('enable-ml-service') include_directories: nns_capi_include, ) test('unittest_capi_service_agent_client', unittest_capi_service_agent_client, env: testenv, timeout: 100) + + if nnstreamer_edge_dep.found() + unittest_capi_remote_service = executable('unittest_capi_remote_service', + 'unittest_capi_remote_service.cc', + link_with: nns_capi_service_lib, + dependencies: [unittest_common_dep, gdbus_gen_test_dep, lib_ml_agentd_test_dep], + install: get_option('install-test'), + install_dir: unittest_install_dir, + 
include_directories: nns_capi_include, + ) + test('unittest_capi_remote_service', unittest_capi_remote_service, env: testenv, timeout: 100) + endif endif if nnfw_dep.found() diff --git a/tests/capi/unittest_capi_remote_service.cc b/tests/capi/unittest_capi_remote_service.cc new file mode 100644 index 00000000..97554623 --- /dev/null +++ b/tests/capi/unittest_capi_remote_service.cc @@ -0,0 +1,304 @@ +/** + * @file unittest_capi_remote_service.cc + * @date 26 Jun 2023 + * @brief Unit test for ML Service C-API remote service. + * @see https://github.com/nnstreamer/api + * @author Gichan Jang + * @bug No known bugs + */ + +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +/** + * @brief Test base class for Database of ML Service API. + */ +class MLRemoteService : public ::testing::Test +{ + protected: + GTestDBus *dbus; + + public: + /** + * @brief Setup method for each test case. + */ + void SetUp () override + { + gchar *current_dir = g_get_current_dir (); + gchar *services_dir = g_build_filename (current_dir, "tests/services", NULL); + + dbus = g_test_dbus_new (G_TEST_DBUS_NONE); + ASSERT_NE (nullptr, dbus); + + g_test_dbus_add_service_dir (dbus, services_dir); + g_free (current_dir); + g_free (services_dir); + + g_test_dbus_up (dbus); + } + + /** + * @brief Teardown method for each test case. + */ + void TearDown () override + { + if (dbus) { + g_test_dbus_down (dbus); + g_object_unref (dbus); + } + } + + /** + * @brief Get available port number. 
+ */ + static guint _get_available_port (void) + { + struct sockaddr_in sin; + guint port = 0; + gint sock; + socklen_t len = sizeof (struct sockaddr); + + sin.sin_family = AF_INET; + sin.sin_addr.s_addr = INADDR_ANY; + sin.sin_port = htons (0); + + sock = socket (AF_INET, SOCK_STREAM, 0); + EXPECT_TRUE (sock > 0); + if (sock < 0) + return 0; + + if (bind (sock, (struct sockaddr *) &sin, sizeof (struct sockaddr)) == 0) { + if (getsockname (sock, (struct sockaddr *) &sin, &len) == 0) { + port = ntohs (sin.sin_port); + } + } + close (sock); + + EXPECT_TRUE (port > 0); + return port; + } +}; + +/** + * @brief use case of pipeline registration using ml remote service. + */ +TEST_F (MLRemoteService, registerPipeline) +{ + int status; + + /**============= Prepare client ============= **/ + ml_service_h client_h; + ml_option_h client_option_h = NULL; + + status = ml_option_create (&client_option_h); + EXPECT_EQ (ML_ERROR_NONE, status); + + gchar *client_node_type = g_strdup ("remote_sender"); + status = ml_option_set (client_option_h, "node-type", client_node_type, g_free); + EXPECT_EQ (ML_ERROR_NONE, status); + + gchar *client_dest_host = g_strdup ("127.0.0.1"); + status = ml_option_set (client_option_h, "host", client_dest_host, g_free); + EXPECT_EQ (ML_ERROR_NONE, status); + + guint dest_port = 3000; + status = ml_option_set (client_option_h, "port", &dest_port, NULL); + EXPECT_EQ (ML_ERROR_NONE, status); + + gchar *client_connect_type = g_strdup ("TCP"); + status = ml_option_set (client_option_h, "connect-type", client_connect_type, g_free); + EXPECT_EQ (ML_ERROR_NONE, status); + + gchar *topic = g_strdup ("remote_service_test_topic"); + status = ml_option_set (client_option_h, "topic", topic, NULL); + EXPECT_EQ (ML_ERROR_NONE, status); + + status = ml_remote_service_create (client_option_h, &client_h); + EXPECT_EQ (ML_ERROR_NONE, status); + + /**============= Prepare server ============= **/ + ml_service_h server_h; + ml_option_h server_option_h = NULL; + status = 
ml_option_create (&server_option_h); + EXPECT_EQ (ML_ERROR_NONE, status); + + gchar *server_node_type = g_strdup ("remote_receiver"); + status = ml_option_set (server_option_h, "node-type", server_node_type, g_free); + + gchar *dest_host = g_strdup ("127.0.0.1"); + status = ml_option_set (server_option_h, "dest-host", dest_host, g_free); + EXPECT_EQ (ML_ERROR_NONE, status); + + status = ml_option_set (server_option_h, "topic", topic, g_free); + EXPECT_EQ (ML_ERROR_NONE, status); + + status = ml_option_set (server_option_h, "dest-port", &dest_port, NULL); + EXPECT_EQ (ML_ERROR_NONE, status); + + gchar *server_connect_type = g_strdup ("TCP"); + status = ml_option_set (server_option_h, "connect-type", server_connect_type, g_free); + EXPECT_EQ (ML_ERROR_NONE, status); + + status = ml_remote_service_create (server_option_h, &server_h); + EXPECT_EQ (ML_ERROR_NONE, status); + + ml_option_h remote_service_option_h = NULL; + status = ml_option_create (&remote_service_option_h); + EXPECT_EQ (ML_ERROR_NONE, status); + + + gchar *service_type = g_strdup ("pipeline_raw"); + ml_option_set (remote_service_option_h, "service-type", service_type, g_free); + + gchar *service_key = g_strdup ("pipeline_test_key"); + ml_option_set (remote_service_option_h, "service-key", service_key, g_free); + + gchar *pipeline_desc = g_strdup ("fakesrc ! fakesink"); + + status = ml_remote_service_register (client_h, remote_service_option_h, + pipeline_desc, strlen (pipeline_desc) + 1); + EXPECT_EQ (ML_ERROR_NONE, status); + + /** Wait for the server to register the pipeline. 
*/ + g_usleep (1000000); + + gchar *ret_pipeline = NULL; + status = ml_service_get_pipeline (service_key, &ret_pipeline); + EXPECT_EQ (ML_ERROR_NONE, status); + EXPECT_STREQ (pipeline_desc, ret_pipeline); + + g_free (ret_pipeline); + g_free (pipeline_desc); + status = ml_service_destroy (server_h); + EXPECT_EQ (ML_ERROR_NONE, status); + status = ml_service_destroy (client_h); + EXPECT_EQ (ML_ERROR_NONE, status); + status = ml_option_destroy (server_option_h); + EXPECT_EQ (ML_ERROR_NONE, status); + status = ml_option_destroy (remote_service_option_h); + EXPECT_EQ (ML_ERROR_NONE, status); + status = ml_option_destroy (client_option_h); + EXPECT_EQ (ML_ERROR_NONE, status); +} + +/** + * @brief Test ml_remote_service_create with invalid param. + */ +TEST_F (MLRemoteService, createInvalidParam_n) +{ + int status; + ml_option_h option_h = NULL; + ml_service_h service_h = NULL; + + status = ml_option_create (&option_h); + EXPECT_EQ (ML_ERROR_NONE, status); + + status = ml_remote_service_create (NULL, &service_h); + EXPECT_EQ (ML_ERROR_INVALID_PARAMETER, status); + + status = ml_remote_service_create (option_h, NULL); + EXPECT_EQ (ML_ERROR_INVALID_PARAMETER, status); + + status = ml_option_destroy (option_h); + EXPECT_EQ (ML_ERROR_NONE, status); +} + +/** + * @brief Test ml_remote_service_register with invalid param. 
+ */ +TEST_F (MLRemoteService, registerInvalidParam_n) +{ + int status; + ml_service_h service_h = NULL; + ml_option_h option_h = NULL; + gchar *str = g_strdup ("Temp_test_str"); + size_t len = strlen (str) + 1; + + status = ml_option_create (&option_h); + EXPECT_EQ (ML_ERROR_NONE, status); + + gchar *client_node_type = g_strdup ("remote_sender"); + status = ml_option_set (option_h, "node-type", client_node_type, g_free); + EXPECT_EQ (ML_ERROR_NONE, status); + + gchar *client_dest_host = g_strdup ("127.0.0.1"); + status = ml_option_set (option_h, "dest-host", client_dest_host, g_free); + EXPECT_EQ (ML_ERROR_NONE, status); + + guint dest_port = 1883; + status = ml_option_set (option_h, "dest-port", &dest_port, NULL); + EXPECT_EQ (ML_ERROR_NONE, status); + + gchar *client_connect_type = g_strdup ("HYBRID"); + status = ml_option_set (option_h, "connect-type", client_connect_type, g_free); + EXPECT_EQ (ML_ERROR_NONE, status); + + gchar *topic = g_strdup ("temp_test_topic"); + status = ml_option_set (option_h, "topic", topic, NULL); + EXPECT_EQ (ML_ERROR_NONE, status); + + status = ml_remote_service_create (option_h, &service_h); + EXPECT_EQ (ML_ERROR_NONE, status); + + status = ml_remote_service_register (NULL, option_h, str, len); + EXPECT_EQ (ML_ERROR_INVALID_PARAMETER, status); + + status = ml_remote_service_register (service_h, NULL, str, len); + EXPECT_EQ (ML_ERROR_INVALID_PARAMETER, status); + + status = ml_remote_service_register (service_h, option_h, NULL, len); + EXPECT_EQ (ML_ERROR_INVALID_PARAMETER, status); + + status = ml_remote_service_register (service_h, option_h, str, 0); + EXPECT_EQ (ML_ERROR_INVALID_PARAMETER, status); + + + g_free (str); + status = ml_option_destroy (option_h); + EXPECT_EQ (ML_ERROR_NONE, status); + + status = ml_service_destroy (service_h); + EXPECT_EQ (ML_ERROR_NONE, status); +} + +/** + * @brief Main gtest + */ +int +main (int argc, char **argv) +{ + int result = -1; + + try { + testing::InitGoogleTest (&argc, argv); + } catch 
(...) { + g_warning ("catch 'testing::internal::::ClassUniqueToAlwaysTrue'"); + } + + _ml_initialize_gstreamer (); + + /* ignore tizen feature status while running the testcases */ + set_feature_state (ML_FEATURE, SUPPORTED); + set_feature_state (ML_FEATURE_INFERENCE, SUPPORTED); + set_feature_state (ML_FEATURE_SERVICE, SUPPORTED); + + try { + result = RUN_ALL_TESTS (); + } catch (...) { + g_warning ("catch `testing::internal::GoogleTestFailureException`"); + } + + set_feature_state (ML_FEATURE, NOT_CHECKED_YET); + set_feature_state (ML_FEATURE_INFERENCE, NOT_CHECKED_YET); + set_feature_state (ML_FEATURE_SERVICE, NOT_CHECKED_YET); + + return result; +} diff --git a/tests/capi/unittest_capi_service_agent_client.cc b/tests/capi/unittest_capi_service_agent_client.cc index f7696fdf..621c5f02 100644 --- a/tests/capi/unittest_capi_service_agent_client.cc +++ b/tests/capi/unittest_capi_service_agent_client.cc @@ -106,7 +106,7 @@ TEST_F (MLServiceAgentTest, usecase_00) status = ml_service_set_pipeline (service_name, pipeline_desc); EXPECT_EQ (ML_ERROR_NONE, status); - gchar *ret_pipeline; + gchar *ret_pipeline = NULL; status = ml_service_get_pipeline (service_name, &ret_pipeline); EXPECT_EQ (ML_ERROR_NONE, status); EXPECT_STREQ (pipeline_desc, ret_pipeline);