diff --git a/Tizen.native/ml-service-example/sum_and_add_app/src/sum_and_add.c b/Tizen.native/ml-service-example/sum_and_add_app/src/sum_and_add.c
index 567c2a9f..166e3310 100644
--- a/Tizen.native/ml-service-example/sum_and_add_app/src/sum_and_add.c
+++ b/Tizen.native/ml-service-example/sum_and_add_app/src/sum_and_add.c
@@ -32,6 +32,9 @@ typedef struct appdata {

   ml_tensors_info_h output_info;
   ml_tensors_data_h input_data_h;
+  gchar *input_node_name;
+  gchar *output_node_name;
+
 } appdata_s;

 Ecore_Pipe *data_output_pipe;
@@ -43,7 +46,7 @@ _invoke_request_loop (void *user_data)
   appdata_s *ad = (appdata_s *) user_data;

   while (1) {
     if (ad->is_running == 1) {
-      ml_service_request (ad->service_handle, NULL, ad->input_data_h);
+      ml_service_request (ad->service_handle, ad->input_node_name, ad->input_data_h);
     }
     g_usleep (1000 * 1000); /* request every 1 sec */
@@ -216,7 +219,8 @@ init_ml_service (appdata_s *ad)
   ml_information_list_h res_info_list;

   // get conf file from rpk using key for the file
-  const char *conf_key = "sum_and_add_resource_conf";
+  // const char *conf_key = "sum_and_add_resource_conf"; // use single conf
+  const char *conf_key = "sum_and_add_resource_pipeline_conf"; // use pipeline conf
   status = ml_service_resource_get (conf_key, &res_info_list);
   unsigned int info_length = 0U;
   if (status != ML_ERROR_NONE) {
@@ -254,15 +258,27 @@ init_ml_service (appdata_s *ad)
     return status;
   }

+  // get input_node_name and output_node_name from the conf file (given by the pipeline conf, not the single conf)
+  status = ml_service_get_information (ad->service_handle, "input_node_name", &ad->input_node_name);
+  if (status != ML_ERROR_NONE) {
+    dlog_print (DLOG_INFO, LOG_TAG, "No input_node_name given. It's single!");
+    ad->input_node_name = NULL;
+  }
+  status = ml_service_get_information (ad->service_handle, "output_node_name", &ad->output_node_name);
+  if (status != ML_ERROR_NONE) {
+    dlog_print (DLOG_INFO, LOG_TAG, "No output_node_name given. It's single!");
+    ad->output_node_name = NULL;
+  }
+
   // get input information
-  status = ml_service_get_input_information (ad->service_handle, NULL, &ad->input_info);
+  status = ml_service_get_input_information (ad->service_handle, ad->input_node_name, &ad->input_info);
   if (status != ML_ERROR_NONE) {
     dlog_print (DLOG_ERROR, LOG_TAG, "ml_service_get_input_information failed");
     return status;
   }

   // get output information
-  status = ml_service_get_output_information (ad->service_handle, NULL, &ad->output_info);
+  status = ml_service_get_output_information (ad->service_handle, ad->output_node_name, &ad->output_info);
   if (status != ML_ERROR_NONE) {
     dlog_print (DLOG_ERROR, LOG_TAG, "ml_service_get_output_information failed");
     return status;
diff --git a/Tizen.native/ml-service-example/sum_and_add_model_rpk_v1/res/global/sum_and_add/rpk_config.json b/Tizen.native/ml-service-example/sum_and_add_model_rpk_v1/res/global/sum_and_add/rpk_config.json
index 39ea21e5..3987cf04 100644
--- a/Tizen.native/ml-service-example/sum_and_add_model_rpk_v1/res/global/sum_and_add/rpk_config.json
+++ b/Tizen.native/ml-service-example/sum_and_add_model_rpk_v1/res/global/sum_and_add/rpk_config.json
@@ -12,6 +12,11 @@
       "name" : "sum_and_add_resource_conf",
       "path" : "sum_and_add.single.conf",
       "description" : "Notes for this conf file"
+    },
+    {
+      "name" : "sum_and_add_resource_pipeline_conf",
+      "path" : "sum_and_add.pipeline.conf",
+      "description" : "Notes for this conf file"
     }
   ]
 }
diff --git a/Tizen.native/ml-service-example/sum_and_add_model_rpk_v1/res/global/sum_and_add/sum_and_add.pipeline.conf b/Tizen.native/ml-service-example/sum_and_add_model_rpk_v1/res/global/sum_and_add/sum_and_add.pipeline.conf
new file mode 100644
index 00000000..00ba9ba2
--- /dev/null
+++ b/Tizen.native/ml-service-example/sum_and_add_model_rpk_v1/res/global/sum_and_add/sum_and_add.pipeline.conf
@@ -0,0 +1,37 @@
+{
+  "pipeline" : {
+    "description" : "
+      appsrc name=tsrc caps=other/tensors,num_tensors=1,format=(string)static,types=(string)float32,dimensions=(string)4:1,framerate=(fraction)0/1 !
+      queue leaky=2 max-size-buffers=2 !
+      tensor_filter framework=tensorflow-lite model=mlagent://model/sum_and_add_model custom=Delegate:XNNPACK,NumThreads:2 latency=1 !
+      tensor_sink name=tsink",
+    "input_node" : [
+      {
+        "name" : "tsrc",
+        "info" : [
+          {
+            "type" : "float32",
+            "dimension" : "4:1"
+          }
+        ]
+      }
+    ],
+    "output_node" : [
+      {
+        "name" : "tsink",
+        "info" : [
+          {
+            "type" : "float32",
+            "dimension" : "1"
+          }
+        ]
+      }
+    ]
+  },
+  "information" :
+  {
+    "description" : "[sum_and_add_one.tflite] sum all values and add 1.0",
+    "input_node_name" : "tsrc",
+    "output_node_name" : "tsink"
+  }
+}
diff --git a/Tizen.native/ml-service-example/sum_and_add_model_rpk_v2/res/global/sum_and_add/rpk_config.json b/Tizen.native/ml-service-example/sum_and_add_model_rpk_v2/res/global/sum_and_add/rpk_config.json
index 2c634f43..38366dc3 100644
--- a/Tizen.native/ml-service-example/sum_and_add_model_rpk_v2/res/global/sum_and_add/rpk_config.json
+++ b/Tizen.native/ml-service-example/sum_and_add_model_rpk_v2/res/global/sum_and_add/rpk_config.json
@@ -12,6 +12,11 @@
       "name" : "sum_and_add_resource_conf",
       "path" : "sum_and_add.single.conf",
       "description" : "Notes for this conf file"
+    },
+    {
+      "name" : "sum_and_add_resource_pipeline_conf",
+      "path" : "sum_and_add.pipeline.conf",
+      "description" : "Notes for this conf file"
     }
   ]
 }
diff --git a/Tizen.native/ml-service-example/sum_and_add_model_rpk_v2/res/global/sum_and_add/sum_and_add.pipeline.conf b/Tizen.native/ml-service-example/sum_and_add_model_rpk_v2/res/global/sum_and_add/sum_and_add.pipeline.conf
new file mode 100644
index 00000000..3f30acca
--- /dev/null
+++ b/Tizen.native/ml-service-example/sum_and_add_model_rpk_v2/res/global/sum_and_add/sum_and_add.pipeline.conf
@@ -0,0 +1,37 @@
+{
+  "pipeline" : {
+    "description" : "
+      appsrc name=tsrc caps=other/tensors,num_tensors=1,format=(string)static,types=(string)float32,dimensions=(string)4:1,framerate=(fraction)0/1 !
+      queue leaky=2 max-size-buffers=2 !
+      tensor_filter framework=tensorflow-lite model=mlagent://model/sum_and_add_model custom=Delegate:XNNPACK,NumThreads:2 latency=1 !
+      tensor_sink name=tsink",
+    "input_node" : [
+      {
+        "name" : "tsrc",
+        "info" : [
+          {
+            "type" : "float32",
+            "dimension" : "4:1"
+          }
+        ]
+      }
+    ],
+    "output_node" : [
+      {
+        "name" : "tsink",
+        "info" : [
+          {
+            "type" : "float32",
+            "dimension" : "1"
+          }
+        ]
+      }
+    ]
+  },
+  "information" :
+  {
+    "description" : "[sum_and_add_two.tflite] sum all values and add 2.0",
+    "input_node_name" : "tsrc",
+    "output_node_name" : "tsink"
+  }
+}
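
Note on the new node-name fields: ml_service_get_information() fills ad->input_node_name / ad->output_node_name with newly allocated strings, so the app should release them when the service handle is torn down (the cleanup path is outside the hunks above). A minimal sketch, assuming the strings may be released with g_free() and using a hypothetical release_node_names() helper that is not part of this patch:

#include <glib.h>

/* Hypothetical helper (not in this patch): release the node-name strings
 * obtained from ml_service_get_information() and reset the pointers, so the
 * single-conf path (NULL node names) remains valid after cleanup. */
static void
release_node_names (gchar **input_node_name, gchar **output_node_name)
{
  g_free (*input_node_name);    /* g_free() is a no-op on NULL */
  *input_node_name = NULL;

  g_free (*output_node_name);
  *output_node_name = NULL;
}

In the example app this would be called from the terminate path, right before the service handle is destroyed (e.g., with ml_service_destroy()); the exact call site is an assumption here.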