Skip to content

Commit

Permalink
Force xnnpack when CPU inference is enforced
Browse files Browse the repository at this point in the history
Also renames the flag to MEDIAPIPE_ to abide by the new naming scheme.

PiperOrigin-RevId: 683942820
  • Loading branch information
MediaPipe Team authored and copybara-github committed Oct 9, 2024
1 parent a6637e4 commit c4f475e
Show file tree
Hide file tree
Showing 3 changed files with 8 additions and 5 deletions.
6 changes: 5 additions & 1 deletion mediapipe/calculators/tensor/BUILD
Original file line number Diff line number Diff line change
Expand Up @@ -430,7 +430,7 @@ cc_library_with_tflite(
hdrs = ["inference_calculator.h"],
local_defines = select({
":force_cpu_inference": ["MEDIAPIPE_FORCE_CPU_INFERENCE=1"],
"//conditions:default": [],
"//conditions:default": ["MEDIAPIPE_FORCE_CPU_INFERENCE=0"],
}),
tflite_deps = [
":inference_runner",
Expand Down Expand Up @@ -782,6 +782,10 @@ cc_library(
srcs = [
"inference_calculator_cpu.cc",
],
local_defines = select({
":force_cpu_inference": ["MEDIAPIPE_FORCE_CPU_INFERENCE=1"],
"//conditions:default": ["MEDIAPIPE_FORCE_CPU_INFERENCE=0"],
}),
deps = [
":inference_calculator_interface",
":inference_calculator_utils",
Expand Down
5 changes: 2 additions & 3 deletions mediapipe/calculators/tensor/inference_calculator.cc
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ class InferenceCalculatorSelectorImpl
subgraph_node);
std::vector<absl::string_view> impls;

#if !defined(MEDIAPIPE_FORCE_CPU_INFERENCE) || !MEDIAPIPE_FORCE_CPU_INFERENCE
#if !MEDIAPIPE_FORCE_CPU_INFERENCE

const bool should_use_gpu =
!options.has_delegate() || // Use GPU delegate if not specified
Expand All @@ -73,8 +73,7 @@ class InferenceCalculatorSelectorImpl
impls.emplace_back("GlAdvanced");
}
}
#endif // !defined(MEDIAPIPE_FORCE_CPU_INFERENCE) ||
// !MEDIAPIPE_FORCE_CPU_INFERENCE
#endif // !MEDIAPIPE_FORCE_CPU_INFERENCE
impls.emplace_back("Cpu");
impls.emplace_back("Xnnpack");
std::vector<std::string> missing_impls;
Expand Down
2 changes: 1 addition & 1 deletion mediapipe/calculators/tensor/inference_calculator_cpu.cc
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,7 @@ InferenceCalculatorCpuImpl::MaybeCreateDelegate(CalculatorContext* cc) {
}
#endif // MEDIAPIPE_ANDROID

#if defined(__EMSCRIPTEN__)
#if defined(__EMSCRIPTEN__) || MEDIAPIPE_FORCE_CPU_INFERENCE
const bool use_xnnpack = true;
#else
const bool use_xnnpack = opts_has_delegate && opts_delegate.has_xnnpack();
Expand Down

0 comments on commit c4f475e

Please sign in to comment.