Namespace mediapipe::anonymous_namespace{inference_calculator_utils.cc}
Namespace List > mediapipe > anonymous_namespace{inference_calculator_utils.cc}
Public Functions
| Type | Name |
|---|---|
absl::Status | CopyTensorToTfLiteTensor (const Tensor & input_tensor, TfLiteTensor & tflite_tensor) |
absl::Status | CopyTensorToTfLiteTensor< char > (const Tensor & input_tensor, TfLiteTensor & tflite_tensor) |
absl::Status | CopyTfLiteTensorToTensor (const TfLiteTensor & tflite_tensor, Tensor & output_tensor) |
absl::Status | CopyTfLiteTensorToTensor< char > (const TfLiteTensor & tflite_tensor, Tensor & output_tensor) |
int | GetXnnpackDefaultNumThreads () |
std::ostream & | operator<< (std::ostream & os, const TfLiteIntArray & array) |
bool | operator== (Tensor::ElementType tensor_type, TfLiteType tflite_type) |
bool | operator== (const TfLiteIntArray & lhs, const std::vector< int > & rhs) |
Public Functions Documentation
function CopyTensorToTfLiteTensor
template<typename T>
absl::Status mediapipe::anonymous_namespace{inference_calculator_utils.cc}::CopyTensorToTfLiteTensor (
const Tensor & input_tensor,
TfLiteTensor & tflite_tensor
)
function CopyTensorToTfLiteTensor< char >
template<>
absl::Status mediapipe::anonymous_namespace{inference_calculator_utils.cc}::CopyTensorToTfLiteTensor< char > (
const Tensor & input_tensor,
TfLiteTensor & tflite_tensor
)
function CopyTfLiteTensorToTensor
template<typename T>
absl::Status mediapipe::anonymous_namespace{inference_calculator_utils.cc}::CopyTfLiteTensorToTensor (
const TfLiteTensor & tflite_tensor,
Tensor & output_tensor
)
function CopyTfLiteTensorToTensor< char >
template<>
absl::Status mediapipe::anonymous_namespace{inference_calculator_utils.cc}::CopyTfLiteTensorToTensor< char > (
const TfLiteTensor & tflite_tensor,
Tensor & output_tensor
)
function GetXnnpackDefaultNumThreads
int mediapipe::anonymous_namespace{inference_calculator_utils.cc}::GetXnnpackDefaultNumThreads ()
function operator<<
std::ostream & mediapipe::anonymous_namespace{inference_calculator_utils.cc}::operator<< (
std::ostream & os,
const TfLiteIntArray & array
)
function operator==
bool mediapipe::anonymous_namespace{inference_calculator_utils.cc}::operator== (
Tensor::ElementType tensor_type,
TfLiteType tflite_type
)
function operator==
bool mediapipe::anonymous_namespace{inference_calculator_utils.cc}::operator== (
const TfLiteIntArray & lhs,
const std::vector< int > & rhs
)
The documentation for this namespace was generated from the following file: /home/friedel/devel/ILLIXR-plugins/hand_tracking/mediapipe/calculators/tensor/inference_calculator_utils.cc