Skip to content

File inference_calculator.pb.h

FileList > calculators > tensor > inference_calculator.pb.h

Go to the source code of this file

  • #include <limits>
  • #include <string>
  • #include <google/protobuf/port_def.inc>
  • #include <google/protobuf/port_undef.inc>
  • #include <google/protobuf/io/coded_stream.h>
  • #include <google/protobuf/arena.h>
  • #include <google/protobuf/arenastring.h>
  • #include <google/protobuf/generated_message_bases.h>
  • #include <google/protobuf/generated_message_table_driven.h>
  • #include <google/protobuf/generated_message_util.h>
  • #include <google/protobuf/metadata_lite.h>
  • #include <google/protobuf/generated_message_reflection.h>
  • #include <google/protobuf/message.h>
  • #include <google/protobuf/repeated_field.h>
  • #include <google/protobuf/extension_set.h>
  • #include <google/protobuf/generated_enum_reflection.h>
  • #include <google/protobuf/unknown_field_set.h>
  • #include "mediapipe/framework/calculator.pb.h"
  • #include "mediapipe/framework/calculator_options.pb.h"

Namespaces

Type Name
namespace internal
namespace mediapipe

Classes

Type Name
struct TableStruct_mediapipe_2fcalculators_2ftensor_2finference_5fcalculator_2eproto
struct is_proto_enum< ::mediapipe::InferenceCalculatorOptions_Delegate_Gpu_Api > <>
struct is_proto_enum< ::mediapipe::InferenceCalculatorOptions_Delegate_Gpu_CacheWritingBehavior > <>
struct is_proto_enum< ::mediapipe::InferenceCalculatorOptions_Delegate_Gpu_InferenceUsage > <>
class InferenceCalculatorOptions
class InferenceCalculatorOptions_Delegate
class InferenceCalculatorOptions_Delegate_Gpu
class InferenceCalculatorOptions_Delegate_Nnapi
class InferenceCalculatorOptions_Delegate_TfLite
class InferenceCalculatorOptions_Delegate_Xnnpack
class InferenceCalculatorOptions_InputOutputConfig
class InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink
class InferenceCalculatorOptions_InputOutputConfig_TensorIndicesMap
class InferenceCalculatorOptions_InputOutputConfig_TensorNamesMap

Public Attributes

Type Name
const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable descriptor_table_mediapipe_2fcalculators_2ftensor_2finference_5fcalculator_2eproto

Public Functions

Type Name
::mediapipe::InferenceCalculatorOptions * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions > (Arena *)
::mediapipe::InferenceCalculatorOptions_Delegate * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_Delegate > (Arena *)
::mediapipe::InferenceCalculatorOptions_Delegate_Gpu * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_Delegate_Gpu > (Arena *)
::mediapipe::InferenceCalculatorOptions_Delegate_Nnapi * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_Delegate_Nnapi > (Arena *)
::mediapipe::InferenceCalculatorOptions_Delegate_TfLite * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_Delegate_TfLite > (Arena *)
::mediapipe::InferenceCalculatorOptions_Delegate_Xnnpack * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_Delegate_Xnnpack > (Arena *)
::mediapipe::InferenceCalculatorOptions_InputOutputConfig * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_InputOutputConfig > (Arena *)
::mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink > (Arena *)
::mediapipe::InferenceCalculatorOptions_InputOutputConfig_TensorIndicesMap * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_InputOutputConfig_TensorIndicesMap > (Arena *)
::mediapipe::InferenceCalculatorOptions_InputOutputConfig_TensorNamesMap * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_InputOutputConfig_TensorNamesMap > (Arena *)
const EnumDescriptor * GetEnumDescriptor< ::mediapipe::InferenceCalculatorOptions_Delegate_Gpu_Api > ()
const EnumDescriptor * GetEnumDescriptor< ::mediapipe::InferenceCalculatorOptions_Delegate_Gpu_CacheWritingBehavior > ()
const EnumDescriptor * GetEnumDescriptor< ::mediapipe::InferenceCalculatorOptions_Delegate_Gpu_InferenceUsage > ()

Macros

Type Name
define PROTOBUF_INTERNAL_EXPORT_mediapipe_2fcalculators_2ftensor_2finference_5fcalculator_2eproto

Public Attributes Documentation

variable descriptor_table_mediapipe_2fcalculators_2ftensor_2finference_5fcalculator_2eproto

const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable descriptor_table_mediapipe_2fcalculators_2ftensor_2finference_5fcalculator_2eproto;

Public Functions Documentation

function CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions >

template<>
::mediapipe::InferenceCalculatorOptions * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions > (
    Arena *
) 

function CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_Delegate >

template<>
::mediapipe::InferenceCalculatorOptions_Delegate * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_Delegate > (
    Arena *
) 

function CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_Delegate_Gpu >

template<>
::mediapipe::InferenceCalculatorOptions_Delegate_Gpu * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_Delegate_Gpu > (
    Arena *
) 

function CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_Delegate_Nnapi >

template<>
::mediapipe::InferenceCalculatorOptions_Delegate_Nnapi * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_Delegate_Nnapi > (
    Arena *
) 

function CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_Delegate_TfLite >

template<>
::mediapipe::InferenceCalculatorOptions_Delegate_TfLite * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_Delegate_TfLite > (
    Arena *
) 

function CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_Delegate_Xnnpack >

template<>
::mediapipe::InferenceCalculatorOptions_Delegate_Xnnpack * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_Delegate_Xnnpack > (
    Arena *
) 

function CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_InputOutputConfig >

template<>
::mediapipe::InferenceCalculatorOptions_InputOutputConfig * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_InputOutputConfig > (
    Arena *
) 
function CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink >

template<>
::mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink > (
    Arena *
) 
function CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_InputOutputConfig_TensorIndicesMap >

template<>
::mediapipe::InferenceCalculatorOptions_InputOutputConfig_TensorIndicesMap * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_InputOutputConfig_TensorIndicesMap > (
    Arena *
) 

function CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_InputOutputConfig_TensorNamesMap >

template<>
::mediapipe::InferenceCalculatorOptions_InputOutputConfig_TensorNamesMap * CreateMaybeMessage<::mediapipe::InferenceCalculatorOptions_InputOutputConfig_TensorNamesMap > (
    Arena *
) 

function GetEnumDescriptor< ::mediapipe::InferenceCalculatorOptions_Delegate_Gpu_Api >

template<>
inline const EnumDescriptor * GetEnumDescriptor< ::mediapipe::InferenceCalculatorOptions_Delegate_Gpu_Api > () 

function GetEnumDescriptor< ::mediapipe::InferenceCalculatorOptions_Delegate_Gpu_CacheWritingBehavior >

template<>
inline const EnumDescriptor * GetEnumDescriptor< ::mediapipe::InferenceCalculatorOptions_Delegate_Gpu_CacheWritingBehavior > () 

function GetEnumDescriptor< ::mediapipe::InferenceCalculatorOptions_Delegate_Gpu_InferenceUsage >

template<>
inline const EnumDescriptor * GetEnumDescriptor< ::mediapipe::InferenceCalculatorOptions_Delegate_Gpu_InferenceUsage > () 

Macro Definition Documentation

define PROTOBUF_INTERNAL_EXPORT_mediapipe_2fcalculators_2ftensor_2finference_5fcalculator_2eproto

#define PROTOBUF_INTERNAL_EXPORT_mediapipe_2fcalculators_2ftensor_2finference_5fcalculator_2eproto 


The documentation for this file was generated from the following file: /home/friedel/devel/ILLIXR-plugins/hand_tracking/build/mediapipe/calculators/tensor/inference_calculator.pb.h