Skip to content

Class mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink

ClassList > mediapipe > InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink

Inherits the following classes: PROTOBUF_NAMESPACE_ID::Message

Classes

Type Name
class _Internal

Public Types

Type Name
enum int (anonymous field-number enum; the generator labels it with the class name)

Public Static Attributes

Type Name
const ClassData _class_data_ = /* multi line expression */
constexpr int kIndexInFileMessages = /* multi line expression */

Public Functions

Type Name
size_t ByteSizeLong () const
PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear ()
void CopyFrom (const InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink & from)
int GetCachedSize () const
const ::PROTOBUF_NAMESPACE_ID::Message::ClassData * GetClassData () const
::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata () const
InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink ()
constexpr InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink (::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized)
InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink (const InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink & from)
InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink (InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink && from) noexcept
bool IsInitialized () const
void MergeFrom (const InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink & from)
InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink * New (::PROTOBUF_NAMESPACE_ID::Arena * arena=nullptr) const
void Swap (InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink * other)
void UnsafeArenaSwap (InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink * other)
const char * _InternalParse (const char * ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext * ctx)
uint8_t * _InternalSerialize (uint8_t * target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream * stream) const
void clear_from_output_tensor_name ()
void clear_to_input_tensor_name ()
const std::string & from_output_tensor_name () const
bool has_from_output_tensor_name () const
bool has_to_input_tensor_name () const
std::string * mutable_from_output_tensor_name ()
std::string * mutable_to_input_tensor_name ()
inline ::PROTOBUF_NAMESPACE_ID::UnknownFieldSet * mutable_unknown_fields ()
InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink & operator= (const InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink & from)
InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink & operator= (InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink && from) noexcept
PROTOBUF_NODISCARD std::string * release_from_output_tensor_name ()
PROTOBUF_NODISCARD std::string * release_to_input_tensor_name ()
void set_allocated_from_output_tensor_name (std::string * from_output_tensor_name)
void set_allocated_to_input_tensor_name (std::string * to_input_tensor_name)
void set_from_output_tensor_name (ArgT0 && arg0, ArgT... args)
PROTOBUF_ALWAYS_INLINE void set_from_output_tensor_name (ArgT0 && arg0, ArgT... args)
void set_to_input_tensor_name (ArgT0 && arg0, ArgT... args)
PROTOBUF_ALWAYS_INLINE void set_to_input_tensor_name (ArgT0 && arg0, ArgT... args)
const std::string & to_input_tensor_name () const
const ::PROTOBUF_NAMESPACE_ID::UnknownFieldSet & unknown_fields () const
~InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink () override

Public Static Functions

Type Name
const ::PROTOBUF_NAMESPACE_ID::Descriptor * GetDescriptor ()
const ::PROTOBUF_NAMESPACE_ID::Reflection * GetReflection ()
const InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink & default_instance ()
const ::PROTOBUF_NAMESPACE_ID::Descriptor * descriptor ()
const InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink * internal_default_instance ()

Protected Functions

Type Name
InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink (::PROTOBUF_NAMESPACE_ID::Arena * arena, bool is_message_owned=false)

Public Types Documentation

enum mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink {
    kFromOutputTensorNameFieldNumber = 1,
    kToInputTensorNameFieldNumber = 2
};

Public Static Attributes Documentation

variable _class_data_

const ::PROTOBUF_NAMESPACE_ID::Message::ClassData mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::_class_data_;

variable kIndexInFileMessages

constexpr int mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::kIndexInFileMessages;

Public Functions Documentation

function ByteSizeLong

size_t mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::ByteSizeLong () const

function Clear

PROTOBUF_ATTRIBUTE_REINITIALIZES void mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::Clear () 

function CopyFrom

void mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::CopyFrom (
    const InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink & from
) 

function GetCachedSize

inline int mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::GetCachedSize () const

function GetClassData

const ::PROTOBUF_NAMESPACE_ID::Message::ClassData * mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::GetClassData () const

function GetMetadata

::PROTOBUF_NAMESPACE_ID::Metadata mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::GetMetadata () const

inline mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink () 

explicit constexpr mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink (
    ::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized
) 

mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink (
    const InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink & from
) 

inline mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink (
    InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink && from
) noexcept

function IsInitialized

bool mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::IsInitialized () const

function MergeFrom

void mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::MergeFrom (
    const InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink & from
) 

function New

inline InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink * mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::New (
    ::PROTOBUF_NAMESPACE_ID::Arena * arena=nullptr
) const

function Swap

inline void mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::Swap (
    InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink * other
) 

function UnsafeArenaSwap

inline void mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::UnsafeArenaSwap (
    InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink * other
) 

function _InternalParse

const char * mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::_InternalParse (
    const char * ptr,
    ::PROTOBUF_NAMESPACE_ID::internal::ParseContext * ctx
) 

function _InternalSerialize

uint8_t * mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::_InternalSerialize (
    uint8_t * target,
    ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream * stream
) const

function clear_from_output_tensor_name

inline void mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::clear_from_output_tensor_name () 

function clear_to_input_tensor_name

inline void mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::clear_to_input_tensor_name () 

function from_output_tensor_name

inline const std::string & mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::from_output_tensor_name () const

function has_from_output_tensor_name

inline bool mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::has_from_output_tensor_name () const

function has_to_input_tensor_name

inline bool mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::has_to_input_tensor_name () const

function mutable_from_output_tensor_name

inline std::string * mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::mutable_from_output_tensor_name () 

function mutable_to_input_tensor_name

inline std::string * mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::mutable_to_input_tensor_name () 

function mutable_unknown_fields

inline ::PROTOBUF_NAMESPACE_ID::UnknownFieldSet * mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::mutable_unknown_fields () 

function operator=

inline InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink & mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::operator= (
    const InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink & from
) 

function operator=

inline InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink & mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::operator= (
    InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink && from
) noexcept

function release_from_output_tensor_name

inline PROTOBUF_NODISCARD std::string * mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::release_from_output_tensor_name () 

function release_to_input_tensor_name

inline PROTOBUF_NODISCARD std::string * mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::release_to_input_tensor_name () 

function set_allocated_from_output_tensor_name

inline void mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::set_allocated_from_output_tensor_name (
    std::string * from_output_tensor_name
) 

function set_allocated_to_input_tensor_name

inline void mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::set_allocated_to_input_tensor_name (
    std::string * to_input_tensor_name
) 

function set_from_output_tensor_name [1/2]

template<typename ArgT0, typename... ArgT>
void mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::set_from_output_tensor_name (
    ArgT0 && arg0,
    ArgT... args
) 

function set_from_output_tensor_name [2/2]

template<typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE void mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::set_from_output_tensor_name (
    ArgT0 && arg0,
    ArgT... args
) 

function set_to_input_tensor_name [1/2]

template<typename ArgT0, typename... ArgT>
void mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::set_to_input_tensor_name (
    ArgT0 && arg0,
    ArgT... args
) 

function set_to_input_tensor_name [2/2]

template<typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE void mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::set_to_input_tensor_name (
    ArgT0 && arg0,
    ArgT... args
) 

function to_input_tensor_name

inline const std::string & mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::to_input_tensor_name () const

function unknown_fields

inline const ::PROTOBUF_NAMESPACE_ID::UnknownFieldSet & mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::unknown_fields () const

mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::~InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink () override

Public Static Functions Documentation

function GetDescriptor

static inline const ::PROTOBUF_NAMESPACE_ID::Descriptor * mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::GetDescriptor () 

function GetReflection

static inline const ::PROTOBUF_NAMESPACE_ID::Reflection * mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::GetReflection () 

function default_instance

static inline const InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink & mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::default_instance () 

function descriptor

static inline const ::PROTOBUF_NAMESPACE_ID::Descriptor * mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::descriptor () 

function internal_default_instance

static inline const InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink * mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::internal_default_instance () 

Protected Functions Documentation

explicit mediapipe::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink::InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink (
    ::PROTOBUF_NAMESPACE_ID::Arena * arena,
    bool is_message_owned=false
) 

Friends Documentation

friend InternalHelper

template<typename T>
friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;

friend AnyMetadata

friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;

friend TableStruct_mediapipe_2fcalculators_2ftensor_2finference_5fcalculator_2eproto

friend struct ::TableStruct_mediapipe_2fcalculators_2ftensor_2finference_5fcalculator_2eproto;

friend swap

friend inline void swap (
    InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink & a,
    InferenceCalculatorOptions_InputOutputConfig_FeedbackTensorLink & b
) 


The documentation for this class was generated from the following file /home/friedel/devel/ILLIXR-plugins/hand_tracking/build/mediapipe/calculators/tensor/inference_calculator.pb.h