
Class mediapipe::api2::InferenceCalculatorGlImpl::GpuInferenceRunner


Public Functions

| Type | Name |
| --- | --- |
| const InputOutputTensorNames & | GetInputOutputTensorNames () const |
| absl::Status | Init (CalculatorContext * cc, std::shared_ptr< GlContext > gl_context) |
| absl::Status | LoadDelegate (CalculatorContext * cc, const mediapipe::InferenceCalculatorOptions::Delegate & delegate_options) |
| absl::Status | LoadDelegateAndAllocateTensors (CalculatorContext * cc, const mediapipe::InferenceCalculatorOptions::Delegate & delegate_options) |
| absl::Status | LoadModel (CalculatorContext * cc) |
| absl::Status | Process (CalculatorContext * cc, const TensorSpan & input_tensors, std::vector< Tensor > & output_tensors) |
|  | ~GpuInferenceRunner () |

Public Functions Documentation

function GetInputOutputTensorNames

const InputOutputTensorNames & GpuInferenceRunner::GetInputOutputTensorNames () const

function Init

absl::Status GpuInferenceRunner::Init (
    CalculatorContext * cc,
    std::shared_ptr< GlContext > gl_context
) 

function LoadDelegate

absl::Status GpuInferenceRunner::LoadDelegate (
    CalculatorContext * cc,
    const mediapipe::InferenceCalculatorOptions::Delegate & delegate_options
) 

function LoadDelegateAndAllocateTensors

absl::Status GpuInferenceRunner::LoadDelegateAndAllocateTensors (
    CalculatorContext * cc,
    const mediapipe::InferenceCalculatorOptions::Delegate & delegate_options
) 

function LoadModel

absl::Status GpuInferenceRunner::LoadModel (
    CalculatorContext * cc
) 

function Process

absl::Status GpuInferenceRunner::Process (
    CalculatorContext * cc,
    const TensorSpan & input_tensors,
    std::vector< Tensor > & output_tensors
) 

function ~GpuInferenceRunner

GpuInferenceRunner::~GpuInferenceRunner () 


The documentation for this class was generated from the following file: /home/friedel/devel/ILLIXR-plugins/hand_tracking/mediapipe/calculators/tensor/inference_calculator_gl.cc