34#include "ove_config.h"
40#include "ove/storage.h"
49 OVE_TENSOR_FLOAT32 = 0,
85#ifdef CONFIG_OVE_INFER
103int ove_model_init(
ove_model_t *model, ove_model_storage_t *storage,
145#elif !defined(__ZIG_CIMPORT__)
/**
 * Create a model session with statically allocated backing storage.
 *
 * Each expansion site gets its OWN static ove_model_storage_t and a
 * 16-byte-aligned static arena of (cfg)->arena_size bytes, then calls
 * ove_model_init() with them. Evaluates to ove_model_init()'s int
 * return value via a GNU statement expression (GCC/Clang extension).
 *
 * NOTE(review): because the arena has static storage duration,
 * (cfg)->arena_size must be a compile-time constant expression at the
 * expansion site — confirm callers pass a constant config.
 * NOTE(review): do not expand this macro twice expecting shared
 * storage; every expansion allocates a fresh arena.
 */
#define ove_model_create(pm, cfg) \
 ({ static ove_model_storage_t _ove_stor_; \
    static uint8_t __attribute__((aligned(16))) \
        _ove_arena_[(cfg)->arena_size]; \
    ove_model_init((pm), &_ove_stor_, _ove_arena_, (cfg)); })
153#define ove_model_destroy(m) ove_model_deinit(m)
184int ove_model_input(
ove_model_t model,
unsigned int index,
201int ove_model_output(
ove_model_t model,
unsigned int index,
/**
 * Report the duration of the most recent inference run, in microseconds.
 *
 * NOTE(review): exact timing semantics (wall clock vs. backend-reported)
 * are not visible here — confirm against the active backend. The
 * backend-disabled fallback elsewhere in this header returns 0.
 *
 * @param model  Opaque model session handle.
 * @return       Last inference time in microseconds.
 */
uint64_t ove_model_last_inference_us(
 ove_model_t model);
219static inline void ove_model_destroy(
ove_model_t m) { (void)m; }
223static inline uint64_t ove_model_last_inference_us(
ove_model_t m) { (void)m;
return 0; }
ove_tensor_type
Tensor element types.
Definition infer.h:48
#define OVE_ERR_NOT_SUPPORTED
The requested feature is not supported by the active backend.
Definition types.h:38
struct ove_model * ove_model_t
Opaque handle for an ML inference model session.
Definition types.h:118
Configuration for an ML inference session.
Definition infer.h:79
size_t model_size
Definition infer.h:81
const void * model_data
Definition infer.h:80
size_t arena_size
Definition infer.h:82
Tensor descriptor returned by ove_model_input() / ove_model_output().
Definition infer.h:63
unsigned int ndims
Definition infer.h:67
void * data
Definition infer.h:64
int dims[5]
Definition infer.h:68
size_t size
Definition infer.h:65
enum ove_tensor_type type
Definition infer.h:66