]> git.proxmox.com Git - ceph.git/blame - ceph/src/arrow/cpp/src/generated/SparseTensor_generated.h
import quincy 17.2.0
[ceph.git] / ceph / src / arrow / cpp / src / generated / SparseTensor_generated.h
CommitLineData
1d09f67e
TL
1// automatically generated by the FlatBuffers compiler, do not modify
2
3
4#ifndef FLATBUFFERS_GENERATED_SPARSETENSOR_ORG_APACHE_ARROW_FLATBUF_H_
5#define FLATBUFFERS_GENERATED_SPARSETENSOR_ORG_APACHE_ARROW_FLATBUF_H_
6
7#include "flatbuffers/flatbuffers.h"
8
9#include "Schema_generated.h"
10#include "Tensor_generated.h"
11
12namespace org {
13namespace apache {
14namespace arrow {
15namespace flatbuf {
16
// Forward declarations of the tables defined in this header, each paired
// with its generated builder type.
struct SparseTensorIndexCOO;
struct SparseTensorIndexCOOBuilder;

struct SparseMatrixIndexCSX;
struct SparseMatrixIndexCSXBuilder;

struct SparseTensorIndexCSF;
struct SparseTensorIndexCSFBuilder;

struct SparseTensor;
struct SparseTensorBuilder;
28
/// Which axis of a sparse matrix is compressed in the CSX (CSR/CSC) index.
/// MIN/MAX aliases bound the valid range for generated range checks.
enum class SparseMatrixCompressedAxis : int16_t {
  Row = 0,
  Column = 1,
  MIN = Row,
  MAX = Column
};
35
36inline const SparseMatrixCompressedAxis (&EnumValuesSparseMatrixCompressedAxis())[2] {
37 static const SparseMatrixCompressedAxis values[] = {
38 SparseMatrixCompressedAxis::Row,
39 SparseMatrixCompressedAxis::Column
40 };
41 return values;
42}
43
/// Returns the name table indexed by enum value, terminated by a nullptr
/// sentinel.
inline const char * const *EnumNamesSparseMatrixCompressedAxis() {
  static const char * const kNames[3] = {"Row", "Column", nullptr};
  return kNames;
}
52
53inline const char *EnumNameSparseMatrixCompressedAxis(SparseMatrixCompressedAxis e) {
54 if (flatbuffers::IsOutRange(e, SparseMatrixCompressedAxis::Row, SparseMatrixCompressedAxis::Column)) return "";
55 const size_t index = static_cast<size_t>(e);
56 return EnumNamesSparseMatrixCompressedAxis()[index];
57}
58
/// Discriminant for the SparseTensor.sparseIndex union; NONE means unset.
/// MIN/MAX aliases bound the valid range for generated range checks.
enum class SparseTensorIndex : uint8_t {
  NONE = 0,
  SparseTensorIndexCOO = 1,
  SparseMatrixIndexCSX = 2,
  SparseTensorIndexCSF = 3,
  MIN = NONE,
  MAX = SparseTensorIndexCSF
};
67
68inline const SparseTensorIndex (&EnumValuesSparseTensorIndex())[4] {
69 static const SparseTensorIndex values[] = {
70 SparseTensorIndex::NONE,
71 SparseTensorIndex::SparseTensorIndexCOO,
72 SparseTensorIndex::SparseMatrixIndexCSX,
73 SparseTensorIndex::SparseTensorIndexCSF
74 };
75 return values;
76}
77
/// Returns the name table indexed by union discriminant, terminated by a
/// nullptr sentinel.
inline const char * const *EnumNamesSparseTensorIndex() {
  static const char * const kNames[5] = {
    "NONE",
    "SparseTensorIndexCOO",
    "SparseMatrixIndexCSX",
    "SparseTensorIndexCSF",
    nullptr};
  return kNames;
}
88
89inline const char *EnumNameSparseTensorIndex(SparseTensorIndex e) {
90 if (flatbuffers::IsOutRange(e, SparseTensorIndex::NONE, SparseTensorIndex::SparseTensorIndexCSF)) return "";
91 const size_t index = static_cast<size_t>(e);
92 return EnumNamesSparseTensorIndex()[index];
93}
94
/// Maps a union member type to its SparseTensorIndex discriminant at compile
/// time; the unspecialized template yields NONE for non-member types.
template<typename T> struct SparseTensorIndexTraits {
  static const SparseTensorIndex enum_value = SparseTensorIndex::NONE;
};

template<> struct SparseTensorIndexTraits<org::apache::arrow::flatbuf::SparseTensorIndexCOO> {
  static const SparseTensorIndex enum_value = SparseTensorIndex::SparseTensorIndexCOO;
};

template<> struct SparseTensorIndexTraits<org::apache::arrow::flatbuf::SparseMatrixIndexCSX> {
  static const SparseTensorIndex enum_value = SparseTensorIndex::SparseMatrixIndexCSX;
};

template<> struct SparseTensorIndexTraits<org::apache::arrow::flatbuf::SparseTensorIndexCSF> {
  static const SparseTensorIndex enum_value = SparseTensorIndex::SparseTensorIndexCSF;
};
110
// Verify a single union payload against its discriminant, and a parallel
// (values, types) vector pair; definitions appear later in the generated file.
bool VerifySparseTensorIndex(flatbuffers::Verifier &verifier, const void *obj, SparseTensorIndex type);
bool VerifySparseTensorIndexVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector<flatbuffers::Offset<void>> *values, const flatbuffers::Vector<uint8_t> *types);
113
114/// ----------------------------------------------------------------------
115/// EXPERIMENTAL: Data structures for sparse tensors
116/// Coordinate (COO) format of sparse tensor index.
117///
118/// COO's index list are represented as a NxM matrix,
119/// where N is the number of non-zero values,
120/// and M is the number of dimensions of a sparse tensor.
121///
122/// indicesBuffer stores the location and size of the data of this indices
123/// matrix. The value type and the stride of the indices matrix is
124/// specified in indicesType and indicesStrides fields.
125///
126/// For example, let X be a 2x3x4x5 tensor, and it has the following
127/// 6 non-zero values:
128/// ```text
129/// X[0, 1, 2, 0] := 1
130/// X[1, 1, 2, 3] := 2
131/// X[0, 2, 1, 0] := 3
132/// X[0, 1, 3, 0] := 4
133/// X[0, 1, 2, 1] := 5
134/// X[1, 2, 0, 4] := 6
135/// ```
136/// In COO format, the index matrix of X is the following 4x6 matrix:
137/// ```text
138/// [[0, 0, 0, 0, 1, 1],
139/// [1, 1, 1, 2, 1, 2],
140/// [2, 2, 3, 1, 2, 0],
141/// [0, 1, 0, 0, 3, 4]]
142/// ```
143/// When isCanonical is true, the indices is sorted in lexicographical order
144/// (row-major order), and it does not have duplicated entries. Otherwise,
145/// the indices may not be sorted, or may have duplicated entries.
struct SparseTensorIndexCOO FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef SparseTensorIndexCOOBuilder Builder;
  // vtable slot offsets for each field of this table.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_INDICESTYPE = 4,
    VT_INDICESSTRIDES = 6,
    VT_INDICESBUFFER = 8,
    VT_ISCANONICAL = 10
  };
  /// The type of values in indicesBuffer
  const org::apache::arrow::flatbuf::Int *indicesType() const {
    return GetPointer<const org::apache::arrow::flatbuf::Int *>(VT_INDICESTYPE);
  }
  /// Non-negative byte offsets to advance one value cell along each dimension
  /// If omitted, default to row-major order (C-like).
  const flatbuffers::Vector<int64_t> *indicesStrides() const {
    return GetPointer<const flatbuffers::Vector<int64_t> *>(VT_INDICESSTRIDES);
  }
  /// The location and size of the indices matrix's data
  const org::apache::arrow::flatbuf::Buffer *indicesBuffer() const {
    return GetStruct<const org::apache::arrow::flatbuf::Buffer *>(VT_INDICESBUFFER);
  }
  /// This flag is true if and only if the indices matrix is sorted in
  /// row-major order, and does not have duplicated entries.
  /// This sort order is the same as of Tensorflow's SparseTensor,
  /// but it is inverse order of SciPy's canonical coo_matrix
  /// (SciPy employs column-major order for its coo_matrix).
  bool isCanonical() const {
    // Booleans are stored as uint8_t; 0 (false) is the schema default.
    return GetField<uint8_t>(VT_ISCANONICAL, 0) != 0;
  }
  // Structural validation used by flatbuffers::Verifier: indicesType and
  // indicesBuffer are required fields, the other two are optional.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffsetRequired(verifier, VT_INDICESTYPE) &&
           verifier.VerifyTable(indicesType()) &&
           VerifyOffset(verifier, VT_INDICESSTRIDES) &&
           verifier.VerifyVector(indicesStrides()) &&
           VerifyFieldRequired<org::apache::arrow::flatbuf::Buffer>(verifier, VT_INDICESBUFFER) &&
           VerifyField<uint8_t>(verifier, VT_ISCANONICAL) &&
           verifier.EndTable();
  }
};
186
// Incremental builder for SparseTensorIndexCOO tables; Finish() enforces the
// schema's required fields.
struct SparseTensorIndexCOOBuilder {
  typedef SparseTensorIndexCOO Table;
  flatbuffers::FlatBufferBuilder &fbb_;  // buffer being written into
  flatbuffers::uoffset_t start_;         // table start returned by StartTable()
  void add_indicesType(flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indicesType) {
    fbb_.AddOffset(SparseTensorIndexCOO::VT_INDICESTYPE, indicesType);
  }
  void add_indicesStrides(flatbuffers::Offset<flatbuffers::Vector<int64_t>> indicesStrides) {
    fbb_.AddOffset(SparseTensorIndexCOO::VT_INDICESSTRIDES, indicesStrides);
  }
  void add_indicesBuffer(const org::apache::arrow::flatbuf::Buffer *indicesBuffer) {
    fbb_.AddStruct(SparseTensorIndexCOO::VT_INDICESBUFFER, indicesBuffer);
  }
  void add_isCanonical(bool isCanonical) {
    fbb_.AddElement<uint8_t>(SparseTensorIndexCOO::VT_ISCANONICAL, static_cast<uint8_t>(isCanonical), 0);
  }
  explicit SparseTensorIndexCOOBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but never defined: disables copy assignment (pre-C++11
  // generated-code idiom).
  SparseTensorIndexCOOBuilder &operator=(const SparseTensorIndexCOOBuilder &);
  // Closes the table and asserts the required fields were added.
  flatbuffers::Offset<SparseTensorIndexCOO> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<SparseTensorIndexCOO>(end);
    fbb_.Required(o, SparseTensorIndexCOO::VT_INDICESTYPE);
    fbb_.Required(o, SparseTensorIndexCOO::VT_INDICESBUFFER);
    return o;
  }
};
216
// Convenience helper that builds a complete SparseTensorIndexCOO table in one
// call. Fields are added in the fixed order emitted by the flatbuffers code
// generator (presumably size/alignment driven); the serialized layout depends
// on this order, so do not rearrange by hand.
inline flatbuffers::Offset<SparseTensorIndexCOO> CreateSparseTensorIndexCOO(
    flatbuffers::FlatBufferBuilder &_fbb,
    flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indicesType = 0,
    flatbuffers::Offset<flatbuffers::Vector<int64_t>> indicesStrides = 0,
    const org::apache::arrow::flatbuf::Buffer *indicesBuffer = 0,
    bool isCanonical = false) {
  SparseTensorIndexCOOBuilder builder_(_fbb);
  builder_.add_indicesBuffer(indicesBuffer);
  builder_.add_indicesStrides(indicesStrides);
  builder_.add_indicesType(indicesType);
  builder_.add_isCanonical(isCanonical);
  return builder_.Finish();
}
230
// Same as CreateSparseTensorIndexCOO, but accepts a std::vector for the
// strides and serializes it into the buffer first.
inline flatbuffers::Offset<SparseTensorIndexCOO> CreateSparseTensorIndexCOODirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indicesType = 0,
    const std::vector<int64_t> *indicesStrides = nullptr,
    const org::apache::arrow::flatbuf::Buffer *indicesBuffer = 0,
    bool isCanonical = false) {
  auto indicesStrides__ = indicesStrides ? _fbb.CreateVector<int64_t>(*indicesStrides) : 0;
  return org::apache::arrow::flatbuf::CreateSparseTensorIndexCOO(
      _fbb,
      indicesType,
      indicesStrides__,
      indicesBuffer,
      isCanonical);
}
245
246/// Compressed Sparse format, that is matrix-specific.
struct SparseMatrixIndexCSX FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef SparseMatrixIndexCSXBuilder Builder;
  // vtable slot offsets for each field of this table.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_COMPRESSEDAXIS = 4,
    VT_INDPTRTYPE = 6,
    VT_INDPTRBUFFER = 8,
    VT_INDICESTYPE = 10,
    VT_INDICESBUFFER = 12
  };
  /// Which axis, row or column, is compressed
  org::apache::arrow::flatbuf::SparseMatrixCompressedAxis compressedAxis() const {
    // Stored as the enum's int16_t underlying type; default 0 == Row.
    return static_cast<org::apache::arrow::flatbuf::SparseMatrixCompressedAxis>(GetField<int16_t>(VT_COMPRESSEDAXIS, 0));
  }
  /// The type of values in indptrBuffer
  const org::apache::arrow::flatbuf::Int *indptrType() const {
    return GetPointer<const org::apache::arrow::flatbuf::Int *>(VT_INDPTRTYPE);
  }
  /// indptrBuffer stores the location and size of indptr array that
  /// represents the range of the rows.
  /// The i-th row spans from `indptr[i]` to `indptr[i+1]` in the data.
  /// The length of this array is 1 + (the number of rows), and the type
  /// of index value is long.
  ///
  /// For example, let X be the following 6x4 matrix:
  /// ```text
  ///   X := [[0, 1, 2, 0],
  ///         [0, 0, 3, 0],
  ///         [0, 4, 0, 5],
  ///         [0, 0, 0, 0],
  ///         [6, 0, 7, 8],
  ///         [0, 9, 0, 0]].
  /// ```
  /// The array of non-zero values in X is:
  /// ```text
  ///   values(X) = [1, 2, 3, 4, 5, 6, 7, 8, 9].
  /// ```
  /// And the indptr of X is:
  /// ```text
  ///   indptr(X) = [0, 2, 3, 5, 5, 8, 10].
  /// ```
  const org::apache::arrow::flatbuf::Buffer *indptrBuffer() const {
    return GetStruct<const org::apache::arrow::flatbuf::Buffer *>(VT_INDPTRBUFFER);
  }
  /// The type of values in indicesBuffer
  const org::apache::arrow::flatbuf::Int *indicesType() const {
    return GetPointer<const org::apache::arrow::flatbuf::Int *>(VT_INDICESTYPE);
  }
  /// indicesBuffer stores the location and size of the array that
  /// contains the column indices of the corresponding non-zero values.
  /// The type of index value is long.
  ///
  /// For example, the indices of the above X is:
  /// ```text
  ///   indices(X) = [1, 2, 2, 1, 3, 0, 2, 3, 1].
  /// ```
  /// Note that the indices are sorted in lexicographical order for each row.
  const org::apache::arrow::flatbuf::Buffer *indicesBuffer() const {
    return GetStruct<const org::apache::arrow::flatbuf::Buffer *>(VT_INDICESBUFFER);
  }
  // Structural validation used by flatbuffers::Verifier: both type tables and
  // both buffer structs are required; compressedAxis is an optional scalar.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<int16_t>(verifier, VT_COMPRESSEDAXIS) &&
           VerifyOffsetRequired(verifier, VT_INDPTRTYPE) &&
           verifier.VerifyTable(indptrType()) &&
           VerifyFieldRequired<org::apache::arrow::flatbuf::Buffer>(verifier, VT_INDPTRBUFFER) &&
           VerifyOffsetRequired(verifier, VT_INDICESTYPE) &&
           verifier.VerifyTable(indicesType()) &&
           VerifyFieldRequired<org::apache::arrow::flatbuf::Buffer>(verifier, VT_INDICESBUFFER) &&
           verifier.EndTable();
  }
};
318
// Incremental builder for SparseMatrixIndexCSX tables; Finish() enforces the
// schema's required fields.
struct SparseMatrixIndexCSXBuilder {
  typedef SparseMatrixIndexCSX Table;
  flatbuffers::FlatBufferBuilder &fbb_;  // buffer being written into
  flatbuffers::uoffset_t start_;         // table start returned by StartTable()
  void add_compressedAxis(org::apache::arrow::flatbuf::SparseMatrixCompressedAxis compressedAxis) {
    fbb_.AddElement<int16_t>(SparseMatrixIndexCSX::VT_COMPRESSEDAXIS, static_cast<int16_t>(compressedAxis), 0);
  }
  void add_indptrType(flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indptrType) {
    fbb_.AddOffset(SparseMatrixIndexCSX::VT_INDPTRTYPE, indptrType);
  }
  void add_indptrBuffer(const org::apache::arrow::flatbuf::Buffer *indptrBuffer) {
    fbb_.AddStruct(SparseMatrixIndexCSX::VT_INDPTRBUFFER, indptrBuffer);
  }
  void add_indicesType(flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indicesType) {
    fbb_.AddOffset(SparseMatrixIndexCSX::VT_INDICESTYPE, indicesType);
  }
  void add_indicesBuffer(const org::apache::arrow::flatbuf::Buffer *indicesBuffer) {
    fbb_.AddStruct(SparseMatrixIndexCSX::VT_INDICESBUFFER, indicesBuffer);
  }
  explicit SparseMatrixIndexCSXBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but never defined: disables copy assignment (pre-C++11
  // generated-code idiom).
  SparseMatrixIndexCSXBuilder &operator=(const SparseMatrixIndexCSXBuilder &);
  // Closes the table and asserts the required fields were added.
  flatbuffers::Offset<SparseMatrixIndexCSX> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<SparseMatrixIndexCSX>(end);
    fbb_.Required(o, SparseMatrixIndexCSX::VT_INDPTRTYPE);
    fbb_.Required(o, SparseMatrixIndexCSX::VT_INDPTRBUFFER);
    fbb_.Required(o, SparseMatrixIndexCSX::VT_INDICESTYPE);
    fbb_.Required(o, SparseMatrixIndexCSX::VT_INDICESBUFFER);
    return o;
  }
};
353
// Convenience helper that builds a complete SparseMatrixIndexCSX table in one
// call. Fields are added in the fixed order emitted by the flatbuffers code
// generator; the serialized layout depends on this order.
inline flatbuffers::Offset<SparseMatrixIndexCSX> CreateSparseMatrixIndexCSX(
    flatbuffers::FlatBufferBuilder &_fbb,
    org::apache::arrow::flatbuf::SparseMatrixCompressedAxis compressedAxis = org::apache::arrow::flatbuf::SparseMatrixCompressedAxis::Row,
    flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indptrType = 0,
    const org::apache::arrow::flatbuf::Buffer *indptrBuffer = 0,
    flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indicesType = 0,
    const org::apache::arrow::flatbuf::Buffer *indicesBuffer = 0) {
  SparseMatrixIndexCSXBuilder builder_(_fbb);
  builder_.add_indicesBuffer(indicesBuffer);
  builder_.add_indicesType(indicesType);
  builder_.add_indptrBuffer(indptrBuffer);
  builder_.add_indptrType(indptrType);
  builder_.add_compressedAxis(compressedAxis);
  return builder_.Finish();
}
369
370/// Compressed Sparse Fiber (CSF) sparse tensor index.
struct SparseTensorIndexCSF FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef SparseTensorIndexCSFBuilder Builder;
  // vtable slot offsets for each field of this table.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_INDPTRTYPE = 4,
    VT_INDPTRBUFFERS = 6,
    VT_INDICESTYPE = 8,
    VT_INDICESBUFFERS = 10,
    VT_AXISORDER = 12
  };
  /// CSF is a generalization of compressed sparse row (CSR) index.
  /// See [smith2017knl](http://shaden.io/pub-files/smith2017knl.pdf)
  ///
  /// CSF index recursively compresses each dimension of a tensor into a set
  /// of prefix trees. Each path from a root to leaf forms one tensor
  /// non-zero index. CSF is implemented with two arrays of buffers and one
  /// arrays of integers.
  ///
  /// For example, let X be a 2x3x4x5 tensor and let it have the following
  /// 8 non-zero values:
  /// ```text
  ///   X[0, 0, 0, 1] := 1
  ///   X[0, 0, 0, 2] := 2
  ///   X[0, 1, 0, 0] := 3
  ///   X[0, 1, 0, 2] := 4
  ///   X[0, 1, 1, 0] := 5
  ///   X[1, 1, 1, 0] := 6
  ///   X[1, 1, 1, 1] := 7
  ///   X[1, 1, 1, 2] := 8
  /// ```
  /// As a prefix tree this would be represented as:
  /// ```text
  ///         0          1
  ///        / \         |
  ///       0   1        1
  ///      /   / \       |
  ///     0   0   1      1
  ///    /|  /|   |    /| |
  ///   1 2 0 2   0   0 1 2
  /// ```
  /// The type of values in indptrBuffers
  const org::apache::arrow::flatbuf::Int *indptrType() const {
    return GetPointer<const org::apache::arrow::flatbuf::Int *>(VT_INDPTRTYPE);
  }
  /// indptrBuffers stores the sparsity structure.
  /// Each two consecutive dimensions in a tensor correspond to a buffer in
  /// indptrBuffers. A pair of consecutive values at `indptrBuffers[dim][i]`
  /// and `indptrBuffers[dim][i + 1]` signify a range of nodes in
  /// `indicesBuffers[dim + 1]` who are children of `indicesBuffers[dim][i]` node.
  ///
  /// For example, the indptrBuffers for the above X is:
  /// ```text
  ///   indptrBuffer(X) = [
  ///                       [0, 2, 3],
  ///                       [0, 1, 3, 4],
  ///                       [0, 2, 4, 5, 8]
  ///                     ].
  /// ```
  const flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *> *indptrBuffers() const {
    return GetPointer<const flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *> *>(VT_INDPTRBUFFERS);
  }
  /// The type of values in indicesBuffers
  const org::apache::arrow::flatbuf::Int *indicesType() const {
    return GetPointer<const org::apache::arrow::flatbuf::Int *>(VT_INDICESTYPE);
  }
  /// indicesBuffers stores values of nodes.
  /// Each tensor dimension corresponds to a buffer in indicesBuffers.
  /// For example, the indicesBuffers for the above X is:
  /// ```text
  ///   indicesBuffer(X) = [
  ///                        [0, 1],
  ///                        [0, 1, 1],
  ///                        [0, 0, 1, 1],
  ///                        [1, 2, 0, 2, 0, 0, 1, 2]
  ///                      ].
  /// ```
  const flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *> *indicesBuffers() const {
    return GetPointer<const flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *> *>(VT_INDICESBUFFERS);
  }
  /// axisOrder stores the sequence in which dimensions were traversed to
  /// produce the prefix tree.
  /// For example, the axisOrder for the above X is:
  /// ```text
  ///   axisOrder(X) = [0, 1, 2, 3].
  /// ```
  const flatbuffers::Vector<int32_t> *axisOrder() const {
    return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_AXISORDER);
  }
  // Structural validation used by flatbuffers::Verifier: every field of this
  // table is required.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyOffsetRequired(verifier, VT_INDPTRTYPE) &&
           verifier.VerifyTable(indptrType()) &&
           VerifyOffsetRequired(verifier, VT_INDPTRBUFFERS) &&
           verifier.VerifyVector(indptrBuffers()) &&
           VerifyOffsetRequired(verifier, VT_INDICESTYPE) &&
           verifier.VerifyTable(indicesType()) &&
           VerifyOffsetRequired(verifier, VT_INDICESBUFFERS) &&
           verifier.VerifyVector(indicesBuffers()) &&
           VerifyOffsetRequired(verifier, VT_AXISORDER) &&
           verifier.VerifyVector(axisOrder()) &&
           verifier.EndTable();
  }
};
473
// Incremental builder for SparseTensorIndexCSF tables; Finish() enforces the
// schema's required fields (all five).
struct SparseTensorIndexCSFBuilder {
  typedef SparseTensorIndexCSF Table;
  flatbuffers::FlatBufferBuilder &fbb_;  // buffer being written into
  flatbuffers::uoffset_t start_;         // table start returned by StartTable()
  void add_indptrType(flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indptrType) {
    fbb_.AddOffset(SparseTensorIndexCSF::VT_INDPTRTYPE, indptrType);
  }
  void add_indptrBuffers(flatbuffers::Offset<flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *>> indptrBuffers) {
    fbb_.AddOffset(SparseTensorIndexCSF::VT_INDPTRBUFFERS, indptrBuffers);
  }
  void add_indicesType(flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indicesType) {
    fbb_.AddOffset(SparseTensorIndexCSF::VT_INDICESTYPE, indicesType);
  }
  void add_indicesBuffers(flatbuffers::Offset<flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *>> indicesBuffers) {
    fbb_.AddOffset(SparseTensorIndexCSF::VT_INDICESBUFFERS, indicesBuffers);
  }
  void add_axisOrder(flatbuffers::Offset<flatbuffers::Vector<int32_t>> axisOrder) {
    fbb_.AddOffset(SparseTensorIndexCSF::VT_AXISORDER, axisOrder);
  }
  explicit SparseTensorIndexCSFBuilder(flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
  // Declared but never defined: disables copy assignment (pre-C++11
  // generated-code idiom).
  SparseTensorIndexCSFBuilder &operator=(const SparseTensorIndexCSFBuilder &);
  // Closes the table and asserts the required fields were added.
  flatbuffers::Offset<SparseTensorIndexCSF> Finish() {
    const auto end = fbb_.EndTable(start_);
    auto o = flatbuffers::Offset<SparseTensorIndexCSF>(end);
    fbb_.Required(o, SparseTensorIndexCSF::VT_INDPTRTYPE);
    fbb_.Required(o, SparseTensorIndexCSF::VT_INDPTRBUFFERS);
    fbb_.Required(o, SparseTensorIndexCSF::VT_INDICESTYPE);
    fbb_.Required(o, SparseTensorIndexCSF::VT_INDICESBUFFERS);
    fbb_.Required(o, SparseTensorIndexCSF::VT_AXISORDER);
    return o;
  }
};
509
// Convenience helper that builds a complete SparseTensorIndexCSF table in one
// call. Fields are added in the fixed order emitted by the flatbuffers code
// generator; the serialized layout depends on this order.
inline flatbuffers::Offset<SparseTensorIndexCSF> CreateSparseTensorIndexCSF(
    flatbuffers::FlatBufferBuilder &_fbb,
    flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indptrType = 0,
    flatbuffers::Offset<flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *>> indptrBuffers = 0,
    flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indicesType = 0,
    flatbuffers::Offset<flatbuffers::Vector<const org::apache::arrow::flatbuf::Buffer *>> indicesBuffers = 0,
    flatbuffers::Offset<flatbuffers::Vector<int32_t>> axisOrder = 0) {
  SparseTensorIndexCSFBuilder builder_(_fbb);
  builder_.add_axisOrder(axisOrder);
  builder_.add_indicesBuffers(indicesBuffers);
  builder_.add_indicesType(indicesType);
  builder_.add_indptrBuffers(indptrBuffers);
  builder_.add_indptrType(indptrType);
  return builder_.Finish();
}
525
// Same as CreateSparseTensorIndexCSF, but accepts std::vectors and serializes
// the struct vectors and axis order into the buffer first.
inline flatbuffers::Offset<SparseTensorIndexCSF> CreateSparseTensorIndexCSFDirect(
    flatbuffers::FlatBufferBuilder &_fbb,
    flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indptrType = 0,
    const std::vector<org::apache::arrow::flatbuf::Buffer> *indptrBuffers = nullptr,
    flatbuffers::Offset<org::apache::arrow::flatbuf::Int> indicesType = 0,
    const std::vector<org::apache::arrow::flatbuf::Buffer> *indicesBuffers = nullptr,
    const std::vector<int32_t> *axisOrder = nullptr) {
  auto indptrBuffers__ = indptrBuffers ? _fbb.CreateVectorOfStructs<org::apache::arrow::flatbuf::Buffer>(*indptrBuffers) : 0;
  auto indicesBuffers__ = indicesBuffers ? _fbb.CreateVectorOfStructs<org::apache::arrow::flatbuf::Buffer>(*indicesBuffers) : 0;
  auto axisOrder__ = axisOrder ? _fbb.CreateVector<int32_t>(*axisOrder) : 0;
  return org::apache::arrow::flatbuf::CreateSparseTensorIndexCSF(
      _fbb,
      indptrType,
      indptrBuffers__,
      indicesType,
      indicesBuffers__,
      axisOrder__);
}
544
struct SparseTensor FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
  typedef SparseTensorBuilder Builder;
  // vtable slot offsets for each field of this table.
  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_TYPE_TYPE = 4,
    VT_TYPE = 6,
    VT_SHAPE = 8,
    VT_NON_ZERO_LENGTH = 10,
    VT_SPARSEINDEX_TYPE = 12,
    VT_SPARSEINDEX = 14,
    VT_DATA = 16
  };
  // Discriminant of the `type` union (stored as uint8_t; 0 is the
  // "not set" value — see Type in Schema_generated.h).
  org::apache::arrow::flatbuf::Type type_type() const {
    return static_cast<org::apache::arrow::flatbuf::Type>(GetField<uint8_t>(VT_TYPE_TYPE, 0));
  }
  /// The type of data contained in a value cell.
  /// Currently only fixed-width value types are supported,
  /// no strings or nested types.
  const void *type() const {
    return GetPointer<const void *>(VT_TYPE);
  }
  // Typed accessors for the `type` union: each returns the payload cast to
  // the requested member iff the discriminant matches, else nullptr.
  template<typename T> const T *type_as() const;
  const org::apache::arrow::flatbuf::Null *type_as_Null() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Null ? static_cast<const org::apache::arrow::flatbuf::Null *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Int *type_as_Int() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Int ? static_cast<const org::apache::arrow::flatbuf::Int *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::FloatingPoint *type_as_FloatingPoint() const {
    return type_type() == org::apache::arrow::flatbuf::Type::FloatingPoint ? static_cast<const org::apache::arrow::flatbuf::FloatingPoint *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Binary *type_as_Binary() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Binary ? static_cast<const org::apache::arrow::flatbuf::Binary *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Utf8 *type_as_Utf8() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Utf8 ? static_cast<const org::apache::arrow::flatbuf::Utf8 *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Bool *type_as_Bool() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Bool ? static_cast<const org::apache::arrow::flatbuf::Bool *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Decimal *type_as_Decimal() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Decimal ? static_cast<const org::apache::arrow::flatbuf::Decimal *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Date *type_as_Date() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Date ? static_cast<const org::apache::arrow::flatbuf::Date *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Time *type_as_Time() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Time ? static_cast<const org::apache::arrow::flatbuf::Time *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Timestamp *type_as_Timestamp() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Timestamp ? static_cast<const org::apache::arrow::flatbuf::Timestamp *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Interval *type_as_Interval() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Interval ? static_cast<const org::apache::arrow::flatbuf::Interval *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::List *type_as_List() const {
    return type_type() == org::apache::arrow::flatbuf::Type::List ? static_cast<const org::apache::arrow::flatbuf::List *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Struct_ *type_as_Struct_() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Struct_ ? static_cast<const org::apache::arrow::flatbuf::Struct_ *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Union *type_as_Union() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Union ? static_cast<const org::apache::arrow::flatbuf::Union *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::FixedSizeBinary *type_as_FixedSizeBinary() const {
    return type_type() == org::apache::arrow::flatbuf::Type::FixedSizeBinary ? static_cast<const org::apache::arrow::flatbuf::FixedSizeBinary *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::FixedSizeList *type_as_FixedSizeList() const {
    return type_type() == org::apache::arrow::flatbuf::Type::FixedSizeList ? static_cast<const org::apache::arrow::flatbuf::FixedSizeList *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Map *type_as_Map() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Map ? static_cast<const org::apache::arrow::flatbuf::Map *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::Duration *type_as_Duration() const {
    return type_type() == org::apache::arrow::flatbuf::Type::Duration ? static_cast<const org::apache::arrow::flatbuf::Duration *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::LargeBinary *type_as_LargeBinary() const {
    return type_type() == org::apache::arrow::flatbuf::Type::LargeBinary ? static_cast<const org::apache::arrow::flatbuf::LargeBinary *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::LargeUtf8 *type_as_LargeUtf8() const {
    return type_type() == org::apache::arrow::flatbuf::Type::LargeUtf8 ? static_cast<const org::apache::arrow::flatbuf::LargeUtf8 *>(type()) : nullptr;
  }
  const org::apache::arrow::flatbuf::LargeList *type_as_LargeList() const {
    return type_type() == org::apache::arrow::flatbuf::Type::LargeList ? static_cast<const org::apache::arrow::flatbuf::LargeList *>(type()) : nullptr;
  }
  /// The dimensions of the tensor, optionally named.
  const flatbuffers::Vector<flatbuffers::Offset<org::apache::arrow::flatbuf::TensorDim>> *shape() const {
    return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<org::apache::arrow::flatbuf::TensorDim>> *>(VT_SHAPE);
  }
  /// The number of non-zero values in a sparse tensor.
  int64_t non_zero_length() const {
    return GetField<int64_t>(VT_NON_ZERO_LENGTH, 0);
  }
  // Discriminant of the `sparseIndex` union; NONE (0) when unset.
  org::apache::arrow::flatbuf::SparseTensorIndex sparseIndex_type() const {
    return static_cast<org::apache::arrow::flatbuf::SparseTensorIndex>(GetField<uint8_t>(VT_SPARSEINDEX_TYPE, 0));
  }
  /// Sparse tensor index
  const void *sparseIndex() const {
    return GetPointer<const void *>(VT_SPARSEINDEX);
  }
  // Typed accessors for the `sparseIndex` union, mirroring type_as_* above.
  template<typename T> const T *sparseIndex_as() const;
  const org::apache::arrow::flatbuf::SparseTensorIndexCOO *sparseIndex_as_SparseTensorIndexCOO() const {
    return sparseIndex_type() == org::apache::arrow::flatbuf::SparseTensorIndex::SparseTensorIndexCOO ? static_cast<const org::apache::arrow::flatbuf::SparseTensorIndexCOO *>(sparseIndex()) : nullptr;
  }
  const org::apache::arrow::flatbuf::SparseMatrixIndexCSX *sparseIndex_as_SparseMatrixIndexCSX() const {
    return sparseIndex_type() == org::apache::arrow::flatbuf::SparseTensorIndex::SparseMatrixIndexCSX ? static_cast<const org::apache::arrow::flatbuf::SparseMatrixIndexCSX *>(sparseIndex()) : nullptr;
  }
  const org::apache::arrow::flatbuf::SparseTensorIndexCSF *sparseIndex_as_SparseTensorIndexCSF() const {
    return sparseIndex_type() == org::apache::arrow::flatbuf::SparseTensorIndex::SparseTensorIndexCSF ? static_cast<const org::apache::arrow::flatbuf::SparseTensorIndexCSF *>(sparseIndex()) : nullptr;
  }
  /// The location and size of the tensor's data
  const org::apache::arrow::flatbuf::Buffer *data() const {
    return GetStruct<const org::apache::arrow::flatbuf::Buffer *>(VT_DATA);
  }
  // Structural validation used by flatbuffers::Verifier: the type union,
  // shape, sparse index union and data buffer are all required;
  // non_zero_length is an optional scalar.
  bool Verify(flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
           VerifyField<uint8_t>(verifier, VT_TYPE_TYPE) &&
           VerifyOffsetRequired(verifier, VT_TYPE) &&
           VerifyType(verifier, type(), type_type()) &&
           VerifyOffsetRequired(verifier, VT_SHAPE) &&
           verifier.VerifyVector(shape()) &&
           verifier.VerifyVectorOfTables(shape()) &&
           VerifyField<int64_t>(verifier, VT_NON_ZERO_LENGTH) &&
           VerifyField<uint8_t>(verifier, VT_SPARSEINDEX_TYPE) &&
           VerifyOffsetRequired(verifier, VT_SPARSEINDEX) &&
           VerifySparseTensorIndex(verifier, sparseIndex(), sparseIndex_type()) &&
           VerifyFieldRequired<org::apache::arrow::flatbuf::Buffer>(verifier, VT_DATA) &&
           verifier.EndTable();
  }
};
674
// Specializations routing SparseTensor::type_as<T>() to the matching
// type_as_T() accessor, so generic code can look members up by type.
template<> inline const org::apache::arrow::flatbuf::Null *SparseTensor::type_as<org::apache::arrow::flatbuf::Null>() const {
  return type_as_Null();
}

template<> inline const org::apache::arrow::flatbuf::Int *SparseTensor::type_as<org::apache::arrow::flatbuf::Int>() const {
  return type_as_Int();
}

template<> inline const org::apache::arrow::flatbuf::FloatingPoint *SparseTensor::type_as<org::apache::arrow::flatbuf::FloatingPoint>() const {
  return type_as_FloatingPoint();
}

template<> inline const org::apache::arrow::flatbuf::Binary *SparseTensor::type_as<org::apache::arrow::flatbuf::Binary>() const {
  return type_as_Binary();
}

template<> inline const org::apache::arrow::flatbuf::Utf8 *SparseTensor::type_as<org::apache::arrow::flatbuf::Utf8>() const {
  return type_as_Utf8();
}

template<> inline const org::apache::arrow::flatbuf::Bool *SparseTensor::type_as<org::apache::arrow::flatbuf::Bool>() const {
  return type_as_Bool();
}

template<> inline const org::apache::arrow::flatbuf::Decimal *SparseTensor::type_as<org::apache::arrow::flatbuf::Decimal>() const {
  return type_as_Decimal();
}

template<> inline const org::apache::arrow::flatbuf::Date *SparseTensor::type_as<org::apache::arrow::flatbuf::Date>() const {
  return type_as_Date();
}

template<> inline const org::apache::arrow::flatbuf::Time *SparseTensor::type_as<org::apache::arrow::flatbuf::Time>() const {
  return type_as_Time();
}

template<> inline const org::apache::arrow::flatbuf::Timestamp *SparseTensor::type_as<org::apache::arrow::flatbuf::Timestamp>() const {
  return type_as_Timestamp();
}

template<> inline const org::apache::arrow::flatbuf::Interval *SparseTensor::type_as<org::apache::arrow::flatbuf::Interval>() const {
  return type_as_Interval();
}

template<> inline const org::apache::arrow::flatbuf::List *SparseTensor::type_as<org::apache::arrow::flatbuf::List>() const {
  return type_as_List();
}

template<> inline const org::apache::arrow::flatbuf::Struct_ *SparseTensor::type_as<org::apache::arrow::flatbuf::Struct_>() const {
  return type_as_Struct_();
}

template<> inline const org::apache::arrow::flatbuf::Union *SparseTensor::type_as<org::apache::arrow::flatbuf::Union>() const {
  return type_as_Union();
}

template<> inline const org::apache::arrow::flatbuf::FixedSizeBinary *SparseTensor::type_as<org::apache::arrow::flatbuf::FixedSizeBinary>() const {
  return type_as_FixedSizeBinary();
}
734
735template<> inline const org::apache::arrow::flatbuf::FixedSizeList *SparseTensor::type_as<org::apache::arrow::flatbuf::FixedSizeList>() const {
736 return type_as_FixedSizeList();
737}
738
739template<> inline const org::apache::arrow::flatbuf::Map *SparseTensor::type_as<org::apache::arrow::flatbuf::Map>() const {
740 return type_as_Map();
741}
742
743template<> inline const org::apache::arrow::flatbuf::Duration *SparseTensor::type_as<org::apache::arrow::flatbuf::Duration>() const {
744 return type_as_Duration();
745}
746
747template<> inline const org::apache::arrow::flatbuf::LargeBinary *SparseTensor::type_as<org::apache::arrow::flatbuf::LargeBinary>() const {
748 return type_as_LargeBinary();
749}
750
751template<> inline const org::apache::arrow::flatbuf::LargeUtf8 *SparseTensor::type_as<org::apache::arrow::flatbuf::LargeUtf8>() const {
752 return type_as_LargeUtf8();
753}
754
755template<> inline const org::apache::arrow::flatbuf::LargeList *SparseTensor::type_as<org::apache::arrow::flatbuf::LargeList>() const {
756 return type_as_LargeList();
757}
758
/// Typed accessors for the `sparseIndex` union field of SparseTensor.
/// Each specialization forwards to the matching `sparseIndex_as_X()` member,
/// which returns the payload pointer when `sparseIndex_type()` matches and
/// nullptr otherwise.
template<> inline const org::apache::arrow::flatbuf::SparseTensorIndexCOO *SparseTensor::sparseIndex_as<org::apache::arrow::flatbuf::SparseTensorIndexCOO>() const {
  return sparseIndex_as_SparseTensorIndexCOO();
}

template<> inline const org::apache::arrow::flatbuf::SparseMatrixIndexCSX *SparseTensor::sparseIndex_as<org::apache::arrow::flatbuf::SparseMatrixIndexCSX>() const {
  return sparseIndex_as_SparseMatrixIndexCSX();
}

template<> inline const org::apache::arrow::flatbuf::SparseTensorIndexCSF *SparseTensor::sparseIndex_as<org::apache::arrow::flatbuf::SparseTensorIndexCSF>() const {
  return sparseIndex_as_SparseTensorIndexCSF();
}
770
771struct SparseTensorBuilder {
772 typedef SparseTensor Table;
773 flatbuffers::FlatBufferBuilder &fbb_;
774 flatbuffers::uoffset_t start_;
775 void add_type_type(org::apache::arrow::flatbuf::Type type_type) {
776 fbb_.AddElement<uint8_t>(SparseTensor::VT_TYPE_TYPE, static_cast<uint8_t>(type_type), 0);
777 }
778 void add_type(flatbuffers::Offset<void> type) {
779 fbb_.AddOffset(SparseTensor::VT_TYPE, type);
780 }
781 void add_shape(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<org::apache::arrow::flatbuf::TensorDim>>> shape) {
782 fbb_.AddOffset(SparseTensor::VT_SHAPE, shape);
783 }
784 void add_non_zero_length(int64_t non_zero_length) {
785 fbb_.AddElement<int64_t>(SparseTensor::VT_NON_ZERO_LENGTH, non_zero_length, 0);
786 }
787 void add_sparseIndex_type(org::apache::arrow::flatbuf::SparseTensorIndex sparseIndex_type) {
788 fbb_.AddElement<uint8_t>(SparseTensor::VT_SPARSEINDEX_TYPE, static_cast<uint8_t>(sparseIndex_type), 0);
789 }
790 void add_sparseIndex(flatbuffers::Offset<void> sparseIndex) {
791 fbb_.AddOffset(SparseTensor::VT_SPARSEINDEX, sparseIndex);
792 }
793 void add_data(const org::apache::arrow::flatbuf::Buffer *data) {
794 fbb_.AddStruct(SparseTensor::VT_DATA, data);
795 }
796 explicit SparseTensorBuilder(flatbuffers::FlatBufferBuilder &_fbb)
797 : fbb_(_fbb) {
798 start_ = fbb_.StartTable();
799 }
800 SparseTensorBuilder &operator=(const SparseTensorBuilder &);
801 flatbuffers::Offset<SparseTensor> Finish() {
802 const auto end = fbb_.EndTable(start_);
803 auto o = flatbuffers::Offset<SparseTensor>(end);
804 fbb_.Required(o, SparseTensor::VT_TYPE);
805 fbb_.Required(o, SparseTensor::VT_SHAPE);
806 fbb_.Required(o, SparseTensor::VT_SPARSEINDEX);
807 fbb_.Required(o, SparseTensor::VT_DATA);
808 return o;
809 }
810};
811
812inline flatbuffers::Offset<SparseTensor> CreateSparseTensor(
813 flatbuffers::FlatBufferBuilder &_fbb,
814 org::apache::arrow::flatbuf::Type type_type = org::apache::arrow::flatbuf::Type::NONE,
815 flatbuffers::Offset<void> type = 0,
816 flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<org::apache::arrow::flatbuf::TensorDim>>> shape = 0,
817 int64_t non_zero_length = 0,
818 org::apache::arrow::flatbuf::SparseTensorIndex sparseIndex_type = org::apache::arrow::flatbuf::SparseTensorIndex::NONE,
819 flatbuffers::Offset<void> sparseIndex = 0,
820 const org::apache::arrow::flatbuf::Buffer *data = 0) {
821 SparseTensorBuilder builder_(_fbb);
822 builder_.add_non_zero_length(non_zero_length);
823 builder_.add_data(data);
824 builder_.add_sparseIndex(sparseIndex);
825 builder_.add_shape(shape);
826 builder_.add_type(type);
827 builder_.add_sparseIndex_type(sparseIndex_type);
828 builder_.add_type_type(type_type);
829 return builder_.Finish();
830}
831
832inline flatbuffers::Offset<SparseTensor> CreateSparseTensorDirect(
833 flatbuffers::FlatBufferBuilder &_fbb,
834 org::apache::arrow::flatbuf::Type type_type = org::apache::arrow::flatbuf::Type::NONE,
835 flatbuffers::Offset<void> type = 0,
836 const std::vector<flatbuffers::Offset<org::apache::arrow::flatbuf::TensorDim>> *shape = nullptr,
837 int64_t non_zero_length = 0,
838 org::apache::arrow::flatbuf::SparseTensorIndex sparseIndex_type = org::apache::arrow::flatbuf::SparseTensorIndex::NONE,
839 flatbuffers::Offset<void> sparseIndex = 0,
840 const org::apache::arrow::flatbuf::Buffer *data = 0) {
841 auto shape__ = shape ? _fbb.CreateVector<flatbuffers::Offset<org::apache::arrow::flatbuf::TensorDim>>(*shape) : 0;
842 return org::apache::arrow::flatbuf::CreateSparseTensor(
843 _fbb,
844 type_type,
845 type,
846 shape__,
847 non_zero_length,
848 sparseIndex_type,
849 sparseIndex,
850 data);
851}
852
/// Verifies that `obj` is a valid table for the SparseTensorIndex union
/// member selected by `type`. NONE is trivially valid (no payload), and
/// unknown discriminant values are accepted (default: true) so buffers
/// written by newer schema revisions still verify.
inline bool VerifySparseTensorIndex(flatbuffers::Verifier &verifier, const void *obj, SparseTensorIndex type) {
  switch (type) {
    case SparseTensorIndex::NONE: {
      return true;  // absent payload is valid
    }
    case SparseTensorIndex::SparseTensorIndexCOO: {
      auto ptr = reinterpret_cast<const org::apache::arrow::flatbuf::SparseTensorIndexCOO *>(obj);
      return verifier.VerifyTable(ptr);
    }
    case SparseTensorIndex::SparseMatrixIndexCSX: {
      auto ptr = reinterpret_cast<const org::apache::arrow::flatbuf::SparseMatrixIndexCSX *>(obj);
      return verifier.VerifyTable(ptr);
    }
    case SparseTensorIndex::SparseTensorIndexCSF: {
      auto ptr = reinterpret_cast<const org::apache::arrow::flatbuf::SparseTensorIndexCSF *>(obj);
      return verifier.VerifyTable(ptr);
    }
    default: return true;  // forward compatibility: skip unknown union members
  }
}
873
/// Verifies a vector of SparseTensorIndex unions. `values` and `types` are
/// parallel vectors (payloads and discriminants): both must be present or
/// both absent, lengths must match, and every element must verify against
/// its own discriminant.
inline bool VerifySparseTensorIndexVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector<flatbuffers::Offset<void>> *values, const flatbuffers::Vector<uint8_t> *types) {
  if (!values || !types) return !values && !types;  // ok only if both are absent
  if (values->size() != types->size()) return false;
  for (flatbuffers::uoffset_t i = 0; i < values->size(); ++i) {
    if (!VerifySparseTensorIndex(
        verifier, values->Get(i), types->GetEnum<SparseTensorIndex>(i))) {
      return false;
    }
  }
  return true;
}
885
/// Returns the root SparseTensor of a finished FlatBuffer. `buf` must point
/// at the start of the buffer; no validation is performed (use
/// VerifySparseTensorBuffer first for untrusted data).
inline const org::apache::arrow::flatbuf::SparseTensor *GetSparseTensor(const void *buf) {
  return flatbuffers::GetRoot<org::apache::arrow::flatbuf::SparseTensor>(buf);
}
889
/// Same as GetSparseTensor, but for buffers that begin with a 32-bit size
/// prefix (as produced by FinishSizePrefixedSparseTensorBuffer).
inline const org::apache::arrow::flatbuf::SparseTensor *GetSizePrefixedSparseTensor(const void *buf) {
  return flatbuffers::GetSizePrefixedRoot<org::apache::arrow::flatbuf::SparseTensor>(buf);
}
893
/// Verifies that the verifier's buffer contains a structurally valid
/// SparseTensor root (nullptr = no file identifier check).
inline bool VerifySparseTensorBuffer(
    flatbuffers::Verifier &verifier) {
  return verifier.VerifyBuffer<org::apache::arrow::flatbuf::SparseTensor>(nullptr);
}
898
/// Same as VerifySparseTensorBuffer, but for size-prefixed buffers
/// (nullptr = no file identifier check).
inline bool VerifySizePrefixedSparseTensorBuffer(
    flatbuffers::Verifier &verifier) {
  return verifier.VerifySizePrefixedBuffer<org::apache::arrow::flatbuf::SparseTensor>(nullptr);
}
903
/// Finalizes the buffer with `root` as the SparseTensor root table.
/// After this call the builder's buffer is complete and ready to read.
inline void FinishSparseTensorBuffer(
    flatbuffers::FlatBufferBuilder &fbb,
    flatbuffers::Offset<org::apache::arrow::flatbuf::SparseTensor> root) {
  fbb.Finish(root);
}
909
/// Same as FinishSparseTensorBuffer, but also writes a 32-bit size prefix
/// ahead of the buffer (read back with GetSizePrefixedSparseTensor).
inline void FinishSizePrefixedSparseTensorBuffer(
    flatbuffers::FlatBufferBuilder &fbb,
    flatbuffers::Offset<org::apache::arrow::flatbuf::SparseTensor> root) {
  fbb.FinishSizePrefixed(root);
}
915
916} // namespace flatbuf
917} // namespace arrow
918} // namespace apache
919} // namespace org
920
921#endif // FLATBUFFERS_GENERATED_SPARSETENSOR_ORG_APACHE_ARROW_FLATBUF_H_