// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Code generated by the FlatBuffers compiler. DO NOT EDIT.

package flatbuf

import (
	flatbuffers "github.com/google/flatbuffers/go"
)

/// ----------------------------------------------------------------------
/// EXPERIMENTAL: Data structures for sparse tensors
/// Coordinate (COO) format of sparse tensor index.
///
/// COO's index list are represented as a NxM matrix,
/// where N is the number of non-zero values,
/// and M is the number of dimensions of a sparse tensor.
///
/// indicesBuffer stores the location and size of the data of this indices
/// matrix. The value type and the stride of the indices matrix is
/// specified in indicesType and indicesStrides fields.
///
/// For example, let X be a 2x3x4x5 tensor, and it has the following
/// 6 non-zero values:
/// ```text
///   X[0, 1, 2, 0] := 1
///   X[1, 1, 2, 3] := 2
///   X[0, 2, 1, 0] := 3
///   X[0, 1, 3, 0] := 4
///   X[0, 1, 2, 1] := 5
///   X[1, 2, 0, 4] := 6
/// ```
/// In COO format, the index matrix of X is the following 4x6 matrix:
/// ```text
///   [[0, 0, 0, 0, 1, 1],
///    [1, 1, 1, 2, 1, 2],
///    [2, 2, 3, 1, 2, 0],
///    [0, 1, 0, 0, 3, 4]]
/// ```
/// When isCanonical is true, the indices is sorted in lexicographical order
/// (row-major order), and it does not have duplicated entries. Otherwise,
/// the indices may not be sorted, or may have duplicated entries.
// SparseTensorIndexCOO is a read accessor over a FlatBuffers-encoded
// SparseTensorIndexCOO table. The zero value is not usable; position it
// with Init or obtain one via GetRootAsSparseTensorIndexCOO.
type SparseTensorIndexCOO struct {
	_tab flatbuffers.Table
}
60 | ||
61 | func GetRootAsSparseTensorIndexCOO(buf []byte, offset flatbuffers.UOffsetT) *SparseTensorIndexCOO { | |
62 | n := flatbuffers.GetUOffsetT(buf[offset:]) | |
63 | x := &SparseTensorIndexCOO{} | |
64 | x.Init(buf, n+offset) | |
65 | return x | |
66 | } | |
67 | ||
// Init points the accessor at table position i within buf. An existing
// accessor may be re-initialized to walk a different buffer or table.
func (rcv *SparseTensorIndexCOO) Init(buf []byte, i flatbuffers.UOffsetT) {
	rcv._tab.Bytes = buf
	rcv._tab.Pos = i
}
72 | ||
// Table returns the underlying FlatBuffers table of this accessor.
func (rcv *SparseTensorIndexCOO) Table() flatbuffers.Table {
	return rcv._tab
}
76 | ||
77 | /// The type of values in indicesBuffer | |
78 | func (rcv *SparseTensorIndexCOO) IndicesType(obj *Int) *Int { | |
79 | o := flatbuffers.UOffsetT(rcv._tab.Offset(4)) | |
80 | if o != 0 { | |
81 | x := rcv._tab.Indirect(o + rcv._tab.Pos) | |
82 | if obj == nil { | |
83 | obj = new(Int) | |
84 | } | |
85 | obj.Init(rcv._tab.Bytes, x) | |
86 | return obj | |
87 | } | |
88 | return nil | |
89 | } | |
90 | ||
91 | /// The type of values in indicesBuffer | |
92 | /// Non-negative byte offsets to advance one value cell along each dimension | |
93 | /// If omitted, default to row-major order (C-like). | |
94 | func (rcv *SparseTensorIndexCOO) IndicesStrides(j int) int64 { | |
95 | o := flatbuffers.UOffsetT(rcv._tab.Offset(6)) | |
96 | if o != 0 { | |
97 | a := rcv._tab.Vector(o) | |
98 | return rcv._tab.GetInt64(a + flatbuffers.UOffsetT(j*8)) | |
99 | } | |
100 | return 0 | |
101 | } | |
102 | ||
103 | func (rcv *SparseTensorIndexCOO) IndicesStridesLength() int { | |
104 | o := flatbuffers.UOffsetT(rcv._tab.Offset(6)) | |
105 | if o != 0 { | |
106 | return rcv._tab.VectorLen(o) | |
107 | } | |
108 | return 0 | |
109 | } | |
110 | ||
111 | /// Non-negative byte offsets to advance one value cell along each dimension | |
112 | /// If omitted, default to row-major order (C-like). | |
113 | func (rcv *SparseTensorIndexCOO) MutateIndicesStrides(j int, n int64) bool { | |
114 | o := flatbuffers.UOffsetT(rcv._tab.Offset(6)) | |
115 | if o != 0 { | |
116 | a := rcv._tab.Vector(o) | |
117 | return rcv._tab.MutateInt64(a+flatbuffers.UOffsetT(j*8), n) | |
118 | } | |
119 | return false | |
120 | } | |
121 | ||
122 | /// The location and size of the indices matrix's data | |
123 | func (rcv *SparseTensorIndexCOO) IndicesBuffer(obj *Buffer) *Buffer { | |
124 | o := flatbuffers.UOffsetT(rcv._tab.Offset(8)) | |
125 | if o != 0 { | |
126 | x := o + rcv._tab.Pos | |
127 | if obj == nil { | |
128 | obj = new(Buffer) | |
129 | } | |
130 | obj.Init(rcv._tab.Bytes, x) | |
131 | return obj | |
132 | } | |
133 | return nil | |
134 | } | |
135 | ||
136 | /// The location and size of the indices matrix's data | |
137 | /// This flag is true if and only if the indices matrix is sorted in | |
138 | /// row-major order, and does not have duplicated entries. | |
139 | /// This sort order is the same as of Tensorflow's SparseTensor, | |
140 | /// but it is inverse order of SciPy's canonical coo_matrix | |
141 | /// (SciPy employs column-major order for its coo_matrix). | |
142 | func (rcv *SparseTensorIndexCOO) IsCanonical() bool { | |
143 | o := flatbuffers.UOffsetT(rcv._tab.Offset(10)) | |
144 | if o != 0 { | |
145 | return rcv._tab.GetBool(o + rcv._tab.Pos) | |
146 | } | |
147 | return false | |
148 | } | |
149 | ||
/// This flag is true if and only if the indices matrix is sorted in
/// row-major order, and does not have duplicated entries.
/// This sort order is the same as of Tensorflow's SparseTensor,
/// but it is inverse order of SciPy's canonical coo_matrix
/// (SciPy employs column-major order for its coo_matrix).
// MutateIsCanonical overwrites isCanonical in place; it returns false when
// the field is absent from the buffer (absent fields cannot be mutated).
func (rcv *SparseTensorIndexCOO) MutateIsCanonical(n bool) bool {
	return rcv._tab.MutateBoolSlot(10, n)
}
158 | ||
// SparseTensorIndexCOOStart begins a new SparseTensorIndexCOO table with
// 4 field slots on the builder.
func SparseTensorIndexCOOStart(builder *flatbuffers.Builder) {
	builder.StartObject(4)
}
// SparseTensorIndexCOOAddIndicesType writes the indicesType table offset
// into field slot 0 of the table under construction.
func SparseTensorIndexCOOAddIndicesType(builder *flatbuffers.Builder, indicesType flatbuffers.UOffsetT) {
	builder.PrependUOffsetTSlot(0, flatbuffers.UOffsetT(indicesType), 0)
}
// SparseTensorIndexCOOAddIndicesStrides writes the indicesStrides vector
// offset into field slot 1 of the table under construction.
func SparseTensorIndexCOOAddIndicesStrides(builder *flatbuffers.Builder, indicesStrides flatbuffers.UOffsetT) {
	builder.PrependUOffsetTSlot(1, flatbuffers.UOffsetT(indicesStrides), 0)
}
// SparseTensorIndexCOOStartIndicesStridesVector starts the indicesStrides
// vector: numElems int64 elements (8 bytes each, 8-byte aligned).
func SparseTensorIndexCOOStartIndicesStridesVector(builder *flatbuffers.Builder, numElems int) flatbuffers.UOffsetT {
	return builder.StartVector(8, numElems, 8)
}
// SparseTensorIndexCOOAddIndicesBuffer writes the inline indicesBuffer
// struct into field slot 2 of the table under construction.
func SparseTensorIndexCOOAddIndicesBuffer(builder *flatbuffers.Builder, indicesBuffer flatbuffers.UOffsetT) {
	builder.PrependStructSlot(2, flatbuffers.UOffsetT(indicesBuffer), 0)
}
// SparseTensorIndexCOOAddIsCanonical writes isCanonical into field slot 3;
// the value is elided from the buffer when it equals the default (false).
func SparseTensorIndexCOOAddIsCanonical(builder *flatbuffers.Builder, isCanonical bool) {
	builder.PrependBoolSlot(3, isCanonical, false)
}
// SparseTensorIndexCOOEnd finishes the table and returns its offset within
// the builder's buffer.
func SparseTensorIndexCOOEnd(builder *flatbuffers.Builder) flatbuffers.UOffsetT {
	return builder.EndObject()
}