// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
17 | ||
18 | import '../../jest-extensions'; | |
19 | import * as generate from '../../generate-test-data'; | |
20 | import { | |
21 | Table, Schema, Field, DataType, Dictionary, Int32, Float32, Utf8, Null, Int32Vector | |
22 | } from 'apache-arrow'; | |
23 | ||
24 | const toSchema = (...xs: [string, DataType][]) => new Schema(xs.map((x) => new Field(...x))); | |
25 | const schema1 = toSchema(['a', new Int32()], ['b', new Float32()], ['c', new Dictionary(new Utf8(), new Int32())]); | |
26 | const schema2 = toSchema(['d', new Int32()], ['e', new Float32()], ['f', new Utf8()]); | |
27 | const nullSchema = new Schema([new Field('null', new Null())]); | |
28 | ||
29 | schema1.metadata.set('foo', 'bar'); | |
30 | ||
31 | function createTable<T extends { [key: string]: DataType } = any>(schema: Schema<T>, chunkLengths: number[]) { | |
32 | return generate.table(chunkLengths, schema).table; | |
33 | } | |
34 | ||
35 | describe('Table#serialize()', () => { | |
36 | ||
37 | test(`doesn't swap the order of buffers that share the same underlying ArrayBuffer but are in a different order`, () => { | |
38 | const values = new Int32Array([0, 1, 2, 3, 4, 5, 6, 7]); | |
39 | const expected = values.slice(); | |
40 | const x = Int32Vector.from(values.subarray(4, 8)); // back | |
41 | const y = Int32Vector.from(values.subarray(0, 4)); // front | |
42 | const source = Table.new([x, y], ['x', 'y']); | |
43 | const table = Table.from(source.serialize()); | |
44 | expect(table.getColumn('x').toArray()).toEqual(expected.subarray(4, 8)); | |
45 | expect(table.getColumn('y').toArray()).toEqual(expected.subarray(0, 4)); | |
46 | }); | |
47 | ||
48 | test(`Table#empty round-trips through serialization`, () => { | |
49 | const source = Table.empty(); | |
50 | source.schema.metadata.set('foo', 'bar'); | |
51 | expect(source).toHaveLength(0); | |
52 | expect(source.numCols).toBe(0); | |
53 | const result = Table.from(source.serialize()); | |
54 | expect(result).toEqualTable(source); | |
55 | expect(result.schema.metadata.get('foo')).toEqual('bar'); | |
56 | }); | |
57 | ||
58 | test(`Schema metadata round-trips through serialization`, () => { | |
59 | const source = createTable(schema1, [20]); | |
60 | expect(source).toHaveLength(20); | |
61 | expect(source.numCols).toBe(3); | |
62 | const result = Table.from(source.serialize()); | |
63 | expect(result).toEqualTable(source); | |
64 | expect(result.schema.metadata.get('foo')).toEqual('bar'); | |
65 | }); | |
66 | ||
67 | test(`Table#assign an empty Table to a Table with a zero-length Null column round-trips through serialization`, () => { | |
68 | const table1 = new Table(nullSchema); | |
69 | const table2 = Table.empty(); | |
70 | const source = table1.assign(table2); | |
71 | expect(source).toHaveLength(0); | |
72 | expect(source.numCols).toBe(1); | |
73 | const result = Table.from(source.serialize()); | |
74 | expect(result).toEqualTable(source); | |
75 | }); | |
76 | ||
77 | const chunkLengths = [] as number[]; | |
78 | for (let i = -1; ++i < 3;) { | |
79 | chunkLengths[i * 2] = (Math.random() * 100) | 0; | |
80 | chunkLengths[i * 2 + 1] = 0; | |
81 | const table = <T extends { [key: string]: DataType } = any>(schema: Schema<T>) => createTable(schema, chunkLengths); | |
82 | test(`Table#select round-trips through serialization`, () => { | |
83 | const source = table(schema1).select('a', 'c'); | |
84 | expect(source.numCols).toBe(2); | |
85 | const result = Table.from(source.serialize()); | |
86 | expect(result).toEqualTable(source); | |
87 | }); | |
88 | test(`Table#selectAt round-trips through serialization`, () => { | |
89 | const source = table(schema1).selectAt(0, 2); | |
90 | expect(source.numCols).toBe(2); | |
91 | const result = Table.from(source.serialize()); | |
92 | expect(result).toEqualTable(source); | |
93 | }); | |
94 | test(`Table#assign round-trips through serialization`, () => { | |
95 | const source = table(schema1).assign(table(schema2)); | |
96 | expect(source.numCols).toBe(6); | |
97 | const result = Table.from(source.serialize()); | |
98 | expect(result).toEqualTable(source); | |
99 | expect(result.schema.metadata.get('foo')).toEqual('bar'); | |
100 | }); | |
101 | test(`Table#assign with an empty table round-trips through serialization`, () => { | |
102 | const table1 = table(schema1); | |
103 | const source = table1.assign(Table.empty()); | |
104 | expect(source.numCols).toBe(table1.numCols); | |
105 | expect(source).toHaveLength(table1.length); | |
106 | const result = Table.from(source.serialize()); | |
107 | expect(result).toEqualTable(source); | |
108 | expect(result.schema.metadata.get('foo')).toEqual('bar'); | |
109 | }); | |
110 | test(`Table#assign with a zero-length Null column round-trips through serialization`, () => { | |
111 | const table1 = new Table(nullSchema); | |
112 | const table2 = table(schema1); | |
113 | const source = table1.assign(table2); | |
114 | expect(source).toHaveLength(table2.length); | |
115 | expect(source.numCols).toBe(4); | |
116 | const result = Table.from(source.serialize()); | |
117 | expect(result).toEqualTable(source); | |
118 | expect(result.schema.metadata.get('foo')).toEqual('bar'); | |
119 | }); | |
120 | test(`Table#assign with different lengths and number of chunks round-trips through serialization`, () => { | |
121 | const table1 = table(schema1); | |
122 | const table2 = createTable(schema2, [102, 4, 10, 97, 10, 2, 4]); | |
123 | const source = table1.assign(table2); | |
124 | expect(source.numCols).toBe(6); | |
125 | expect(source).toHaveLength(Math.max(table1.length, table2.length)); | |
126 | const result = Table.from(source.serialize()); | |
127 | expect(result).toEqualTable(source); | |
128 | expect(result.schema.metadata.get('foo')).toEqual('bar'); | |
129 | }); | |
130 | test(`Table#select with Table#assign the result of Table#selectAt round-trips through serialization`, () => { | |
131 | const table1 = table(schema1); | |
132 | const table2 = table(schema2); | |
133 | const source = table1.select('a', 'c').assign(table2.selectAt(2)); | |
134 | expect(source.numCols).toBe(3); | |
135 | const result = Table.from(source.serialize()); | |
136 | expect(result).toEqualTable(source); | |
137 | expect(result.schema.metadata.get('foo')).toEqual('bar'); | |
138 | }); | |
139 | test(`Table#slice round-trips through serialization`, () => { | |
140 | const table1 = table(schema1); | |
141 | const length = table1.length; | |
142 | const [begin, end] = [length * .25, length * .75].map((x) => x | 0); | |
143 | const source = table1.slice(begin, end); | |
144 | expect(source.numCols).toBe(3); | |
145 | expect(source).toHaveLength(end - begin); | |
146 | const result = Table.from(source.serialize()); | |
147 | expect(result).toEqualTable(source); | |
148 | expect(result.schema.metadata.get('foo')).toEqual('bar'); | |
149 | }); | |
150 | test(`Table#concat of two slices round-trips through serialization`, () => { | |
151 | const table1 = table(schema1); | |
152 | const length = table1.length; | |
153 | const [begin1, end1] = [length * .10, length * .20].map((x) => x | 0); | |
154 | const [begin2, end2] = [length * .80, length * .90].map((x) => x | 0); | |
155 | const slice1 = table1.slice(begin1, end1); | |
156 | const slice2 = table1.slice(begin2, end2); | |
157 | const source = slice1.concat(slice2); | |
158 | expect(slice1).toHaveLength(end1 - begin1); | |
159 | expect(slice2).toHaveLength(end2 - begin2); | |
160 | expect(source).toHaveLength((end1 - begin1) + (end2 - begin2)); | |
161 | [slice1, slice2, source].forEach((x) => expect(x.numCols).toBe(3)); | |
162 | const result = Table.from(source.serialize()); | |
163 | expect(result).toEqualTable(source); | |
164 | expect(result.schema.metadata.get('foo')).toEqual('bar'); | |
165 | }); | |
166 | } | |
167 | }); |