]>
Commit | Line | Data |
---|---|---|
7c673cae FG |
1 | /* |
2 | * Fast buffer writer with spare management. | |
3 | */ | |
4 | ||
5 | #include "duk_internal.h" | |
6 | ||
7 | /* | |
8 | * Macro support functions (use only macros in calling code) | |
9 | */ | |
10 | ||
11 | DUK_LOCAL void duk__bw_update_ptrs(duk_hthread *thr, duk_bufwriter_ctx *bw_ctx, duk_size_t curr_offset, duk_size_t new_length) { | |
12 | duk_uint8_t *p; | |
13 | ||
14 | DUK_ASSERT(thr != NULL); | |
15 | DUK_ASSERT(bw_ctx != NULL); | |
16 | DUK_UNREF(thr); | |
17 | ||
18 | p = (duk_uint8_t *) DUK_HBUFFER_DYNAMIC_GET_DATA_PTR(thr->heap, bw_ctx->buf); | |
19 | DUK_ASSERT(p != NULL || (DUK_HBUFFER_DYNAMIC_GET_SIZE(bw_ctx->buf) == 0 && curr_offset == 0 && new_length == 0)); | |
20 | bw_ctx->p = p + curr_offset; | |
21 | bw_ctx->p_base = p; | |
22 | bw_ctx->p_limit = p + new_length; | |
23 | } | |
24 | ||
25 | DUK_INTERNAL void duk_bw_init(duk_hthread *thr, duk_bufwriter_ctx *bw_ctx, duk_hbuffer_dynamic *h_buf) { | |
26 | ||
27 | DUK_ASSERT(thr != NULL); | |
28 | DUK_ASSERT(bw_ctx != NULL); | |
29 | DUK_ASSERT(h_buf != NULL); | |
30 | DUK_UNREF(thr); | |
31 | ||
32 | bw_ctx->buf = h_buf; | |
33 | duk__bw_update_ptrs(thr, bw_ctx, 0, DUK_HBUFFER_DYNAMIC_GET_SIZE(h_buf)); | |
34 | } | |
35 | ||
36 | DUK_INTERNAL void duk_bw_init_pushbuf(duk_hthread *thr, duk_bufwriter_ctx *bw_ctx, duk_size_t buf_size) { | |
37 | duk_context *ctx; | |
38 | ||
39 | DUK_ASSERT(thr != NULL); | |
40 | DUK_ASSERT(bw_ctx != NULL); | |
41 | ctx = (duk_context *) thr; | |
42 | ||
43 | (void) duk_push_dynamic_buffer(ctx, buf_size); | |
44 | bw_ctx->buf = (duk_hbuffer_dynamic *) duk_get_hbuffer(ctx, -1); | |
45 | duk__bw_update_ptrs(thr, bw_ctx, 0, buf_size); | |
46 | } | |
47 | ||
48 | /* Resize target buffer for requested size. Called by the macro only when the | |
49 | * fast path test (= there is space) fails. | |
50 | */ | |
51 | DUK_INTERNAL duk_uint8_t *duk_bw_resize(duk_hthread *thr, duk_bufwriter_ctx *bw_ctx, duk_size_t sz) { | |
52 | duk_size_t curr_off; | |
53 | duk_size_t add_sz; | |
54 | duk_size_t new_sz; | |
55 | ||
56 | DUK_ASSERT(thr != NULL); | |
57 | DUK_ASSERT(bw_ctx != NULL); | |
58 | ||
59 | /* We could do this operation without caller updating bw_ctx->ptr, | |
60 | * but by writing it back here we can share code better. | |
61 | */ | |
62 | ||
63 | curr_off = (duk_size_t) (bw_ctx->p - bw_ctx->p_base); | |
64 | add_sz = (curr_off >> DUK_BW_SPARE_SHIFT) + DUK_BW_SPARE_ADD; | |
65 | new_sz = curr_off + sz + add_sz; | |
66 | if (new_sz < curr_off) { | |
67 | /* overflow */ | |
11fdf7f2 | 68 | DUK_ERROR_RANGE(thr, DUK_STR_BUFFER_TOO_LONG); |
7c673cae FG |
69 | return NULL; /* not reachable */ |
70 | } | |
71 | #if 0 /* for manual torture testing: tight allocation, useful with valgrind */ | |
72 | new_sz = curr_off + sz; | |
73 | #endif | |
74 | ||
75 | /* This is important to ensure dynamic buffer data pointer is not | |
76 | * NULL (which is possible if buffer size is zero), which in turn | |
77 | * causes portability issues with e.g. memmove() and memcpy(). | |
78 | */ | |
79 | DUK_ASSERT(new_sz >= 1); | |
80 | ||
81 | DUK_DD(DUK_DDPRINT("resize bufferwriter from %ld to %ld (add_sz=%ld)", (long) curr_off, (long) new_sz, (long) add_sz)); | |
82 | ||
83 | duk_hbuffer_resize(thr, bw_ctx->buf, new_sz); | |
84 | duk__bw_update_ptrs(thr, bw_ctx, curr_off, new_sz); | |
85 | return bw_ctx->p; | |
86 | } | |
87 | ||
88 | /* Make buffer compact, matching current written size. */ | |
89 | DUK_INTERNAL void duk_bw_compact(duk_hthread *thr, duk_bufwriter_ctx *bw_ctx) { | |
90 | duk_size_t len; | |
91 | ||
92 | DUK_ASSERT(thr != NULL); | |
93 | DUK_ASSERT(bw_ctx != NULL); | |
94 | DUK_UNREF(thr); | |
95 | ||
96 | len = (duk_size_t) (bw_ctx->p - bw_ctx->p_base); | |
97 | duk_hbuffer_resize(thr, bw_ctx->buf, len); | |
98 | duk__bw_update_ptrs(thr, bw_ctx, len, len); | |
99 | } | |
100 | ||
101 | DUK_INTERNAL void duk_bw_write_raw_slice(duk_hthread *thr, duk_bufwriter_ctx *bw, duk_size_t src_off, duk_size_t len) { | |
102 | duk_uint8_t *p_base; | |
103 | ||
104 | DUK_ASSERT(thr != NULL); | |
105 | DUK_ASSERT(bw != NULL); | |
106 | DUK_ASSERT(src_off <= DUK_BW_GET_SIZE(thr, bw)); | |
107 | DUK_ASSERT(len <= DUK_BW_GET_SIZE(thr, bw)); | |
108 | DUK_ASSERT(src_off + len <= DUK_BW_GET_SIZE(thr, bw)); | |
109 | DUK_UNREF(thr); | |
110 | ||
111 | p_base = bw->p_base; | |
112 | DUK_MEMCPY((void *) bw->p, | |
113 | (const void *) (p_base + src_off), | |
11fdf7f2 | 114 | (size_t) len); |
7c673cae FG |
115 | bw->p += len; |
116 | } | |
117 | ||
118 | DUK_INTERNAL void duk_bw_write_ensure_slice(duk_hthread *thr, duk_bufwriter_ctx *bw, duk_size_t src_off, duk_size_t len) { | |
119 | DUK_ASSERT(thr != NULL); | |
120 | DUK_ASSERT(bw != NULL); | |
121 | DUK_ASSERT(src_off <= DUK_BW_GET_SIZE(thr, bw)); | |
122 | DUK_ASSERT(len <= DUK_BW_GET_SIZE(thr, bw)); | |
123 | DUK_ASSERT(src_off + len <= DUK_BW_GET_SIZE(thr, bw)); | |
124 | DUK_UNREF(thr); | |
125 | ||
126 | DUK_BW_ENSURE(thr, bw, len); | |
127 | duk_bw_write_raw_slice(thr, bw, src_off, len); | |
128 | } | |
129 | ||
130 | DUK_INTERNAL void duk_bw_insert_raw_bytes(duk_hthread *thr, duk_bufwriter_ctx *bw, duk_size_t dst_off, const duk_uint8_t *buf, duk_size_t len) { | |
131 | duk_uint8_t *p_base; | |
132 | duk_size_t buf_sz, move_sz; | |
133 | ||
134 | DUK_ASSERT(thr != NULL); | |
135 | DUK_ASSERT(bw != NULL); | |
136 | DUK_ASSERT(dst_off <= DUK_BW_GET_SIZE(thr, bw)); | |
137 | DUK_ASSERT(buf != NULL); | |
138 | DUK_UNREF(thr); | |
139 | ||
140 | p_base = bw->p_base; | |
141 | buf_sz = bw->p - p_base; | |
142 | move_sz = buf_sz - dst_off; | |
143 | ||
144 | DUK_ASSERT(p_base != NULL); /* buffer size is >= 1 */ | |
145 | DUK_MEMMOVE((void *) (p_base + dst_off + len), | |
146 | (const void *) (p_base + dst_off), | |
11fdf7f2 | 147 | (size_t) move_sz); |
7c673cae FG |
148 | DUK_MEMCPY((void *) (p_base + dst_off), |
149 | (const void *) buf, | |
11fdf7f2 | 150 | (size_t) len); |
7c673cae FG |
151 | bw->p += len; |
152 | } | |
153 | ||
154 | DUK_INTERNAL void duk_bw_insert_ensure_bytes(duk_hthread *thr, duk_bufwriter_ctx *bw, duk_size_t dst_off, const duk_uint8_t *buf, duk_size_t len) { | |
155 | DUK_ASSERT(thr != NULL); | |
156 | DUK_ASSERT(bw != NULL); | |
157 | DUK_ASSERT(dst_off <= DUK_BW_GET_SIZE(thr, bw)); | |
158 | DUK_ASSERT(buf != NULL); | |
159 | DUK_UNREF(thr); | |
160 | ||
161 | DUK_BW_ENSURE(thr, bw, len); | |
162 | duk_bw_insert_raw_bytes(thr, bw, dst_off, buf, len); | |
163 | } | |
164 | ||
165 | DUK_INTERNAL void duk_bw_insert_raw_slice(duk_hthread *thr, duk_bufwriter_ctx *bw, duk_size_t dst_off, duk_size_t src_off, duk_size_t len) { | |
166 | duk_uint8_t *p_base; | |
167 | duk_size_t buf_sz, move_sz; | |
168 | ||
169 | DUK_ASSERT(thr != NULL); | |
170 | DUK_ASSERT(bw != NULL); | |
171 | DUK_ASSERT(dst_off <= DUK_BW_GET_SIZE(thr, bw)); | |
172 | DUK_ASSERT(src_off <= DUK_BW_GET_SIZE(thr, bw)); | |
173 | DUK_ASSERT(len <= DUK_BW_GET_SIZE(thr, bw)); | |
174 | DUK_ASSERT(src_off + len <= DUK_BW_GET_SIZE(thr, bw)); | |
175 | DUK_UNREF(thr); | |
176 | ||
177 | p_base = bw->p_base; | |
178 | ||
179 | /* Don't support "straddled" source now. */ | |
180 | DUK_ASSERT(dst_off <= src_off || dst_off >= src_off + len); | |
181 | ||
182 | if (dst_off <= src_off) { | |
183 | /* Target is before source. Source offset is expressed as | |
184 | * a "before change" offset. Account for the memmove. | |
185 | */ | |
186 | src_off += len; | |
187 | } | |
188 | ||
189 | buf_sz = bw->p - p_base; | |
190 | move_sz = buf_sz - dst_off; | |
191 | ||
192 | DUK_ASSERT(p_base != NULL); /* buffer size is >= 1 */ | |
193 | DUK_MEMMOVE((void *) (p_base + dst_off + len), | |
194 | (const void *) (p_base + dst_off), | |
11fdf7f2 | 195 | (size_t) move_sz); |
7c673cae FG |
196 | DUK_MEMCPY((void *) (p_base + dst_off), |
197 | (const void *) (p_base + src_off), | |
11fdf7f2 | 198 | (size_t) len); |
7c673cae FG |
199 | bw->p += len; |
200 | } | |
201 | ||
202 | DUK_INTERNAL void duk_bw_insert_ensure_slice(duk_hthread *thr, duk_bufwriter_ctx *bw, duk_size_t dst_off, duk_size_t src_off, duk_size_t len) { | |
203 | DUK_ASSERT(thr != NULL); | |
204 | DUK_ASSERT(bw != NULL); | |
205 | DUK_ASSERT(dst_off <= DUK_BW_GET_SIZE(thr, bw)); | |
206 | DUK_ASSERT(src_off <= DUK_BW_GET_SIZE(thr, bw)); | |
207 | DUK_ASSERT(len <= DUK_BW_GET_SIZE(thr, bw)); | |
208 | DUK_ASSERT(src_off + len <= DUK_BW_GET_SIZE(thr, bw)); | |
209 | DUK_UNREF(thr); | |
210 | ||
211 | /* Don't support "straddled" source now. */ | |
212 | DUK_ASSERT(dst_off <= src_off || dst_off >= src_off + len); | |
213 | ||
214 | DUK_BW_ENSURE(thr, bw, len); | |
215 | duk_bw_insert_raw_slice(thr, bw, dst_off, src_off, len); | |
216 | } | |
217 | ||
218 | DUK_INTERNAL duk_uint8_t *duk_bw_insert_raw_area(duk_hthread *thr, duk_bufwriter_ctx *bw, duk_size_t off, duk_size_t len) { | |
219 | duk_uint8_t *p_base, *p_dst, *p_src; | |
220 | duk_size_t buf_sz, move_sz; | |
221 | ||
222 | DUK_ASSERT(thr != NULL); | |
223 | DUK_ASSERT(bw != NULL); | |
224 | DUK_ASSERT(off <= DUK_BW_GET_SIZE(thr, bw)); | |
225 | DUK_UNREF(thr); | |
226 | ||
227 | p_base = bw->p_base; | |
228 | buf_sz = bw->p - p_base; | |
229 | move_sz = buf_sz - off; | |
230 | p_dst = p_base + off + len; | |
231 | p_src = p_base + off; | |
11fdf7f2 | 232 | DUK_MEMMOVE((void *) p_dst, (const void *) p_src, (size_t) move_sz); |
7c673cae FG |
233 | return p_src; /* point to start of 'reserved area' */ |
234 | } | |
235 | ||
236 | DUK_INTERNAL duk_uint8_t *duk_bw_insert_ensure_area(duk_hthread *thr, duk_bufwriter_ctx *bw, duk_size_t off, duk_size_t len) { | |
237 | DUK_ASSERT(thr != NULL); | |
238 | DUK_ASSERT(bw != NULL); | |
239 | DUK_ASSERT(off <= DUK_BW_GET_SIZE(thr, bw)); | |
240 | DUK_UNREF(thr); | |
241 | ||
242 | DUK_BW_ENSURE(thr, bw, len); | |
243 | return duk_bw_insert_raw_area(thr, bw, off, len); | |
244 | } | |
245 | ||
246 | DUK_INTERNAL void duk_bw_remove_raw_slice(duk_hthread *thr, duk_bufwriter_ctx *bw, duk_size_t off, duk_size_t len) { | |
247 | duk_size_t move_sz; | |
248 | ||
249 | duk_uint8_t *p_base; | |
250 | duk_uint8_t *p_src; | |
251 | duk_uint8_t *p_dst; | |
252 | ||
253 | DUK_ASSERT(thr != NULL); | |
254 | DUK_ASSERT(bw != NULL); | |
255 | DUK_ASSERT(off <= DUK_BW_GET_SIZE(thr, bw)); | |
256 | DUK_ASSERT(len <= DUK_BW_GET_SIZE(thr, bw)); | |
257 | DUK_ASSERT(off + len <= DUK_BW_GET_SIZE(thr, bw)); | |
258 | DUK_UNREF(thr); | |
259 | ||
260 | p_base = bw->p_base; | |
261 | p_dst = p_base + off; | |
262 | p_src = p_dst + len; | |
263 | move_sz = (duk_size_t) (bw->p - p_src); | |
264 | DUK_MEMMOVE((void *) p_dst, | |
265 | (const void *) p_src, | |
11fdf7f2 | 266 | (size_t) move_sz); |
7c673cae FG |
267 | bw->p -= len; |
268 | } | |
269 | ||
/*
 *  Macro support functions for reading/writing raw data.
 *
 *  These are done using memcpy to ensure they're valid even for unaligned
 *  reads/writes on platforms where alignment counts.  On x86 at least gcc
 *  is able to compile these into a bswap+mov.  "Always inline" is used to
 *  ensure these helpers compile to minimal code.
 *
 *  Not really bufwriter related, but currently used together.
 */
280 | ||
281 | DUK_INTERNAL DUK_ALWAYS_INLINE duk_uint16_t duk_raw_read_u16_be(duk_uint8_t **p) { | |
282 | union { | |
283 | duk_uint8_t b[2]; | |
284 | duk_uint16_t x; | |
285 | } u; | |
286 | ||
11fdf7f2 | 287 | DUK_MEMCPY((void *) u.b, (const void *) (*p), (size_t) 2); |
7c673cae FG |
288 | u.x = DUK_NTOH16(u.x); |
289 | *p += 2; | |
290 | return u.x; | |
291 | } | |
292 | ||
293 | DUK_INTERNAL DUK_ALWAYS_INLINE duk_uint32_t duk_raw_read_u32_be(duk_uint8_t **p) { | |
294 | union { | |
295 | duk_uint8_t b[4]; | |
296 | duk_uint32_t x; | |
297 | } u; | |
298 | ||
11fdf7f2 | 299 | DUK_MEMCPY((void *) u.b, (const void *) (*p), (size_t) 4); |
7c673cae FG |
300 | u.x = DUK_NTOH32(u.x); |
301 | *p += 4; | |
302 | return u.x; | |
303 | } | |
304 | ||
305 | DUK_INTERNAL DUK_ALWAYS_INLINE duk_double_t duk_raw_read_double_be(duk_uint8_t **p) { | |
306 | duk_double_union du; | |
307 | union { | |
308 | duk_uint8_t b[4]; | |
309 | duk_uint32_t x; | |
310 | } u; | |
311 | ||
11fdf7f2 | 312 | DUK_MEMCPY((void *) u.b, (const void *) (*p), (size_t) 4); |
7c673cae FG |
313 | u.x = DUK_NTOH32(u.x); |
314 | du.ui[DUK_DBL_IDX_UI0] = u.x; | |
11fdf7f2 | 315 | DUK_MEMCPY((void *) u.b, (const void *) (*p + 4), (size_t) 4); |
7c673cae FG |
316 | u.x = DUK_NTOH32(u.x); |
317 | du.ui[DUK_DBL_IDX_UI1] = u.x; | |
318 | *p += 8; | |
319 | ||
320 | return du.d; | |
321 | } | |
322 | ||
323 | DUK_INTERNAL DUK_ALWAYS_INLINE void duk_raw_write_u16_be(duk_uint8_t **p, duk_uint16_t val) { | |
324 | union { | |
325 | duk_uint8_t b[2]; | |
326 | duk_uint16_t x; | |
327 | } u; | |
328 | ||
329 | u.x = DUK_HTON16(val); | |
11fdf7f2 | 330 | DUK_MEMCPY((void *) (*p), (const void *) u.b, (size_t) 2); |
7c673cae FG |
331 | *p += 2; |
332 | } | |
333 | ||
334 | DUK_INTERNAL DUK_ALWAYS_INLINE void duk_raw_write_u32_be(duk_uint8_t **p, duk_uint32_t val) { | |
335 | union { | |
336 | duk_uint8_t b[4]; | |
337 | duk_uint32_t x; | |
338 | } u; | |
339 | ||
340 | u.x = DUK_HTON32(val); | |
11fdf7f2 | 341 | DUK_MEMCPY((void *) (*p), (const void *) u.b, (size_t) 4); |
7c673cae FG |
342 | *p += 4; |
343 | } | |
344 | ||
345 | DUK_INTERNAL DUK_ALWAYS_INLINE void duk_raw_write_double_be(duk_uint8_t **p, duk_double_t val) { | |
346 | duk_double_union du; | |
347 | union { | |
348 | duk_uint8_t b[4]; | |
349 | duk_uint32_t x; | |
350 | } u; | |
351 | ||
352 | du.d = val; | |
353 | u.x = du.ui[DUK_DBL_IDX_UI0]; | |
354 | u.x = DUK_HTON32(u.x); | |
11fdf7f2 | 355 | DUK_MEMCPY((void *) (*p), (const void *) u.b, (size_t) 4); |
7c673cae FG |
356 | u.x = du.ui[DUK_DBL_IDX_UI1]; |
357 | u.x = DUK_HTON32(u.x); | |
11fdf7f2 | 358 | DUK_MEMCPY((void *) (*p + 4), (const void *) u.b, (size_t) 4); |
7c673cae FG |
359 | *p += 8; |
360 | } |