// library/std/src/io/impls.rs (rustc 1.49.0-beta.4)
#[cfg(test)]
mod tests;

use crate::cmp;
use crate::fmt;
use crate::io::{
    self, BufRead, Error, ErrorKind, Initializer, IoSlice, IoSliceMut, Read, Seek, SeekFrom, Write,
};
use crate::mem;

// =============================================================================
// Forwarding implementations

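// A minimal usage sketch: because `&mut R` forwards `Read`, a mutable
// reference to any reader can be handed to generic code that takes a reader
// by value. The helper `read_all` below is purely illustrative.
/// # Examples
///
/// ```
/// use std::io::Read;
///
/// // Hypothetical helper, shown only to demonstrate the forwarding impl.
/// fn read_all<R: Read>(mut reader: R) -> std::io::Result<Vec<u8>> {
///     let mut out = Vec::new();
///     reader.read_to_end(&mut out)?;
///     Ok(out)
/// }
///
/// let mut data: &[u8] = b"forwarded";
/// // `&mut data` is accepted because `&mut R: Read` whenever `R: Read`.
/// let out = read_all(&mut data).unwrap();
/// assert_eq!(out, b"forwarded");
/// ```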
#[stable(feature = "rust1", since = "1.0.0")]
impl<R: Read + ?Sized> Read for &mut R {
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        (**self).read(buf)
    }

    #[inline]
    fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
        (**self).read_vectored(bufs)
    }

    #[inline]
    fn is_read_vectored(&self) -> bool {
        (**self).is_read_vectored()
    }

    #[inline]
    unsafe fn initializer(&self) -> Initializer {
        (**self).initializer()
    }

    #[inline]
    fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
        (**self).read_to_end(buf)
    }

    #[inline]
    fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {
        (**self).read_to_string(buf)
    }

    #[inline]
    fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {
        (**self).read_exact(buf)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<W: Write + ?Sized> Write for &mut W {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        (**self).write(buf)
    }

    #[inline]
    fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result<usize> {
        (**self).write_vectored(bufs)
    }

    #[inline]
    fn is_write_vectored(&self) -> bool {
        (**self).is_write_vectored()
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        (**self).flush()
    }

    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        (**self).write_all(buf)
    }

    #[inline]
    fn write_fmt(&mut self, fmt: fmt::Arguments<'_>) -> io::Result<()> {
        (**self).write_fmt(fmt)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<S: Seek + ?Sized> Seek for &mut S {
    #[inline]
    fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
        (**self).seek(pos)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<B: BufRead + ?Sized> BufRead for &mut B {
    #[inline]
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        (**self).fill_buf()
    }

    #[inline]
    fn consume(&mut self, amt: usize) {
        (**self).consume(amt)
    }

    #[inline]
    fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> io::Result<usize> {
        (**self).read_until(byte, buf)
    }

    #[inline]
    fn read_line(&mut self, buf: &mut String) -> io::Result<usize> {
        (**self).read_line(buf)
    }
}

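// A minimal usage sketch: boxing a reader preserves `Read`, which is what
// makes trait objects such as `Box<dyn Read>` usable as ordinary readers.
/// # Examples
///
/// ```
/// use std::io::Read;
///
/// let mut reader: Box<dyn Read> = Box::new(&b"boxed"[..]);
/// let mut out = String::new();
/// reader.read_to_string(&mut out).unwrap();
/// assert_eq!(out, "boxed");
/// ```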
#[stable(feature = "rust1", since = "1.0.0")]
impl<R: Read + ?Sized> Read for Box<R> {
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        (**self).read(buf)
    }

    #[inline]
    fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
        (**self).read_vectored(bufs)
    }

    #[inline]
    fn is_read_vectored(&self) -> bool {
        (**self).is_read_vectored()
    }

    #[inline]
    unsafe fn initializer(&self) -> Initializer {
        (**self).initializer()
    }

    #[inline]
    fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
        (**self).read_to_end(buf)
    }

    #[inline]
    fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {
        (**self).read_to_string(buf)
    }

    #[inline]
    fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {
        (**self).read_exact(buf)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<W: Write + ?Sized> Write for Box<W> {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        (**self).write(buf)
    }

    #[inline]
    fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result<usize> {
        (**self).write_vectored(bufs)
    }

    #[inline]
    fn is_write_vectored(&self) -> bool {
        (**self).is_write_vectored()
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        (**self).flush()
    }

    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        (**self).write_all(buf)
    }

    #[inline]
    fn write_fmt(&mut self, fmt: fmt::Arguments<'_>) -> io::Result<()> {
        (**self).write_fmt(fmt)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<S: Seek + ?Sized> Seek for Box<S> {
    #[inline]
    fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
        (**self).seek(pos)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<B: BufRead + ?Sized> BufRead for Box<B> {
    #[inline]
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        (**self).fill_buf()
    }

    #[inline]
    fn consume(&mut self, amt: usize) {
        (**self).consume(amt)
    }

    #[inline]
    fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> io::Result<usize> {
        (**self).read_until(byte, buf)
    }

    #[inline]
    fn read_line(&mut self, buf: &mut String) -> io::Result<usize> {
        (**self).read_line(buf)
    }
}

// Used by panicking::default_hook
#[cfg(test)]
/// This impl is only used by printing logic, so any error returned is always
/// of kind `Other`, and should be ignored.
impl Write for dyn ::realstd::io::LocalOutput {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        (*self).write(buf).map_err(|_| ErrorKind::Other.into())
    }

    fn flush(&mut self) -> io::Result<()> {
        (*self).flush().map_err(|_| ErrorKind::Other.into())
    }
}

// =============================================================================
// In-memory buffer implementations

/// Read is implemented for `&[u8]` by copying from the slice.
///
/// Note that reading updates the slice to point to the yet unread part.
/// The slice will be empty when EOF is reached.
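// A minimal sketch of the behaviour described above: reading consumes the
// front of the slice and leaves the unread tail behind.
/// # Examples
///
/// ```
/// use std::io::Read;
///
/// let mut bytes: &[u8] = b"hello world";
/// let mut buf = [0u8; 5];
/// bytes.read_exact(&mut buf).unwrap();
/// assert_eq!(&buf, b"hello");
/// // The slice now points at the yet unread part.
/// assert_eq!(bytes, &b" world"[..]);
/// ```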
#[stable(feature = "rust1", since = "1.0.0")]
impl Read for &[u8] {
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        let amt = cmp::min(buf.len(), self.len());
        let (a, b) = self.split_at(amt);

        // First check if the number of bytes we want to read is small:
        // `copy_from_slice` will generally expand to a call to `memcpy`, and
        // for a single byte the overhead is significant.
        if amt == 1 {
            buf[0] = a[0];
        } else {
            buf[..amt].copy_from_slice(a);
        }

        *self = b;
        Ok(amt)
    }

    #[inline]
    fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
        let mut nread = 0;
        for buf in bufs {
            nread += self.read(buf)?;
            if self.is_empty() {
                break;
            }
        }

        Ok(nread)
    }

    #[inline]
    fn is_read_vectored(&self) -> bool {
        true
    }

    #[inline]
    unsafe fn initializer(&self) -> Initializer {
        Initializer::nop()
    }

    #[inline]
    fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {
        if buf.len() > self.len() {
            return Err(Error::new(ErrorKind::UnexpectedEof, "failed to fill whole buffer"));
        }
        let (a, b) = self.split_at(buf.len());

        // First check if the number of bytes we want to read is small:
        // `copy_from_slice` will generally expand to a call to `memcpy`, and
        // for a single byte the overhead is significant.
        if buf.len() == 1 {
            buf[0] = a[0];
        } else {
            buf.copy_from_slice(a);
        }

        *self = b;
        Ok(())
    }

    #[inline]
    fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
        buf.extend_from_slice(*self);
        let len = self.len();
        *self = &self[len..];
        Ok(len)
    }
}

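// A minimal sketch: for a byte slice, `fill_buf` simply returns the remaining
// contents and `consume` advances past them, so buffered helpers such as
// `read_line` work directly on slices.
/// # Examples
///
/// ```
/// use std::io::BufRead;
///
/// let mut bytes: &[u8] = b"one\ntwo\n";
/// let mut line = String::new();
/// bytes.read_line(&mut line).unwrap();
/// assert_eq!(line, "one\n");
/// assert_eq!(bytes, &b"two\n"[..]);
/// ```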
#[stable(feature = "rust1", since = "1.0.0")]
impl BufRead for &[u8] {
    #[inline]
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        Ok(*self)
    }

    #[inline]
    fn consume(&mut self, amt: usize) {
        *self = &self[amt..];
    }
}

/// Write is implemented for `&mut [u8]` by copying into the slice, overwriting
/// its data.
///
/// Note that writing updates the slice to point to the yet unwritten part.
/// The slice will be empty when it has been completely overwritten.
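// A minimal sketch of the behaviour described above: writes fill the front of
// the slice and the slice shrinks to the yet unwritten remainder.
/// # Examples
///
/// ```
/// use std::io::Write;
///
/// let mut storage = [0u8; 8];
/// let mut slice: &mut [u8] = &mut storage;
/// slice.write_all(b"hi").unwrap();
/// // The slice now covers only the unwritten tail.
/// assert_eq!(slice.len(), 6);
/// assert_eq!(&storage[..2], &b"hi"[..]);
/// ```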
#[stable(feature = "rust1", since = "1.0.0")]
impl Write for &mut [u8] {
    #[inline]
    fn write(&mut self, data: &[u8]) -> io::Result<usize> {
        let amt = cmp::min(data.len(), self.len());
        let (a, b) = mem::replace(self, &mut []).split_at_mut(amt);
        a.copy_from_slice(&data[..amt]);
        *self = b;
        Ok(amt)
    }

    #[inline]
    fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result<usize> {
        let mut nwritten = 0;
        for buf in bufs {
            nwritten += self.write(buf)?;
            if self.is_empty() {
                break;
            }
        }

        Ok(nwritten)
    }

    #[inline]
    fn is_write_vectored(&self) -> bool {
        true
    }

    #[inline]
    fn write_all(&mut self, data: &[u8]) -> io::Result<()> {
        if self.write(data)? == data.len() {
            Ok(())
        } else {
            Err(Error::new(ErrorKind::WriteZero, "failed to write whole buffer"))
        }
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}

/// Write is implemented for `Vec<u8>` by appending to the vector.
/// The vector will grow as needed.
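// A minimal sketch of the behaviour described above: every write appends to
// the vector, so a `Vec<u8>` acts as a growable in-memory sink.
/// # Examples
///
/// ```
/// use std::io::Write;
///
/// let mut buffer: Vec<u8> = Vec::new();
/// buffer.write_all(b"hello ").unwrap();
/// buffer.write_all(b"world").unwrap();
/// assert_eq!(buffer, b"hello world");
/// ```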
#[stable(feature = "rust1", since = "1.0.0")]
impl Write for Vec<u8> {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.extend_from_slice(buf);
        Ok(buf.len())
    }

    #[inline]
    fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result<usize> {
        let len = bufs.iter().map(|b| b.len()).sum();
        self.reserve(len);
        for buf in bufs {
            self.extend_from_slice(buf);
        }
        Ok(len)
    }

    #[inline]
    fn is_write_vectored(&self) -> bool {
        true
    }

    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        self.extend_from_slice(buf);
        Ok(())
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}