#[cfg(test)]
mod tests;

use crate::alloc::Allocator;
use crate::cmp;
use crate::fmt;
use crate::io::{
    self, BufRead, Error, ErrorKind, Initializer, IoSlice, IoSliceMut, Read, Seek, SeekFrom, Write,
};
use crate::mem;

// =============================================================================
// Forwarding implementations

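// These impls just delegate every method to the wrapped value, so a `&mut T`
// or `Box<T>` can be passed wherever a `Read`, `Write`, `Seek`, or `BufRead`
// value is expected. A minimal, illustrative sketch of why that is useful,
// written as hypothetical user code against the public `std::io` API:
//
//     use std::io::{self, Read};
//
//     fn read_all(mut r: impl Read) -> io::Result<Vec<u8>> {
//         let mut buf = Vec::new();
//         r.read_to_end(&mut buf)?;
//         Ok(buf)
//     }
//
//     // Passing `&mut reader` uses `impl Read for &mut R`, so the caller
//     // keeps ownership of `reader` and can continue using it afterwards:
//     //     let bytes = read_all(&mut reader)?;
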
#[stable(feature = "rust1", since = "1.0.0")]
impl<R: Read + ?Sized> Read for &mut R {
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        (**self).read(buf)
    }

    #[inline]
    fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
        (**self).read_vectored(bufs)
    }

    #[inline]
    fn is_read_vectored(&self) -> bool {
        (**self).is_read_vectored()
    }

    #[inline]
    unsafe fn initializer(&self) -> Initializer {
        (**self).initializer()
    }

    #[inline]
    fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
        (**self).read_to_end(buf)
    }

    #[inline]
    fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {
        (**self).read_to_string(buf)
    }

    #[inline]
    fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {
        (**self).read_exact(buf)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<W: Write + ?Sized> Write for &mut W {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        (**self).write(buf)
    }

    #[inline]
    fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result<usize> {
        (**self).write_vectored(bufs)
    }

    #[inline]
    fn is_write_vectored(&self) -> bool {
        (**self).is_write_vectored()
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        (**self).flush()
    }

    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        (**self).write_all(buf)
    }

    #[inline]
    fn write_fmt(&mut self, fmt: fmt::Arguments<'_>) -> io::Result<()> {
        (**self).write_fmt(fmt)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<S: Seek + ?Sized> Seek for &mut S {
    #[inline]
    fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
        (**self).seek(pos)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<B: BufRead + ?Sized> BufRead for &mut B {
    #[inline]
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        (**self).fill_buf()
    }

    #[inline]
    fn consume(&mut self, amt: usize) {
        (**self).consume(amt)
    }

    #[inline]
    fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> io::Result<usize> {
        (**self).read_until(byte, buf)
    }

    #[inline]
    fn read_line(&mut self, buf: &mut String) -> io::Result<usize> {
        (**self).read_line(buf)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<R: Read + ?Sized> Read for Box<R> {
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        (**self).read(buf)
    }

    #[inline]
    fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
        (**self).read_vectored(bufs)
    }

    #[inline]
    fn is_read_vectored(&self) -> bool {
        (**self).is_read_vectored()
    }

    #[inline]
    unsafe fn initializer(&self) -> Initializer {
        (**self).initializer()
    }

    #[inline]
    fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
        (**self).read_to_end(buf)
    }

    #[inline]
    fn read_to_string(&mut self, buf: &mut String) -> io::Result<usize> {
        (**self).read_to_string(buf)
    }

    #[inline]
    fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {
        (**self).read_exact(buf)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<W: Write + ?Sized> Write for Box<W> {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        (**self).write(buf)
    }

    #[inline]
    fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result<usize> {
        (**self).write_vectored(bufs)
    }

    #[inline]
    fn is_write_vectored(&self) -> bool {
        (**self).is_write_vectored()
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        (**self).flush()
    }

    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        (**self).write_all(buf)
    }

    #[inline]
    fn write_fmt(&mut self, fmt: fmt::Arguments<'_>) -> io::Result<()> {
        (**self).write_fmt(fmt)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<S: Seek + ?Sized> Seek for Box<S> {
    #[inline]
    fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
        (**self).seek(pos)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<B: BufRead + ?Sized> BufRead for Box<B> {
    #[inline]
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        (**self).fill_buf()
    }

    #[inline]
    fn consume(&mut self, amt: usize) {
        (**self).consume(amt)
    }

    #[inline]
    fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> io::Result<usize> {
        (**self).read_until(byte, buf)
    }

    #[inline]
    fn read_line(&mut self, buf: &mut String) -> io::Result<usize> {
        (**self).read_line(buf)
    }
}

// =============================================================================
// In-memory buffer implementations

/// Read is implemented for `&[u8]` by copying from the slice.
///
/// Note that reading updates the slice to point to the yet unread part.
/// The slice will be empty when EOF is reached.
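///
/// # Examples
///
/// A minimal, illustrative sketch of the behavior described above:
///
/// ```
/// use std::io::Read;
///
/// // Illustrative only: the slice itself acts as the reader.
/// let mut bytes: &[u8] = b"hello";
/// let mut buf = [0u8; 3];
///
/// // Reading copies into `buf` and advances `bytes` past what was read.
/// assert_eq!(bytes.read(&mut buf).unwrap(), 3);
/// assert_eq!(&buf, b"hel");
/// assert_eq!(bytes, b"lo");
///
/// // The remaining bytes come next; once the slice is empty, reads
/// // report EOF by returning `Ok(0)`.
/// assert_eq!(bytes.read(&mut buf).unwrap(), 2);
/// assert_eq!(bytes.read(&mut buf).unwrap(), 0);
/// ```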
#[stable(feature = "rust1", since = "1.0.0")]
impl Read for &[u8] {
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        let amt = cmp::min(buf.len(), self.len());
        let (a, b) = self.split_at(amt);

        // First check if the number of bytes we want to read is small:
        // `copy_from_slice` will generally expand to a call to `memcpy`, and
        // for a single byte the overhead is significant.
        if amt == 1 {
            buf[0] = a[0];
        } else {
            buf[..amt].copy_from_slice(a);
        }

        *self = b;
        Ok(amt)
    }

    #[inline]
    fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
        let mut nread = 0;
        for buf in bufs {
            nread += self.read(buf)?;
            if self.is_empty() {
                break;
            }
        }

        Ok(nread)
    }

    #[inline]
    fn is_read_vectored(&self) -> bool {
        true
    }

    #[inline]
    unsafe fn initializer(&self) -> Initializer {
        Initializer::nop()
    }

    #[inline]
    fn read_exact(&mut self, buf: &mut [u8]) -> io::Result<()> {
        if buf.len() > self.len() {
            return Err(Error::new(ErrorKind::UnexpectedEof, "failed to fill whole buffer"));
        }
        let (a, b) = self.split_at(buf.len());

        // First check if the number of bytes we want to read is small:
        // `copy_from_slice` will generally expand to a call to `memcpy`, and
        // for a single byte the overhead is significant.
        if buf.len() == 1 {
            buf[0] = a[0];
        } else {
            buf.copy_from_slice(a);
        }

        *self = b;
        Ok(())
    }

    #[inline]
    fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
        buf.extend_from_slice(*self);
        let len = self.len();
        *self = &self[len..];
        Ok(len)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl BufRead for &[u8] {
    #[inline]
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        Ok(*self)
    }

    #[inline]
    fn consume(&mut self, amt: usize) {
        *self = &self[amt..];
    }
}

/// Write is implemented for `&mut [u8]` by copying into the slice, overwriting
/// its data.
///
/// Note that writing updates the slice to point to the yet unwritten part.
/// The slice will be empty when it has been completely overwritten.
///
/// If the number of bytes to be written exceeds the size of the slice, write operations will
/// return short writes: ultimately, `Ok(0)`; in this situation, `write_all` returns an error of
/// kind `ErrorKind::WriteZero`.
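///
/// # Examples
///
/// A minimal, illustrative sketch of the short-write behavior described above:
///
/// ```
/// use std::io::Write;
///
/// // Illustrative only: a fixed four-byte destination buffer.
/// let mut storage = [0u8; 4];
/// let mut slice: &mut [u8] = &mut storage;
///
/// // Only the first four bytes fit; the result is a short write.
/// assert_eq!(slice.write(b"abcdef").unwrap(), 4);
/// assert!(slice.is_empty());
///
/// // The slice has been fully overwritten, so further writes return `Ok(0)`.
/// assert_eq!(slice.write(b"gh").unwrap(), 0);
///
/// assert_eq!(&storage, b"abcd");
/// ```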
#[stable(feature = "rust1", since = "1.0.0")]
impl Write for &mut [u8] {
    #[inline]
    fn write(&mut self, data: &[u8]) -> io::Result<usize> {
        let amt = cmp::min(data.len(), self.len());
        let (a, b) = mem::replace(self, &mut []).split_at_mut(amt);
        a.copy_from_slice(&data[..amt]);
        *self = b;
        Ok(amt)
    }

    #[inline]
    fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result<usize> {
        let mut nwritten = 0;
        for buf in bufs {
            nwritten += self.write(buf)?;
            if self.is_empty() {
                break;
            }
        }

        Ok(nwritten)
    }

    #[inline]
    fn is_write_vectored(&self) -> bool {
        true
    }

    #[inline]
    fn write_all(&mut self, data: &[u8]) -> io::Result<()> {
        if self.write(data)? == data.len() {
            Ok(())
        } else {
            Err(Error::new(ErrorKind::WriteZero, "failed to write whole buffer"))
        }
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}

/// Write is implemented for `Vec<u8>` by appending to the vector.
/// The vector will grow as needed.
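///
/// # Examples
///
/// A minimal, illustrative sketch of appending through the `Write` interface:
///
/// ```
/// use std::io::Write;
///
/// // Illustrative only: each write appends, so the vector keeps growing.
/// let mut out: Vec<u8> = Vec::new();
/// out.write_all(b"hello ").unwrap();
/// out.write_all(b"world").unwrap();
/// assert_eq!(out, b"hello world");
/// ```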
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: Allocator> Write for Vec<u8, A> {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.extend_from_slice(buf);
        Ok(buf.len())
    }

    #[inline]
    fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result<usize> {
        let len = bufs.iter().map(|b| b.len()).sum();
        self.reserve(len);
        for buf in bufs {
            self.extend_from_slice(buf);
        }
        Ok(len)
    }

    #[inline]
    fn is_write_vectored(&self) -> bool {
        true
    }

    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        self.extend_from_slice(buf);
        Ok(())
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}