rustc (1.45.0+dfsg1-2) UNRELEASED; urgency=medium
* Add some more big-endian test patches.
+ * Backport upstream patches to fix several testsuite ICEs.
-- Ximin Luo <infinity0@debian.org> Thu, 06 Aug 2020 14:21:07 +0100
#u-allow-system-compiler-rt.patch
u-fix-exec.patch
u-ignore-endian.patch
+u-249a46f56cbbb6ffecb726d07163741093da6406.patch
u-fb9fa5ba3ee08171e7d2ff35d28ec0dd93b0287b.patch
u-1629fed4c0bdc6d3246ea63a91f600bcb8874626.patch
+u-5ceff6b96af9a21e044545b9e064433feccaf659.patch
+u-c0c3327ade8b0caa3043425ee45842c86fbd4f76.patch
+u-1f0895162ba5a783d4d73d5c263552eaca9343b3.patch
# not forwarded, or forwarded but unlikely to be merged
u-rustc-llvm-cross-flags.patch
--- /dev/null
+++ b/debian/patches/u-1f0895162ba5a783d4d73d5c263552eaca9343b3.patch
+From 1f0895162ba5a783d4d73d5c263552eaca9343b3 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Tomasz=20Mi=C4=85sko?= <tomasz.miasko@gmail.com>
+Date: Sun, 14 Jun 2020 00:00:00 +0000
+Subject: [PATCH] Avoid forming references to uninitialized memory in
+ DroplessArena
+
+Return a pointer from `alloc_raw` instead of a slice. There is no
+practical use for a slice as a return type, and changing it to a
+pointer avoids forming references to uninitialized memory.
+---
+ src/libarena/lib.rs | 25 +++++++++++--------------
+ src/librustc_middle/ty/list.rs | 2 +-
+ 2 files changed, 12 insertions(+), 15 deletions(-)
+
+--- a/src/libarena/lib.rs
++++ b/src/libarena/lib.rs
+@@ -375,7 +375,7 @@
+ /// current memory chunk. Returns `None` if there is no free space left to
+ /// satisfy the request.
+ #[inline]
+- fn alloc_raw_without_grow(&self, bytes: usize, align: usize) -> Option<&mut [u8]> {
++ fn alloc_raw_without_grow(&self, bytes: usize, align: usize) -> Option<*mut u8> {
+ let ptr = self.ptr.get() as usize;
+ let end = self.end.get() as usize;
+ // The allocation request fits into the current chunk iff:
+@@ -391,14 +391,14 @@
+ let new_ptr = aligned.checked_add(bytes)?;
+ if new_ptr <= end {
+ self.ptr.set(new_ptr as *mut u8);
+- unsafe { Some(slice::from_raw_parts_mut(aligned as *mut u8, bytes)) }
++ Some(aligned as *mut u8)
+ } else {
+ None
+ }
+ }
+
+ #[inline]
+- pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
++ pub fn alloc_raw(&self, bytes: usize, align: usize) -> *mut u8 {
+ assert!(bytes != 0);
+ loop {
+ if let Some(a) = self.alloc_raw_without_grow(bytes, align) {
+@@ -414,7 +414,7 @@
+ pub fn alloc<T>(&self, object: T) -> &mut T {
+ assert!(!mem::needs_drop::<T>());
+
+- let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
++ let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;
+
+ unsafe {
+ // Write into uninitialized memory.
+@@ -439,13 +439,11 @@
+ assert!(mem::size_of::<T>() != 0);
+ assert!(!slice.is_empty());
+
+- let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut _
+- as *mut T;
++ let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;
+
+ unsafe {
+- let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
+- arena_slice.copy_from_slice(slice);
+- arena_slice
++ mem.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
++ slice::from_raw_parts_mut(mem, slice.len())
+ }
+ }
+
+@@ -488,7 +486,7 @@
+ return &mut [];
+ }
+ let size = len.checked_mul(mem::size_of::<T>()).unwrap();
+- let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
++ let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut T;
+ unsafe { self.write_from_iter(iter, len, mem) }
+ }
+ (_, _) => {
+@@ -503,7 +501,7 @@
+ let len = vec.len();
+ let start_ptr = self
+ .alloc_raw(len * mem::size_of::<T>(), mem::align_of::<T>())
+- as *mut _ as *mut T;
++ as *mut T;
+ vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+ vec.set_len(0);
+ slice::from_raw_parts_mut(start_ptr, len)
+@@ -547,8 +545,7 @@
+ impl DropArena {
+ #[inline]
+ pub unsafe fn alloc<T>(&self, object: T) -> &mut T {
+- let mem =
+- self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
++ let mem = self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;
+ // Write into uninitialized memory.
+ ptr::write(mem, object);
+ let result = &mut *mem;
+@@ -571,7 +568,7 @@
+ let start_ptr = self
+ .arena
+ .alloc_raw(len.checked_mul(mem::size_of::<T>()).unwrap(), mem::align_of::<T>())
+- as *mut _ as *mut T;
++ as *mut T;
+
+ let mut destructors = self.destructors.borrow_mut();
+ // Reserve space for the destructors so we can't panic while adding them
+--- a/src/librustc_middle/ty/list.rs
++++ b/src/librustc_middle/ty/list.rs
+@@ -55,7 +55,7 @@
+ .dropless
+ .alloc_raw(size, cmp::max(mem::align_of::<T>(), mem::align_of::<usize>()));
+ unsafe {
+- let result = &mut *(mem.as_mut_ptr() as *mut List<T>);
++ let result = &mut *(mem as *mut List<T>);
+ // Write the length
+ result.len = slice.len();
+
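The idea behind the change above can be shown outside the compiler tree. In the following standalone sketch (a toy Bump type invented for illustration, not the librustc_arena code), alloc_raw hands back a raw *mut u8, and a &mut [T] is only materialized after copy_from_nonoverlapping has initialized the bytes, so no reference to uninitialized memory is ever formed, which is exactly the hazard the commit message describes:

use std::cell::{Cell, UnsafeCell};
use std::{mem, slice};

// Toy bump allocator, invented for this sketch.
struct Bump {
    buf: UnsafeCell<Box<[u8]>>,
    used: Cell<usize>,
}

impl Bump {
    fn new(size: usize) -> Bump {
        Bump { buf: UnsafeCell::new(vec![0u8; size].into_boxed_slice()), used: Cell::new(0) }
    }

    // Like the patched alloc_raw: return a raw pointer so that no
    // reference to the still-uninitialized bytes is created here.
    fn alloc_raw(&self, bytes: usize, align: usize) -> *mut u8 {
        assert!(bytes != 0 && align.is_power_of_two());
        let base = unsafe { (*self.buf.get()).as_mut_ptr() as usize };
        let cap = unsafe { (*self.buf.get()).len() };
        let aligned = (base + self.used.get()).checked_add(align - 1).unwrap() & !(align - 1);
        let new_used = aligned - base + bytes;
        assert!(new_used <= cap, "toy arena out of space");
        self.used.set(new_used);
        aligned as *mut u8
    }

    // The &mut [T] is created only after the copy has initialized the bytes.
    fn alloc_slice_copy<T: Copy>(&self, src: &[T]) -> &mut [T] {
        let dst = self.alloc_raw(src.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;
        unsafe {
            dst.copy_from_nonoverlapping(src.as_ptr(), src.len());
            slice::from_raw_parts_mut(dst, src.len())
        }
    }
}

fn main() {
    let arena = Bump::new(1024);
    let xs = arena.alloc_slice_copy(&[1u32, 2, 3]);
    xs[0] += 10;
    assert_eq!(&xs[..], &[11u32, 2, 3][..]);
}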
--- /dev/null
+++ b/debian/patches/u-249a46f56cbbb6ffecb726d07163741093da6406.patch
+From 249a46f56cbbb6ffecb726d07163741093da6406 Mon Sep 17 00:00:00 2001
+From: Yerkebulan Tulibergenov <yerkebulan@gmail.com>
+Date: Sat, 13 Jun 2020 00:41:39 -0700
+Subject: [PATCH] pretty/asm.rs should only be tested for x86_64 and not
+ AArch64
+
+---
+ src/test/pretty/asm.pp | 1 +
+ src/test/pretty/asm.rs | 1 +
+ 2 files changed, 2 insertions(+)
+
+diff --git a/src/test/pretty/asm.pp b/src/test/pretty/asm.pp
+index 1723e1cc1cb09..4903050e08ed2 100644
+--- a/src/test/pretty/asm.pp
++++ b/src/test/pretty/asm.pp
+@@ -8,6 +8,7 @@
+
+ // pretty-mode:expanded
+ // pp-exact:asm.pp
++// only-x86_64
+
+ pub fn main() {
+ let a: i32;
+diff --git a/src/test/pretty/asm.rs b/src/test/pretty/asm.rs
+index 9812f1d97e5ff..12c32e6721b33 100644
+--- a/src/test/pretty/asm.rs
++++ b/src/test/pretty/asm.rs
+@@ -2,6 +2,7 @@
+
+ // pretty-mode:expanded
+ // pp-exact:asm.pp
++// only-x86_64
+
+ pub fn main() {
+ let a: i32;
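For background, only-x86_64 is a compiletest header directive, not an ordinary comment: the test harness parses the leading // lines of each test file, and this directive makes it skip the test on every non-x86_64 target, which is what keeps the x86-specific pretty-printing test off AArch64. A minimal hypothetical test file using the directive (invented for illustration, not part of the patch):

// only-x86_64

// On any non-x86_64 target, compiletest skips this file entirely
// because of the header directive above.
pub fn main() {
    let a: i32 = 5;
    assert_eq!(a, 5);
}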
--- /dev/null
+++ b/debian/patches/u-5ceff6b96af9a21e044545b9e064433feccaf659.patch
+From 5ceff6b96af9a21e044545b9e064433feccaf659 Mon Sep 17 00:00:00 2001
+From: Nicholas Nethercote <nnethercote@mozilla.com>
+Date: Thu, 21 May 2020 10:36:32 +1000
+Subject: [PATCH] Fix off-by-one error in `DroplessArena::alloc_raw`.
+
+The off-by-one error caused unnecessary calls to `grow` even when the
+allocation would have fit exactly in the remaining space.
+---
+ src/libarena/lib.rs | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+--- a/src/libarena/lib.rs
++++ b/src/libarena/lib.rs
+@@ -386,7 +386,7 @@
+ self.align(align);
+
+ let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize);
+- if (future_end as *mut u8) >= self.end.get() {
++ if (future_end as *mut u8) > self.end.get() {
+ self.grow(bytes);
+ }
+
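The boundary case is easiest to see with plain numbers: if the current chunk spans ptr..end and a request of `bytes` makes future_end equal to end, the allocation fits exactly, so only future_end > end should trigger grow. A standalone sketch of the corrected check (names mirror the patch, values invented):

fn needs_grow(ptr: usize, end: usize, bytes: usize) -> bool {
    let future_end = ptr + bytes;
    // Patched condition: grow only when we would run *past* the chunk.
    future_end > end
}

fn main() {
    // A 16-byte chunk starting at address 100 ends at 116.
    assert!(!needs_grow(100, 116, 16)); // exact fit: no grow needed
    assert!(needs_grow(100, 116, 17)); // one byte too many: must grow
    // The old `>=` check would have grown on the exact fit as well,
    // allocating a fresh chunk for nothing.
}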
--- /dev/null
+++ b/debian/patches/u-c0c3327ade8b0caa3043425ee45842c86fbd4f76.patch
+From c0c3327ade8b0caa3043425ee45842c86fbd4f76 Mon Sep 17 00:00:00 2001
+From: =?UTF-8?q?Tomasz=20Mi=C4=85sko?= <tomasz.miasko@gmail.com>
+Date: Thu, 11 Jun 2020 00:00:00 +0000
+Subject: [PATCH] Check for overflow in DroplessArena and return aligned
+ pointer
+
+* Check for overflow when calculating the slice start & end positions.
+* Align the pointer obtained from the allocator, ensuring that it
+  satisfies the user-requested alignment (the allocator is only asked
+  for a layout compatible with a u8 slice).
+* Remove an incorrect assertion from DroplessArena::align.
+---
+ src/libarena/lib.rs | 53 ++++++++++++++++++++++++---------------
+ 1 file changed, 33 insertions(+), 20 deletions(-)
+
+--- a/src/libarena/lib.rs
++++ b/src/libarena/lib.rs
+@@ -338,13 +338,6 @@
+ }
+
+ impl DroplessArena {
+- #[inline]
+- fn align(&self, align: usize) {
+- let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
+- self.ptr.set(final_address as *mut u8);
+- assert!(self.ptr <= self.end);
+- }
+-
+ #[inline(never)]
+ #[cold]
+ fn grow(&self, needed_bytes: usize) {
+@@ -378,22 +371,42 @@
+ }
+ }
+
++ /// Allocates a byte slice with specified size and alignment from the
++ /// current memory chunk. Returns `None` if there is no free space left to
++ /// satisfy the request.
+ #[inline]
+- pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
+- unsafe {
+- assert!(bytes != 0);
+-
+- self.align(align);
++ fn alloc_raw_without_grow(&self, bytes: usize, align: usize) -> Option<&mut [u8]> {
++ let ptr = self.ptr.get() as usize;
++ let end = self.end.get() as usize;
++ // The allocation request fits into the current chunk iff:
++ //
++ // let aligned = align_to(ptr, align);
++ // ptr <= aligned && aligned + bytes <= end
++ //
++ // Except that we work with fixed width integers and need to be careful
++ // about potential overflow in the calculation. If the overflow does
++ // happen, then we definitely don't have enough free space and need to
++ // grow the arena.
++ let aligned = ptr.checked_add(align - 1)? & !(align - 1);
++ let new_ptr = aligned.checked_add(bytes)?;
++ if new_ptr <= end {
++ self.ptr.set(new_ptr as *mut u8);
++ unsafe { Some(slice::from_raw_parts_mut(aligned as *mut u8, bytes)) }
++ } else {
++ None
++ }
++ }
+
+- let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize);
+- if (future_end as *mut u8) > self.end.get() {
+- self.grow(bytes);
++ #[inline]
++ pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
++ assert!(bytes != 0);
++ loop {
++ if let Some(a) = self.alloc_raw_without_grow(bytes, align) {
++ break a;
+ }
+-
+- let ptr = self.ptr.get();
+- // Set the pointer past ourselves
+- self.ptr.set(intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8);
+- slice::from_raw_parts_mut(ptr, bytes)
++ // No free space left. Allocate a new chunk to satisfy the request.
++ // On failure the grow will panic or abort.
++ self.grow(bytes);
+ }
+ }
+
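The overflow hazard named in the first bullet of the commit message above can be demonstrated in isolation: with fixed-width integers, the usual round-up (ptr + align - 1) & !(align - 1) can wrap around near the top of the address space and "align" to a tiny address. A minimal sketch of the checked variant the patch adopts (function name and values invented):

// Overflow-checked align-up, the same shape as alloc_raw_without_grow.
fn align_to(ptr: usize, align: usize) -> Option<usize> {
    debug_assert!(align.is_power_of_two());
    Some(ptr.checked_add(align - 1)? & !(align - 1))
}

fn main() {
    assert_eq!(align_to(13, 8), Some(16));
    assert_eq!(align_to(16, 8), Some(16)); // already aligned
    // Near usize::MAX the unchecked form would wrap to a low address;
    // the checked form reports failure instead, which the arena treats
    // as "no space in this chunk, grow a new one".
    assert_eq!(align_to(usize::MAX - 2, 8), None);
}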