From 1569f8f812c97002fa54dd6a5778109766a320e1 Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Mon, 2 Apr 2018 10:38:07 +0200
Subject: [PATCH 01/27] Inline docs for the heap module’s reexports
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/liballoc/heap.rs | 2 ++
 src/libstd/heap.rs   | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs
index 9296a1130718e..000c0123d9fed 100644
--- a/src/liballoc/heap.rs
+++ b/src/liballoc/heap.rs
@@ -19,7 +19,9 @@ use core::intrinsics::{min_align_of_val, size_of_val};
 use core::mem::{self, ManuallyDrop};
 use core::usize;
 
+#[doc(inline)]
 pub use core::heap::*;
+
 #[doc(hidden)]
 pub mod __core {
     pub use core::*;
diff --git a/src/libstd/heap.rs b/src/libstd/heap.rs
index 4a391372c3a11..2cf0601808705 100644
--- a/src/libstd/heap.rs
+++ b/src/libstd/heap.rs
@@ -14,7 +14,7 @@
 
 pub use alloc::heap::Heap;
 pub use alloc_system::System;
-pub use core::heap::*;
+#[doc(inline)] pub use core::heap::*;
 
 #[cfg(not(test))]
 #[doc(hidden)]

From 9b068867f0ac0851ff0b23381e5b1b1c09b4002e Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Mon, 2 Apr 2018 11:26:16 +0200
Subject: [PATCH 02/27] Add a core::heap::Void extern type.

---
 src/libcore/heap.rs | 20 ++++++++++++++++++++
 src/libcore/lib.rs  |  1 +
 2 files changed, 21 insertions(+)

diff --git a/src/libcore/heap.rs b/src/libcore/heap.rs
index fe19c923a58d1..80eedb5bff22a 100644
--- a/src/libcore/heap.rs
+++ b/src/libcore/heap.rs
@@ -21,6 +21,26 @@ use mem;
 use usize;
 use ptr::{self, NonNull};
 
+extern {
+    /// An opaque, unsized type. Used for pointers to allocated memory.
+    ///
+    /// This type can only be used behind a pointer like `*mut Void` or `ptr::NonNull<Void>`.
+    /// Such pointers are similar to C’s `void*` type.
+    pub type Void;
+}
+
+impl Void {
+    /// Similar to `std::ptr::null`, which requires `T: Sized`.
+    pub fn null() -> *const Self {
+        0 as _
+    }
+
+    /// Similar to `std::ptr::null_mut`, which requires `T: Sized`.
+    pub fn null_mut() -> *mut Self {
+        0 as _
+    }
+}
+
 /// Represents the combination of a starting address and
 /// a total capacity of the returned block.
 #[derive(Debug)]
diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs
index 9ff8465bc0f2d..722a9de215c45 100644
--- a/src/libcore/lib.rs
+++ b/src/libcore/lib.rs
@@ -75,6 +75,7 @@
 #![feature(custom_attribute)]
 #![feature(doc_cfg)]
 #![feature(doc_spotlight)]
+#![feature(extern_types)]
 #![feature(fn_must_use)]
 #![feature(fundamental)]
 #![feature(intrinsics)]

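As a point of reference, a short sketch (not part of the patch) of how caller code might hold the new opaque type, assuming the `std::heap` re-export shown in the first patch and the nightly feature gates of this period; the `RawBlock` type is purely illustrative:

#![feature(allocator_api, extern_types)]

use std::heap::Void;

// `Void` has no known size or layout, so it can only ever appear behind a
// pointer such as `*mut Void`, much like C's `void*`.
struct RawBlock {
    ptr: *mut Void,
}

impl RawBlock {
    fn empty() -> RawBlock {
        // `ptr::null_mut::<T>()` requires `T: Sized`, hence the dedicated helper.
        RawBlock { ptr: Void::null_mut() }
    }
}
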
From c660cedc02e125fe47d90075837c5d8adeb4c097 Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Tue, 3 Apr 2018 14:07:06 +0200
Subject: [PATCH 03/27] Add a GlobalAlloc trait

---
 src/libcore/heap.rs | 30 ++++++++++++++++++++++++++++++
 1 file changed, 30 insertions(+)

diff --git a/src/libcore/heap.rs b/src/libcore/heap.rs
index 80eedb5bff22a..5c51bb2b51b9c 100644
--- a/src/libcore/heap.rs
+++ b/src/libcore/heap.rs
@@ -404,6 +404,36 @@ impl From<AllocErr> for CollectionAllocErr {
     }
 }
 
+// FIXME: docs
+pub unsafe trait GlobalAlloc {
+    unsafe fn alloc(&self, layout: Layout) -> *mut Void;
+
+    unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout);
+
+    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Void {
+        let size = layout.size();
+        let ptr = self.alloc(layout);
+        if !ptr.is_null() {
+            ptr::write_bytes(ptr as *mut u8, 0, size);
+        }
+        ptr
+    }
+
+    unsafe fn realloc(&self, ptr: *mut Void, old_layout: Layout, new_size: usize) -> *mut Void {
+        let new_layout = Layout::from_size_align_unchecked(new_size, old_layout.align());
+        let new_ptr = self.alloc(new_layout);
+        if !new_ptr.is_null() {
+            ptr::copy_nonoverlapping(
+                ptr as *const u8,
+                new_ptr as *mut u8,
+                cmp::min(old_layout.size(), new_size),
+            );
+            self.dealloc(ptr, old_layout);
+        }
+        new_ptr
+    }
+}
+
 /// An implementation of `Alloc` can allocate, reallocate, and
 /// deallocate arbitrary blocks of data described via `Layout`.
 ///

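To make the shape of the new trait concrete, here is a minimal single-threaded bump allocator sketched against it (not part of the patch; the `BumpAlloc` name, the 4 KiB arena, and the `std::heap` paths and feature gates are assumptions of this sketch):

#![feature(allocator_api, extern_types)]

use std::heap::{GlobalAlloc, Layout, Void};
use std::cell::UnsafeCell;

// A fixed 4 KiB arena handed out linearly; nothing is ever reclaimed.
struct BumpAlloc {
    arena: UnsafeCell<[u8; 4096]>,
    next: UnsafeCell<usize>,
}

unsafe impl GlobalAlloc for BumpAlloc {
    unsafe fn alloc(&self, layout: Layout) -> *mut Void {
        let next = &mut *self.next.get();
        // Round the current offset up to the requested alignment.
        let start = (*next + layout.align() - 1) & !(layout.align() - 1);
        if start + layout.size() > 4096 {
            return Void::null_mut(); // report allocation failure as a null pointer
        }
        *next = start + layout.size();
        (self.arena.get() as *mut u8).offset(start as isize) as *mut Void
    }

    unsafe fn dealloc(&self, _ptr: *mut Void, _layout: Layout) {
        // Individual blocks are never reclaimed; `alloc_zeroed` and `realloc`
        // fall back to the default methods provided by the trait above.
    }
}
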
From 1b895d8b88413f72230fbc0f00c67328870a2e9a Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Tue, 3 Apr 2018 14:36:57 +0200
Subject: [PATCH 04/27] Import the `alloc` crate as `alloc_crate` in std
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

… to make the name `alloc` available.
---
 src/libstd/collections/hash/map.rs   |  4 +---
 src/libstd/collections/hash/table.rs |  5 +----
 src/libstd/collections/mod.rs        | 10 +++++-----
 src/libstd/error.rs                  | 10 +++++-----
 src/libstd/heap.rs                   |  2 +-
 src/libstd/lib.rs                    | 18 +++++++++---------
 src/libstd/sync/mod.rs               |  2 +-
 src/libstd/sync/mpsc/mpsc_queue.rs   |  3 +--
 src/libstd/sync/mpsc/spsc_queue.rs   |  2 +-
 src/libstd/sys/cloudabi/thread.rs    |  2 +-
 src/libstd/sys/redox/thread.rs       |  2 +-
 src/libstd/sys/unix/thread.rs        |  2 +-
 src/libstd/sys/wasm/thread.rs        |  2 +-
 src/libstd/sys/windows/process.rs    |  2 +-
 src/libstd/sys/windows/thread.rs     |  2 +-
 src/libstd/sys_common/at_exit_imp.rs |  2 +-
 src/libstd/sys_common/process.rs     |  2 +-
 src/libstd/sys_common/thread.rs      |  2 +-
 18 files changed, 34 insertions(+), 40 deletions(-)

diff --git a/src/libstd/collections/hash/map.rs b/src/libstd/collections/hash/map.rs
index e0b48e565d02a..73a5df8dc285f 100644
--- a/src/libstd/collections/hash/map.rs
+++ b/src/libstd/collections/hash/map.rs
@@ -11,15 +11,13 @@
 use self::Entry::*;
 use self::VacantEntryState::*;
 
-use alloc::heap::Heap;
-use alloc::allocator::CollectionAllocErr;
 use cell::Cell;
-use core::heap::Alloc;
 use borrow::Borrow;
 use cmp::max;
 use fmt::{self, Debug};
 #[allow(deprecated)]
 use hash::{Hash, Hasher, BuildHasher, SipHasher13};
+use heap::{Heap, Alloc, CollectionAllocErr};
 use iter::{FromIterator, FusedIterator};
 use mem::{self, replace};
 use ops::{Deref, Index};
diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs
index fa6053d3f6d8e..878cd82a258d2 100644
--- a/src/libstd/collections/hash/table.rs
+++ b/src/libstd/collections/hash/table.rs
@@ -8,17 +8,14 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use alloc::heap::Heap;
-use core::heap::{Alloc, Layout};
-
 use cmp;
 use hash::{BuildHasher, Hash, Hasher};
+use heap::{Heap, Alloc, Layout, CollectionAllocErr};
 use marker;
 use mem::{align_of, size_of, needs_drop};
 use mem;
 use ops::{Deref, DerefMut};
 use ptr::{self, Unique, NonNull};
-use alloc::allocator::CollectionAllocErr;
 
 use self::BucketState::*;
 
diff --git a/src/libstd/collections/mod.rs b/src/libstd/collections/mod.rs
index c7ad27d8d2675..9cf73824deaaf 100644
--- a/src/libstd/collections/mod.rs
+++ b/src/libstd/collections/mod.rs
@@ -424,13 +424,13 @@
 #[doc(hidden)]
 pub use ops::Bound;
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc::{BinaryHeap, BTreeMap, BTreeSet};
+pub use alloc_crate::{BinaryHeap, BTreeMap, BTreeSet};
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc::{LinkedList, VecDeque};
+pub use alloc_crate::{LinkedList, VecDeque};
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc::{binary_heap, btree_map, btree_set};
+pub use alloc_crate::{binary_heap, btree_map, btree_set};
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc::{linked_list, vec_deque};
+pub use alloc_crate::{linked_list, vec_deque};
 
 #[stable(feature = "rust1", since = "1.0.0")]
 pub use self::hash_map::HashMap;
@@ -446,7 +446,7 @@ pub mod range {
 }
 
 #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
-pub use alloc::allocator::CollectionAllocErr;
+pub use heap::CollectionAllocErr;
 
 mod hash;
 
diff --git a/src/libstd/error.rs b/src/libstd/error.rs
index 3d0c96585b552..4edb897350efd 100644
--- a/src/libstd/error.rs
+++ b/src/libstd/error.rs
@@ -51,13 +51,13 @@
 // coherence challenge (e.g., specialization, neg impls, etc) we can
 // reconsider what crate these items belong in.
 
-use alloc::allocator;
 use any::TypeId;
 use borrow::Cow;
 use cell;
 use char;
 use core::array;
 use fmt::{self, Debug, Display};
+use heap::{AllocErr, CannotReallocInPlace};
 use mem::transmute;
 use num;
 use str;
@@ -241,18 +241,18 @@ impl Error for ! {
 #[unstable(feature = "allocator_api",
            reason = "the precise API and guarantees it provides may be tweaked.",
            issue = "32838")]
-impl Error for allocator::AllocErr {
+impl Error for AllocErr {
     fn description(&self) -> &str {
-        allocator::AllocErr::description(self)
+        AllocErr::description(self)
     }
 }
 
 #[unstable(feature = "allocator_api",
            reason = "the precise API and guarantees it provides may be tweaked.",
            issue = "32838")]
-impl Error for allocator::CannotReallocInPlace {
+impl Error for CannotReallocInPlace {
     fn description(&self) -> &str {
-        allocator::CannotReallocInPlace::description(self)
+        CannotReallocInPlace::description(self)
     }
 }
 
diff --git a/src/libstd/heap.rs b/src/libstd/heap.rs
index 2cf0601808705..b42a1052c49f7 100644
--- a/src/libstd/heap.rs
+++ b/src/libstd/heap.rs
@@ -12,7 +12,7 @@
 
 #![unstable(issue = "32838", feature = "allocator_api")]
 
-pub use alloc::heap::Heap;
+pub use alloc_crate::heap::Heap;
 pub use alloc_system::System;
 #[doc(inline)] pub use core::heap::*;
 
diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs
index c82d600e4a184..ef4205e7a620d 100644
--- a/src/libstd/lib.rs
+++ b/src/libstd/lib.rs
@@ -351,7 +351,7 @@ extern crate core as __core;
 
 #[macro_use]
 #[macro_reexport(vec, format)]
-extern crate alloc;
+extern crate alloc as alloc_crate;
 extern crate alloc_system;
 #[doc(masked)]
 extern crate libc;
@@ -437,21 +437,21 @@ pub use core::u32;
 #[stable(feature = "rust1", since = "1.0.0")]
 pub use core::u64;
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc::boxed;
+pub use alloc_crate::boxed;
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc::rc;
+pub use alloc_crate::rc;
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc::borrow;
+pub use alloc_crate::borrow;
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc::fmt;
+pub use alloc_crate::fmt;
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc::slice;
+pub use alloc_crate::slice;
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc::str;
+pub use alloc_crate::str;
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc::string;
+pub use alloc_crate::string;
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc::vec;
+pub use alloc_crate::vec;
 #[stable(feature = "rust1", since = "1.0.0")]
 pub use core::char;
 #[stable(feature = "i128", since = "1.26.0")]
diff --git a/src/libstd/sync/mod.rs b/src/libstd/sync/mod.rs
index 289b47b34847f..642b284c6c794 100644
--- a/src/libstd/sync/mod.rs
+++ b/src/libstd/sync/mod.rs
@@ -18,7 +18,7 @@
 #![stable(feature = "rust1", since = "1.0.0")]
 
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use alloc::arc::{Arc, Weak};
+pub use alloc_crate::arc::{Arc, Weak};
 #[stable(feature = "rust1", since = "1.0.0")]
 pub use core::sync::atomic;
 
diff --git a/src/libstd/sync/mpsc/mpsc_queue.rs b/src/libstd/sync/mpsc/mpsc_queue.rs
index 296773d20f614..df945ac3859f9 100644
--- a/src/libstd/sync/mpsc/mpsc_queue.rs
+++ b/src/libstd/sync/mpsc/mpsc_queue.rs
@@ -23,10 +23,9 @@
 
 pub use self::PopResult::*;
 
-use alloc::boxed::Box;
 use core::ptr;
 use core::cell::UnsafeCell;
-
+use boxed::Box;
 use sync::atomic::{AtomicPtr, Ordering};
 
 /// A result of the `pop` function.
diff --git a/src/libstd/sync/mpsc/spsc_queue.rs b/src/libstd/sync/mpsc/spsc_queue.rs
index cc4be92276a3b..9482f6958b311 100644
--- a/src/libstd/sync/mpsc/spsc_queue.rs
+++ b/src/libstd/sync/mpsc/spsc_queue.rs
@@ -16,7 +16,7 @@
 
 // http://www.1024cores.net/home/lock-free-algorithms/queues/unbounded-spsc-queue
 
-use alloc::boxed::Box;
+use boxed::Box;
 use core::ptr;
 use core::cell::UnsafeCell;
 
diff --git a/src/libstd/sys/cloudabi/thread.rs b/src/libstd/sys/cloudabi/thread.rs
index a22d9053b6964..5d66936b2a4ff 100644
--- a/src/libstd/sys/cloudabi/thread.rs
+++ b/src/libstd/sys/cloudabi/thread.rs
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use alloc::boxed::FnBox;
+use boxed::FnBox;
 use cmp;
 use ffi::CStr;
 use io;
diff --git a/src/libstd/sys/redox/thread.rs b/src/libstd/sys/redox/thread.rs
index f20350269b7cf..110d46ca3ab03 100644
--- a/src/libstd/sys/redox/thread.rs
+++ b/src/libstd/sys/redox/thread.rs
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use alloc::boxed::FnBox;
+use boxed::FnBox;
 use ffi::CStr;
 use io;
 use mem;
diff --git a/src/libstd/sys/unix/thread.rs b/src/libstd/sys/unix/thread.rs
index 2db3d4a5744e7..9e38880803026 100644
--- a/src/libstd/sys/unix/thread.rs
+++ b/src/libstd/sys/unix/thread.rs
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use alloc::boxed::FnBox;
+use boxed::FnBox;
 use cmp;
 use ffi::CStr;
 use io;
diff --git a/src/libstd/sys/wasm/thread.rs b/src/libstd/sys/wasm/thread.rs
index 7345843b975e4..728e678a2e8c1 100644
--- a/src/libstd/sys/wasm/thread.rs
+++ b/src/libstd/sys/wasm/thread.rs
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use alloc::boxed::FnBox;
+use boxed::FnBox;
 use ffi::CStr;
 use io;
 use sys::{unsupported, Void};
diff --git a/src/libstd/sys/windows/process.rs b/src/libstd/sys/windows/process.rs
index bd5507e8f8967..be442f413740a 100644
--- a/src/libstd/sys/windows/process.rs
+++ b/src/libstd/sys/windows/process.rs
@@ -31,7 +31,7 @@ use sys::stdio;
 use sys::cvt;
 use sys_common::{AsInner, FromInner, IntoInner};
 use sys_common::process::{CommandEnv, EnvKey};
-use alloc::borrow::Borrow;
+use borrow::Borrow;
 
 ////////////////////////////////////////////////////////////////////////////////
 // Command
diff --git a/src/libstd/sys/windows/thread.rs b/src/libstd/sys/windows/thread.rs
index 4b3d1b586b570..b6f63303dc2f0 100644
--- a/src/libstd/sys/windows/thread.rs
+++ b/src/libstd/sys/windows/thread.rs
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use alloc::boxed::FnBox;
+use boxed::FnBox;
 use io;
 use ffi::CStr;
 use mem;
diff --git a/src/libstd/sys_common/at_exit_imp.rs b/src/libstd/sys_common/at_exit_imp.rs
index ce6fd4cb0754b..26da51c9825fb 100644
--- a/src/libstd/sys_common/at_exit_imp.rs
+++ b/src/libstd/sys_common/at_exit_imp.rs
@@ -12,7 +12,7 @@
 //!
 //! Documentation can be found on the `rt::at_exit` function.
 
-use alloc::boxed::FnBox;
+use boxed::FnBox;
 use ptr;
 use sys_common::mutex::Mutex;
 
diff --git a/src/libstd/sys_common/process.rs b/src/libstd/sys_common/process.rs
index d0c5951bd6c0a..ddf0ebe603e08 100644
--- a/src/libstd/sys_common/process.rs
+++ b/src/libstd/sys_common/process.rs
@@ -14,7 +14,7 @@
 use ffi::{OsStr, OsString};
 use env;
 use collections::BTreeMap;
-use alloc::borrow::Borrow;
+use borrow::Borrow;
 
 pub trait EnvKey:
     From<OsString> + Into<OsString> +
diff --git a/src/libstd/sys_common/thread.rs b/src/libstd/sys_common/thread.rs
index f1379b6ec6375..da6f58ef6bb77 100644
--- a/src/libstd/sys_common/thread.rs
+++ b/src/libstd/sys_common/thread.rs
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use alloc::boxed::FnBox;
+use boxed::FnBox;
 use env;
 use sync::atomic::{self, Ordering};
 use sys::stack_overflow;

From 09e8db1e4f33ec82316e1eeaaedad94fe6e1acb5 Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Tue, 3 Apr 2018 14:41:15 +0200
Subject: [PATCH 05/27] Rename `heap` modules in the core, alloc, and std
 crates to `alloc`

---
 src/liballoc/{heap.rs => alloc.rs} | 0
 src/liballoc/lib.rs                | 8 ++++++--
 src/libcore/{heap.rs => alloc.rs}  | 0
 src/libcore/lib.rs                 | 6 +++++-
 src/libstd/{heap.rs => alloc.rs}   | 0
 src/libstd/lib.rs                  | 6 +++++-
 6 files changed, 16 insertions(+), 4 deletions(-)
 rename src/liballoc/{heap.rs => alloc.rs} (100%)
 rename src/libcore/{heap.rs => alloc.rs} (100%)
 rename src/libstd/{heap.rs => alloc.rs} (100%)

diff --git a/src/liballoc/heap.rs b/src/liballoc/alloc.rs
similarity index 100%
rename from src/liballoc/heap.rs
rename to src/liballoc/alloc.rs
diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs
index 5ca3944234270..617bc5c52b3a8 100644
--- a/src/liballoc/lib.rs
+++ b/src/liballoc/lib.rs
@@ -57,7 +57,7 @@
 //!
 //! ## Heap interfaces
 //!
-//! The [`heap`](heap/index.html) module defines the low-level interface to the
+//! The [`alloc`](alloc/index.html) module defines the low-level interface to the
 //! default global allocator. It is not compatible with the libc allocator API.
 
 #![allow(unused_attributes)]
@@ -145,7 +145,11 @@ pub use core::heap as allocator;
 
 // Heaps provided for low-level allocation strategies
 
-pub mod heap;
+pub mod alloc;
+
+#[unstable(feature = "allocator_api", issue = "32838")]
+#[rustc_deprecated(since = "1.27.0", reason = "module renamed to `alloc`")]
+pub use alloc as heap;
 
 // Primitive types using the heaps above
 
diff --git a/src/libcore/heap.rs b/src/libcore/alloc.rs
similarity index 100%
rename from src/libcore/heap.rs
rename to src/libcore/alloc.rs
diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs
index 722a9de215c45..56d4e65d3ac42 100644
--- a/src/libcore/lib.rs
+++ b/src/libcore/lib.rs
@@ -185,7 +185,11 @@ pub mod unicode;
 
 /* Heap memory allocator trait */
 #[allow(missing_docs)]
-pub mod heap;
+pub mod alloc;
+
+#[unstable(feature = "allocator_api", issue = "32838")]
+#[rustc_deprecated(since = "1.27.0", reason = "module renamed to `alloc`")]
+pub use alloc as heap;
 
 // note: does not need to be public
 mod iter_private;
diff --git a/src/libstd/heap.rs b/src/libstd/alloc.rs
similarity index 100%
rename from src/libstd/heap.rs
rename to src/libstd/alloc.rs
diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs
index ef4205e7a620d..3a99e845a1605 100644
--- a/src/libstd/lib.rs
+++ b/src/libstd/lib.rs
@@ -477,7 +477,11 @@ pub mod path;
 pub mod process;
 pub mod sync;
 pub mod time;
-pub mod heap;
+pub mod alloc;
+
+#[unstable(feature = "allocator_api", issue = "32838")]
+#[rustc_deprecated(since = "1.27.0", reason = "module renamed to `alloc`")]
+pub use alloc as heap;
 
 // Platform-abstraction modules
 #[macro_use]

From 743c29bdc5b0a75c648e1317aa5d1d816007f176 Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Tue, 3 Apr 2018 21:05:10 +0200
Subject: [PATCH 06/27] Actually deprecate heap modules.

---
 src/liballoc/alloc.rs |  2 +-
 src/liballoc/lib.rs   | 10 ++++++++--
 src/libcore/lib.rs    |  5 ++++-
 src/libstd/alloc.rs   |  6 +++---
 src/libstd/lib.rs     |  5 ++++-
 5 files changed, 20 insertions(+), 8 deletions(-)

diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index 000c0123d9fed..2477166966e79 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -20,7 +20,7 @@ use core::mem::{self, ManuallyDrop};
 use core::usize;
 
 #[doc(inline)]
-pub use core::heap::*;
+pub use core::alloc::*;
 
 #[doc(hidden)]
 pub mod __core {
diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs
index 617bc5c52b3a8..066698a71df21 100644
--- a/src/liballoc/lib.rs
+++ b/src/liballoc/lib.rs
@@ -141,7 +141,10 @@ mod macros;
 
 #[rustc_deprecated(since = "1.27.0", reason = "use the heap module in core, alloc, or std instead")]
 #[unstable(feature = "allocator_api", issue = "32838")]
-pub use core::heap as allocator;
+/// Use the `alloc` module instead.
+pub mod allocator {
+    pub use alloc::*;
+}
 
 // Heaps provided for low-level allocation strategies
 
@@ -149,7 +152,10 @@ pub mod alloc;
 
 #[unstable(feature = "allocator_api", issue = "32838")]
 #[rustc_deprecated(since = "1.27.0", reason = "module renamed to `alloc`")]
-pub use alloc as heap;
+/// Use the `alloc` module instead.
+pub mod heap {
+    pub use alloc::*;
+}
 
 // Primitive types using the heaps above
 
diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs
index 56d4e65d3ac42..5ebd9e4334cd8 100644
--- a/src/libcore/lib.rs
+++ b/src/libcore/lib.rs
@@ -189,7 +189,10 @@ pub mod alloc;
 
 #[unstable(feature = "allocator_api", issue = "32838")]
 #[rustc_deprecated(since = "1.27.0", reason = "module renamed to `alloc`")]
-pub use alloc as heap;
+/// Use the `alloc` module instead.
+pub mod heap {
+    pub use alloc::*;
+}
 
 // note: does not need to be public
 mod iter_private;
diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs
index b42a1052c49f7..77be3e52d7647 100644
--- a/src/libstd/alloc.rs
+++ b/src/libstd/alloc.rs
@@ -12,9 +12,9 @@
 
 #![unstable(issue = "32838", feature = "allocator_api")]
 
-pub use alloc_crate::heap::Heap;
-pub use alloc_system::System;
-#[doc(inline)] pub use core::heap::*;
+#[doc(inline)] pub use alloc_crate::alloc::Heap;
+#[doc(inline)] pub use alloc_system::System;
+#[doc(inline)] pub use core::alloc::*;
 
 #[cfg(not(test))]
 #[doc(hidden)]
diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs
index 3a99e845a1605..25ba75fd35eb8 100644
--- a/src/libstd/lib.rs
+++ b/src/libstd/lib.rs
@@ -481,7 +481,10 @@ pub mod alloc;
 
 #[unstable(feature = "allocator_api", issue = "32838")]
 #[rustc_deprecated(since = "1.27.0", reason = "module renamed to `alloc`")]
-pub use alloc as heap;
+/// Use the `alloc` module instead.
+pub mod heap {
+    pub use alloc::*;
+}
 
 // Platform-abstraction modules
 #[macro_use]

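The effect on downstream code, sketched below: the old `heap` paths keep resolving through the deprecated wrapper modules, while the renamed `alloc` paths are the ones to migrate to (hypothetical caller, nightly feature gate assumed):

#![feature(allocator_api)]

// The old path still resolves, but now goes through the deprecated wrapper module.
#[allow(deprecated)]
use std::heap::Layout as OldLayout;

// The renamed module is the path to use going forward.
use std::alloc::Layout;

fn main() {
    assert_eq!(OldLayout::new::<u64>().size(), Layout::new::<u64>().size());
}
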
From 88ebd2d752831860d8824849cf6f5ae656a2c3eb Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Tue, 3 Apr 2018 14:43:34 +0200
Subject: [PATCH 07/27] Rename the Heap type to Global
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

… since it is the entry point for what’s registered with `#[global_allocator]`
---
 src/liballoc/alloc.rs | 23 ++++++++++++++---------
 1 file changed, 14 insertions(+), 9 deletions(-)

diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index 2477166966e79..1bd95cfd08c11 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -77,9 +77,14 @@ extern "Rust" {
 }
 
 #[derive(Copy, Clone, Default, Debug)]
-pub struct Heap;
+pub struct Global;
 
-unsafe impl Alloc for Heap {
+#[unstable(feature = "allocator_api", issue = "32838")]
+#[rustc_deprecated(since = "1.27.0", reason = "type renamed to `Global`")]
+pub use self::Global as Heap;
+
+
+unsafe impl Alloc for Global {
     #[inline]
     unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
         let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
@@ -240,8 +245,8 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
         align as *mut u8
     } else {
         let layout = Layout::from_size_align_unchecked(size, align);
-        Heap.alloc(layout).unwrap_or_else(|err| {
-            Heap.oom(err)
+        Global.alloc(layout).unwrap_or_else(|err| {
+            Global.oom(err)
         })
     }
 }
@@ -254,7 +259,7 @@ pub(crate) unsafe fn box_free<T: ?Sized>(ptr: *mut T) {
     // We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
     if size != 0 {
         let layout = Layout::from_size_align_unchecked(size, align);
-        Heap.dealloc(ptr as *mut u8, layout);
+        Global.dealloc(ptr as *mut u8, layout);
     }
 }
 
@@ -263,14 +268,14 @@ mod tests {
     extern crate test;
     use self::test::Bencher;
     use boxed::Box;
-    use heap::{Heap, Alloc, Layout};
+    use heap::{Global, Alloc, Layout};
 
     #[test]
     fn allocate_zeroed() {
         unsafe {
             let layout = Layout::from_size_align(1024, 1).unwrap();
-            let ptr = Heap.alloc_zeroed(layout.clone())
-                .unwrap_or_else(|e| Heap.oom(e));
+            let ptr = Global.alloc_zeroed(layout.clone())
+                .unwrap_or_else(|e| Global.oom(e));
 
             let end = ptr.offset(layout.size() as isize);
             let mut i = ptr;
@@ -278,7 +283,7 @@ mod tests {
                 assert_eq!(*i, 0);
                 i = i.offset(1);
             }
-            Heap.dealloc(ptr, layout);
+            Global.dealloc(ptr, layout);
         }
     }
 

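In the same spirit as the test module above, a sketch of the caller-side change this rename implies; the `pub use self::Global as Heap` re-export keeps the old spelling compiling (paths as seen from inside liballoc at this point in the series):

use alloc::{Global, Alloc, Layout};
#[allow(deprecated)]
use alloc::Heap;

unsafe fn old_and_new(layout: Layout) {
    // Pre-rename spelling, still valid through the re-export:
    let a = Heap.alloc(layout.clone()).unwrap_or_else(|e| Heap.oom(e));
    Heap.dealloc(a, layout.clone());

    // Post-rename spelling:
    let b = Global.alloc(layout.clone()).unwrap_or_else(|e| Global.oom(e));
    Global.dealloc(b, layout);
}
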
From e521b8b472dfe058f6d0f62f2e1ab5f291c220ee Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Tue, 3 Apr 2018 21:15:06 +0200
Subject: [PATCH 08/27] Actually deprecate the Heap type

---
 src/liballoc/alloc.rs                |  8 ++++++--
 src/liballoc/arc.rs                  | 13 ++++++-------
 src/liballoc/btree/node.rs           | 13 ++++++-------
 src/liballoc/raw_vec.rs              | 23 +++++++++++------------
 src/liballoc/rc.rs                   | 13 ++++++-------
 src/liballoc/tests/heap.rs           |  4 ++--
 src/libstd/alloc.rs                  |  3 ++-
 src/libstd/collections/hash/map.rs   |  4 ++--
 src/libstd/collections/hash/table.rs | 12 ++++++------
 9 files changed, 47 insertions(+), 46 deletions(-)

diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index 1bd95cfd08c11..12ee770190308 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -81,8 +81,12 @@ pub struct Global;
 
 #[unstable(feature = "allocator_api", issue = "32838")]
 #[rustc_deprecated(since = "1.27.0", reason = "type renamed to `Global`")]
-pub use self::Global as Heap;
+pub type Heap = Global;
 
+#[unstable(feature = "allocator_api", issue = "32838")]
+#[rustc_deprecated(since = "1.27.0", reason = "type renamed to `Global`")]
+#[allow(non_upper_case_globals)]
+pub const Heap: Global = Global;
 
 unsafe impl Alloc for Global {
     #[inline]
@@ -268,7 +272,7 @@ mod tests {
     extern crate test;
     use self::test::Bencher;
     use boxed::Box;
-    use heap::{Global, Alloc, Layout};
+    use alloc::{Global, Alloc, Layout};
 
     #[test]
     fn allocate_zeroed() {
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index ccf2e2768d1a9..d63ed24aa4f69 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -21,7 +21,6 @@ use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
 use core::borrow;
 use core::fmt;
 use core::cmp::Ordering;
-use core::heap::{Alloc, Layout};
 use core::intrinsics::abort;
 use core::mem::{self, align_of_val, size_of_val, uninitialized};
 use core::ops::Deref;
@@ -32,7 +31,7 @@ use core::hash::{Hash, Hasher};
 use core::{isize, usize};
 use core::convert::From;
 
-use heap::{Heap, box_free};
+use alloc::{Global, Alloc, Layout, box_free};
 use boxed::Box;
 use string::String;
 use vec::Vec;
@@ -521,7 +520,7 @@ impl<T: ?Sized> Arc<T> {
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+            Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
         }
     }
 
@@ -555,8 +554,8 @@ impl<T: ?Sized> Arc<T> {
 
         let layout = Layout::for_value(&*fake_ptr);
 
-        let mem = Heap.alloc(layout)
-            .unwrap_or_else(|e| Heap.oom(e));
+        let mem = Global.alloc(layout)
+            .unwrap_or_else(|e| Global.oom(e));
 
         // Initialize the real ArcInner
         let inner = set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>;
@@ -640,7 +639,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
                     let slice = from_raw_parts_mut(self.elems, self.n_elems);
                     ptr::drop_in_place(slice);
 
-                    Heap.dealloc(self.mem, self.layout.clone());
+                    Global.dealloc(self.mem, self.layout.clone());
                 }
             }
         }
@@ -1161,7 +1160,7 @@ impl<T: ?Sized> Drop for Weak<T> {
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
-                Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+                Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
             }
         }
     }
diff --git a/src/liballoc/btree/node.rs b/src/liballoc/btree/node.rs
index 49109d522e965..8e23228bd28fe 100644
--- a/src/liballoc/btree/node.rs
+++ b/src/liballoc/btree/node.rs
@@ -41,14 +41,13 @@
 // - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
 //   This implies that even an empty internal node has at least one edge.
 
-use core::heap::{Alloc, Layout};
 use core::marker::PhantomData;
 use core::mem;
 use core::ptr::{self, Unique, NonNull};
 use core::slice;
 
+use alloc::{Global, Alloc, Layout};
 use boxed::Box;
-use heap::Heap;
 
 const B: usize = 6;
 pub const MIN_LEN: usize = B - 1;
@@ -250,7 +249,7 @@ impl<K, V> Root<K, V> {
         self.as_mut().as_leaf_mut().parent = ptr::null();
 
         unsafe {
-            Heap.dealloc(top, Layout::new::<InternalNode<K, V>>());
+            Global.dealloc(top, Layout::new::<InternalNode<K, V>>());
         }
     }
 }
@@ -436,7 +435,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
     > {
         let ptr = self.as_leaf() as *const LeafNode<K, V> as *const u8 as *mut u8;
         let ret = self.ascend().ok();
-        Heap.dealloc(ptr, Layout::new::<LeafNode<K, V>>());
+        Global.dealloc(ptr, Layout::new::<LeafNode<K, V>>());
         ret
     }
 }
@@ -457,7 +456,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
     > {
         let ptr = self.as_internal() as *const InternalNode<K, V> as *const u8 as *mut u8;
         let ret = self.ascend().ok();
-        Heap.dealloc(ptr, Layout::new::<InternalNode<K, V>>());
+        Global.dealloc(ptr, Layout::new::<InternalNode<K, V>>());
         ret
     }
 }
@@ -1239,12 +1238,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
                     ).correct_parent_link();
                 }
 
-                Heap.dealloc(
+                Global.dealloc(
                     right_node.node.as_ptr() as *mut u8,
                     Layout::new::<InternalNode<K, V>>(),
                 );
             } else {
-                Heap.dealloc(
+                Global.dealloc(
                     right_node.node.as_ptr() as *mut u8,
                     Layout::new::<LeafNode<K, V>>(),
                 );
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 3edce8aebdf39..51f39dc6cc748 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -8,13 +8,12 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use alloc::{Alloc, Layout, Global};
 use core::cmp;
-use core::heap::{Alloc, Layout};
 use core::mem;
 use core::ops::Drop;
 use core::ptr::{self, Unique};
 use core::slice;
-use heap::Heap;
 use super::boxed::Box;
 use super::allocator::CollectionAllocErr;
 use super::allocator::CollectionAllocErr::*;
@@ -47,7 +46,7 @@ use super::allocator::CollectionAllocErr::*;
 /// field. This allows zero-sized types to not be special-cased by consumers of
 /// this type.
 #[allow(missing_debug_implementations)]
-pub struct RawVec<T, A: Alloc = Heap> {
+pub struct RawVec<T, A: Alloc = Global> {
     ptr: Unique<T>,
     cap: usize,
     a: A,
@@ -114,14 +113,14 @@ impl<T, A: Alloc> RawVec<T, A> {
     }
 }
 
-impl<T> RawVec<T, Heap> {
+impl<T> RawVec<T, Global> {
     /// Creates the biggest possible RawVec (on the system heap)
     /// without allocating. If T has positive size, then this makes a
     /// RawVec with capacity 0. If T has 0 size, then it makes a
     /// RawVec with capacity `usize::MAX`. Useful for implementing
     /// delayed allocation.
     pub fn new() -> Self {
-        Self::new_in(Heap)
+        Self::new_in(Global)
     }
 
     /// Creates a RawVec (on the system heap) with exactly the
@@ -141,13 +140,13 @@ impl<T> RawVec<T, Heap> {
     /// Aborts on OOM
     #[inline]
     pub fn with_capacity(cap: usize) -> Self {
-        RawVec::allocate_in(cap, false, Heap)
+        RawVec::allocate_in(cap, false, Global)
     }
 
     /// Like `with_capacity` but guarantees the buffer is zeroed.
     #[inline]
     pub fn with_capacity_zeroed(cap: usize) -> Self {
-        RawVec::allocate_in(cap, true, Heap)
+        RawVec::allocate_in(cap, true, Global)
     }
 }
 
@@ -168,7 +167,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     }
 }
 
-impl<T> RawVec<T, Heap> {
+impl<T> RawVec<T, Global> {
     /// Reconstitutes a RawVec from a pointer, capacity.
     ///
     /// # Undefined Behavior
@@ -180,7 +179,7 @@ impl<T> RawVec<T, Heap> {
         RawVec {
             ptr: Unique::new_unchecked(ptr),
             cap,
-            a: Heap,
+            a: Global,
         }
     }
 
@@ -678,7 +677,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     }
 }
 
-impl<T> RawVec<T, Heap> {
+impl<T> RawVec<T, Global> {
     /// Converts the entire buffer into `Box<[T]>`.
     ///
     /// While it is not *strictly* Undefined Behavior to call
@@ -763,13 +762,13 @@ mod tests {
                 if size > self.fuel {
                     return Err(AllocErr::Unsupported { details: "fuel exhausted" });
                 }
-                match Heap.alloc(layout) {
+                match Global.alloc(layout) {
                     ok @ Ok(_) => { self.fuel -= size; ok }
                     err @ Err(_) => err,
                 }
             }
             unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-                Heap.dealloc(ptr, layout)
+                Global.dealloc(ptr, layout)
             }
         }
 
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 8bdc57f96a6d5..c134b181158f5 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -250,7 +250,6 @@ use core::cell::Cell;
 use core::cmp::Ordering;
 use core::fmt;
 use core::hash::{Hash, Hasher};
-use core::heap::{Alloc, Layout};
 use core::intrinsics::abort;
 use core::marker;
 use core::marker::{Unsize, PhantomData};
@@ -260,7 +259,7 @@ use core::ops::CoerceUnsized;
 use core::ptr::{self, NonNull};
 use core::convert::From;
 
-use heap::{Heap, box_free};
+use alloc::{Global, Alloc, Layout, box_free};
 use string::String;
 use vec::Vec;
 
@@ -668,8 +667,8 @@ impl<T: ?Sized> Rc<T> {
 
         let layout = Layout::for_value(&*fake_ptr);
 
-        let mem = Heap.alloc(layout)
-            .unwrap_or_else(|e| Heap.oom(e));
+        let mem = Global.alloc(layout)
+            .unwrap_or_else(|e| Global.oom(e));
 
         // Initialize the real RcBox
         let inner = set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>;
@@ -752,7 +751,7 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
                     let slice = from_raw_parts_mut(self.elems, self.n_elems);
                     ptr::drop_in_place(slice);
 
-                    Heap.dealloc(self.mem, self.layout.clone());
+                    Global.dealloc(self.mem, self.layout.clone());
                 }
             }
         }
@@ -847,7 +846,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
                 self.dec_weak();
 
                 if self.weak() == 0 {
-                    Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
+                    Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
                 }
             }
         }
@@ -1273,7 +1272,7 @@ impl<T: ?Sized> Drop for Weak<T> {
             // the weak count starts at 1, and will only go to zero if all
             // the strong pointers have disappeared.
             if self.weak() == 0 {
-                Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
+                Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
             }
         }
     }
diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs
index d3ce12056bb49..328131e2fef7a 100644
--- a/src/liballoc/tests/heap.rs
+++ b/src/liballoc/tests/heap.rs
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 use alloc_system::System;
-use std::heap::{Heap, Alloc, Layout};
+use std::alloc::{Global, Alloc, Layout};
 
 /// https://github.com/rust-lang/rust/issues/45955
 ///
@@ -22,7 +22,7 @@ fn alloc_system_overaligned_request() {
 
 #[test]
 fn std_heap_overaligned_request() {
-    check_overalign_requests(Heap)
+    check_overalign_requests(Global)
 }
 
 fn check_overalign_requests<T: Alloc>(mut allocator: T) {
diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs
index 77be3e52d7647..eb0c960732d3c 100644
--- a/src/libstd/alloc.rs
+++ b/src/libstd/alloc.rs
@@ -12,7 +12,8 @@
 
 #![unstable(issue = "32838", feature = "allocator_api")]
 
-#[doc(inline)] pub use alloc_crate::alloc::Heap;
+#[doc(inline)] #[allow(deprecated)] pub use alloc_crate::alloc::Heap;
+#[doc(inline)] pub use alloc_crate::alloc::Global;
 #[doc(inline)] pub use alloc_system::System;
 #[doc(inline)] pub use core::alloc::*;
 
diff --git a/src/libstd/collections/hash/map.rs b/src/libstd/collections/hash/map.rs
index 73a5df8dc285f..c4ef9e62577cd 100644
--- a/src/libstd/collections/hash/map.rs
+++ b/src/libstd/collections/hash/map.rs
@@ -11,13 +11,13 @@
 use self::Entry::*;
 use self::VacantEntryState::*;
 
+use alloc::{Global, Alloc, CollectionAllocErr};
 use cell::Cell;
 use borrow::Borrow;
 use cmp::max;
 use fmt::{self, Debug};
 #[allow(deprecated)]
 use hash::{Hash, Hasher, BuildHasher, SipHasher13};
-use heap::{Heap, Alloc, CollectionAllocErr};
 use iter::{FromIterator, FusedIterator};
 use mem::{self, replace};
 use ops::{Deref, Index};
@@ -784,7 +784,7 @@ impl<K, V, S> HashMap<K, V, S>
     pub fn reserve(&mut self, additional: usize) {
         match self.try_reserve(additional) {
             Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-            Err(CollectionAllocErr::AllocErr(e)) => Heap.oom(e),
+            Err(CollectionAllocErr::AllocErr(e)) => Global.oom(e),
             Ok(()) => { /* yay */ }
          }
     }
diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs
index 878cd82a258d2..10bab5df8b543 100644
--- a/src/libstd/collections/hash/table.rs
+++ b/src/libstd/collections/hash/table.rs
@@ -8,9 +8,9 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use alloc::{Global, Alloc, Layout, CollectionAllocErr};
 use cmp;
 use hash::{BuildHasher, Hash, Hasher};
-use heap::{Heap, Alloc, Layout, CollectionAllocErr};
 use marker;
 use mem::{align_of, size_of, needs_drop};
 use mem;
@@ -754,7 +754,7 @@ impl<K, V> RawTable<K, V> {
             return Err(CollectionAllocErr::CapacityOverflow);
         }
 
-        let buffer = Heap.alloc(Layout::from_size_align(size, alignment)
+        let buffer = Global.alloc(Layout::from_size_align(size, alignment)
             .ok_or(CollectionAllocErr::CapacityOverflow)?)?;
 
         let hashes = buffer as *mut HashUint;
@@ -772,7 +772,7 @@ impl<K, V> RawTable<K, V> {
     unsafe fn new_uninitialized(capacity: usize) -> RawTable<K, V> {
         match Self::try_new_uninitialized(capacity) {
             Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-            Err(CollectionAllocErr::AllocErr(e)) => Heap.oom(e),
+            Err(CollectionAllocErr::AllocErr(e)) => Global.oom(e),
             Ok(table) => { table }
         }
     }
@@ -811,7 +811,7 @@ impl<K, V> RawTable<K, V> {
     pub fn new(capacity: usize) -> RawTable<K, V> {
         match Self::try_new(capacity) {
             Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-            Err(CollectionAllocErr::AllocErr(e)) => Heap.oom(e),
+            Err(CollectionAllocErr::AllocErr(e)) => Global.oom(e),
             Ok(table) => { table }
         }
     }
@@ -1185,8 +1185,8 @@ unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for RawTable<K, V> {
         debug_assert!(!oflo, "should be impossible");
 
         unsafe {
-            Heap.dealloc(self.hashes.ptr() as *mut u8,
-                         Layout::from_size_align(size, align).unwrap());
+            Global.dealloc(self.hashes.ptr() as *mut u8,
+                           Layout::from_size_align(size, align).unwrap());
             // Remember how everything was allocated out of one buffer
             // during initialization? We only need one call to free here.
         }

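With the std-side re-exports in place (and the `type`/`const` pairing above keeping the deprecated `Heap` name usable in both type and value positions), the point of the rename two patches back becomes visible: `Global` is the handle standard containers allocate through, and `#[global_allocator]` decides where those requests end up. A minimal sketch, assuming the nightly feature gates of this period:

#![feature(global_allocator, allocator_api)]

use std::alloc::System;

// Route default-allocator requests to the system allocator. `Box`, `Vec`,
// `HashMap`, etc. reach it through the zero-sized `Global` handle.
#[global_allocator]
static GLOBAL: System = System;

fn main() {
    // Allocated via `Global`, which forwards to whatever is registered above.
    let v: Vec<u32> = (0..1024).collect();
    assert_eq!(v.len(), 1024);
}
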
From 5e5a0c21fc1416e77ae8e4db74b93e3601241e22 Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Tue, 3 Apr 2018 20:58:50 +0200
Subject: [PATCH 09/27] Separate alloc::heap::Alloc trait for stage0
 #[global_allocator]

---
 src/Cargo.lock                   |  2 -
 src/liballoc/alloc.rs            |  5 --
 src/liballoc/heap.rs             | 98 ++++++++++++++++++++++++++++++++
 src/liballoc/lib.rs              |  6 ++
 src/liballoc_jemalloc/Cargo.toml |  1 -
 src/liballoc_jemalloc/lib.rs     |  2 +-
 src/liballoc_system/Cargo.toml   |  1 -
 src/liballoc_system/lib.rs       |  8 +--
 8 files changed, 109 insertions(+), 14 deletions(-)
 create mode 100644 src/liballoc/heap.rs

diff --git a/src/Cargo.lock b/src/Cargo.lock
index 6e7c4b67acf24..f573abadc3192 100644
--- a/src/Cargo.lock
+++ b/src/Cargo.lock
@@ -19,7 +19,6 @@ dependencies = [
 name = "alloc_jemalloc"
 version = "0.0.0"
 dependencies = [
- "alloc 0.0.0",
  "alloc_system 0.0.0",
  "build_helper 0.1.0",
  "cc 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -32,7 +31,6 @@ dependencies = [
 name = "alloc_system"
 version = "0.0.0"
 dependencies = [
- "alloc 0.0.0",
  "compiler_builtins 0.0.0",
  "core 0.0.0",
  "dlmalloc 0.0.0",
diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index 12ee770190308..00a8b2c0e259c 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -22,11 +22,6 @@ use core::usize;
 #[doc(inline)]
 pub use core::alloc::*;
 
-#[doc(hidden)]
-pub mod __core {
-    pub use core::*;
-}
-
 extern "Rust" {
     #[allocator]
     #[rustc_allocator_nounwind]
diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs
new file mode 100644
index 0000000000000..a44ff04bd1b2f
--- /dev/null
+++ b/src/liballoc/heap.rs
@@ -0,0 +1,98 @@
+// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub use alloc::{Excess, Layout, AllocErr, CannotReallocInPlace};
+use core::alloc::Alloc as CoreAlloc;
+
+#[doc(hidden)]
+pub mod __core {
+    pub use core::*;
+}
+
+/// Compatibility with older versions of #[global_allocator] during bootstrap
+pub unsafe trait Alloc {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr>;
+    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout);
+    fn oom(&mut self, err: AllocErr) -> !;
+    fn usable_size(&self, layout: &Layout) -> (usize, usize);
+    unsafe fn realloc(&mut self,
+                      ptr: *mut u8,
+                      layout: Layout,
+                      new_layout: Layout) -> Result<*mut u8, AllocErr>;
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr>;
+    unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr>;
+    unsafe fn realloc_excess(&mut self,
+                             ptr: *mut u8,
+                             layout: Layout,
+                             new_layout: Layout) -> Result<Excess, AllocErr>;
+    unsafe fn grow_in_place(&mut self,
+                            ptr: *mut u8,
+                            layout: Layout,
+                            new_layout: Layout) -> Result<(), CannotReallocInPlace>;
+    unsafe fn shrink_in_place(&mut self,
+                              ptr: *mut u8,
+                              layout: Layout,
+                              new_layout: Layout) -> Result<(), CannotReallocInPlace>;
+}
+
+#[allow(deprecated)]
+unsafe impl<T> Alloc for T where T: CoreAlloc {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        CoreAlloc::alloc(self, layout)
+    }
+
+    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+        CoreAlloc::dealloc(self, ptr, layout)
+    }
+
+    fn oom(&mut self, err: AllocErr) -> ! {
+        CoreAlloc::oom(self, err)
+    }
+
+    fn usable_size(&self, layout: &Layout) -> (usize, usize) {
+        CoreAlloc::usable_size(self, layout)
+    }
+
+    unsafe fn realloc(&mut self,
+                      ptr: *mut u8,
+                      layout: Layout,
+                      new_layout: Layout) -> Result<*mut u8, AllocErr> {
+        CoreAlloc::realloc(self, ptr, layout, new_layout)
+    }
+
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        CoreAlloc::alloc_zeroed(self, layout)
+    }
+
+    unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
+        CoreAlloc::alloc_excess(self, layout)
+    }
+
+    unsafe fn realloc_excess(&mut self,
+                             ptr: *mut u8,
+                             layout: Layout,
+                             new_layout: Layout) -> Result<Excess, AllocErr> {
+        CoreAlloc::realloc_excess(self, ptr, layout, new_layout)
+    }
+
+    unsafe fn grow_in_place(&mut self,
+                            ptr: *mut u8,
+                            layout: Layout,
+                            new_layout: Layout) -> Result<(), CannotReallocInPlace> {
+        CoreAlloc::grow_in_place(self, ptr, layout, new_layout)
+    }
+
+    unsafe fn shrink_in_place(&mut self,
+                              ptr: *mut u8,
+                              layout: Layout,
+                              new_layout: Layout) -> Result<(), CannotReallocInPlace> {
+        CoreAlloc::shrink_in_place(self, ptr, layout, new_layout)
+    }
+}
diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs
index 066698a71df21..f6598fe5e8969 100644
--- a/src/liballoc/lib.rs
+++ b/src/liballoc/lib.rs
@@ -153,10 +153,16 @@ pub mod alloc;
 #[unstable(feature = "allocator_api", issue = "32838")]
 #[rustc_deprecated(since = "1.27.0", reason = "module renamed to `alloc`")]
 /// Use the `alloc` module instead.
+#[cfg(not(stage0))]
 pub mod heap {
     pub use alloc::*;
 }
 
+#[unstable(feature = "allocator_api", issue = "32838")]
+#[rustc_deprecated(since = "1.27.0", reason = "module renamed to `alloc`")]
+#[cfg(stage0)]
+pub mod heap;
+
 // Primitive types using the heaps above
 
 // Need to conditionally define the mod from `boxed.rs` to avoid
diff --git a/src/liballoc_jemalloc/Cargo.toml b/src/liballoc_jemalloc/Cargo.toml
index fd4a45530463d..02435170374c5 100644
--- a/src/liballoc_jemalloc/Cargo.toml
+++ b/src/liballoc_jemalloc/Cargo.toml
@@ -12,7 +12,6 @@ test = false
 doc = false
 
 [dependencies]
-alloc = { path = "../liballoc" }
 alloc_system = { path = "../liballoc_system" }
 core = { path = "../libcore" }
 libc = { path = "../rustc/libc_shim" }
diff --git a/src/liballoc_jemalloc/lib.rs b/src/liballoc_jemalloc/lib.rs
index df7e3f61f5f1d..616181d99bcdc 100644
--- a/src/liballoc_jemalloc/lib.rs
+++ b/src/liballoc_jemalloc/lib.rs
@@ -32,7 +32,7 @@ pub use contents::*;
 mod contents {
     use core::ptr;
 
-    use core::heap::{Alloc, AllocErr, Layout};
+    use core::alloc::{Alloc, AllocErr, Layout};
     use alloc_system::System;
     use libc::{c_int, c_void, size_t};
 
diff --git a/src/liballoc_system/Cargo.toml b/src/liballoc_system/Cargo.toml
index 936e20a32e10e..c34e2f203a837 100644
--- a/src/liballoc_system/Cargo.toml
+++ b/src/liballoc_system/Cargo.toml
@@ -10,7 +10,6 @@ test = false
 doc = false
 
 [dependencies]
-alloc = { path = "../liballoc" }
 core = { path = "../libcore" }
 libc = { path = "../rustc/libc_shim" }
 compiler_builtins = { path = "../rustc/compiler_builtins_shim" }
diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs
index cdcb732f63547..2d5adca7fcbd5 100644
--- a/src/liballoc_system/lib.rs
+++ b/src/liballoc_system/lib.rs
@@ -41,7 +41,7 @@ const MIN_ALIGN: usize = 8;
 #[allow(dead_code)]
 const MIN_ALIGN: usize = 16;
 
-use core::heap::{Alloc, AllocErr, Layout, Excess, CannotReallocInPlace};
+use core::alloc::{Alloc, AllocErr, Layout, Excess, CannotReallocInPlace};
 
 #[unstable(feature = "allocator_api", issue = "32838")]
 pub struct System;
@@ -121,7 +121,7 @@ mod platform {
 
     use MIN_ALIGN;
     use System;
-    use core::heap::{Alloc, AllocErr, Layout};
+    use core::alloc::{Alloc, AllocErr, Layout};
 
     #[unstable(feature = "allocator_api", issue = "32838")]
     unsafe impl<'a> Alloc for &'a System {
@@ -283,7 +283,7 @@ mod platform {
 
     use MIN_ALIGN;
     use System;
-    use core::heap::{Alloc, AllocErr, Layout, CannotReallocInPlace};
+    use core::alloc::{Alloc, AllocErr, Layout, CannotReallocInPlace};
 
     type LPVOID = *mut u8;
     type HANDLE = LPVOID;
@@ -495,7 +495,7 @@ mod platform {
 mod platform {
     extern crate dlmalloc;
 
-    use core::heap::{Alloc, AllocErr, Layout, Excess, CannotReallocInPlace};
+    use core::alloc::{Alloc, AllocErr, Layout, Excess, CannotReallocInPlace};
     use System;
     use self::dlmalloc::GlobalDlmalloc;
 

From a4caac5e93b801411fb59eeafa399240a7aa5fec Mon Sep 17 00:00:00 2001
From: Alex Crichton <alex@alexcrichton.com>
Date: Mon, 9 Apr 2018 11:51:57 -0700
Subject: [PATCH 10/27] Update to most recent version of dlmalloc

Inline the definition of `GlobalAlloc` for `dlmalloc` on wasm and don't rely on
usage of unstable features in `dlmalloc` itself.
---
 src/Cargo.lock                     |  1 -
 src/dlmalloc                       |  2 +-
 src/liballoc_system/lib.rs         | 62 +++++++++---------------------
 src/rustc/dlmalloc_shim/Cargo.toml |  1 -
 4 files changed, 20 insertions(+), 46 deletions(-)

diff --git a/src/Cargo.lock b/src/Cargo.lock
index f573abadc3192..e5297d1482e8b 100644
--- a/src/Cargo.lock
+++ b/src/Cargo.lock
@@ -540,7 +540,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 name = "dlmalloc"
 version = "0.0.0"
 dependencies = [
- "alloc 0.0.0",
  "compiler_builtins 0.0.0",
  "core 0.0.0",
 ]
diff --git a/src/dlmalloc b/src/dlmalloc
index 9b2dcac06c3e2..c99638dc2ecfc 160000
--- a/src/dlmalloc
+++ b/src/dlmalloc
@@ -1 +1 @@
-Subproject commit 9b2dcac06c3e23235f8997b3c5f2325a6d3382df
+Subproject commit c99638dc2ecfc750cc1656f6edb2bd062c1e0981
diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs
index 2d5adca7fcbd5..6f928287ef24d 100644
--- a/src/liballoc_system/lib.rs
+++ b/src/liballoc_system/lib.rs
@@ -495,27 +495,35 @@ mod platform {
 mod platform {
     extern crate dlmalloc;
 
-    use core::alloc::{Alloc, AllocErr, Layout, Excess, CannotReallocInPlace};
+    use core::alloc::{Alloc, AllocErr, Layout};
     use System;
-    use self::dlmalloc::GlobalDlmalloc;
+
+    // No need for synchronization here as wasm is currently single-threaded
+    static mut DLMALLOC: dlmalloc::Dlmalloc = dlmalloc::DLMALLOC_INIT;
+
+    fn to_result(ptr: *mut u8) -> Result<*mut u8, AllocErr> {
+        if !ptr.is_null() {
+            Ok(ptr)
+        } else {
+            Err(AllocErr::Unsupported { details: "" })
+        }
+    }
 
     #[unstable(feature = "allocator_api", issue = "32838")]
     unsafe impl<'a> Alloc for &'a System {
         #[inline]
         unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-            GlobalDlmalloc.alloc(layout)
+            to_result(DLMALLOC.malloc(layout.size(), layout.align()))
         }
 
         #[inline]
-        unsafe fn alloc_zeroed(&mut self, layout: Layout)
-            -> Result<*mut u8, AllocErr>
-        {
-            GlobalDlmalloc.alloc_zeroed(layout)
+        unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+            to_result(DLMALLOC.calloc(layout.size(), layout.align()))
         }
 
         #[inline]
         unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-            GlobalDlmalloc.dealloc(ptr, layout)
+            DLMALLOC.free(ptr, layout.size(), layout.align())
         }
 
         #[inline]
@@ -523,41 +531,9 @@ mod platform {
                           ptr: *mut u8,
                           old_layout: Layout,
                           new_layout: Layout) -> Result<*mut u8, AllocErr> {
-            GlobalDlmalloc.realloc(ptr, old_layout, new_layout)
-        }
-
-        #[inline]
-        fn usable_size(&self, layout: &Layout) -> (usize, usize) {
-            GlobalDlmalloc.usable_size(layout)
-        }
-
-        #[inline]
-        unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
-            GlobalDlmalloc.alloc_excess(layout)
-        }
-
-        #[inline]
-        unsafe fn realloc_excess(&mut self,
-                                 ptr: *mut u8,
-                                 layout: Layout,
-                                 new_layout: Layout) -> Result<Excess, AllocErr> {
-            GlobalDlmalloc.realloc_excess(ptr, layout, new_layout)
-        }
-
-        #[inline]
-        unsafe fn grow_in_place(&mut self,
-                                ptr: *mut u8,
-                                layout: Layout,
-                                new_layout: Layout) -> Result<(), CannotReallocInPlace> {
-            GlobalDlmalloc.grow_in_place(ptr, layout, new_layout)
-        }
-
-        #[inline]
-        unsafe fn shrink_in_place(&mut self,
-                                  ptr: *mut u8,
-                                  layout: Layout,
-                                  new_layout: Layout) -> Result<(), CannotReallocInPlace> {
-            GlobalDlmalloc.shrink_in_place(ptr, layout, new_layout)
+            to_result(DLMALLOC.realloc(
+                ptr, old_layout.size(), old_layout.align(), new_layout.size(),
+            ))
         }
     }
 }
diff --git a/src/rustc/dlmalloc_shim/Cargo.toml b/src/rustc/dlmalloc_shim/Cargo.toml
index d2fe159d80673..b6f8550829fe0 100644
--- a/src/rustc/dlmalloc_shim/Cargo.toml
+++ b/src/rustc/dlmalloc_shim/Cargo.toml
@@ -12,4 +12,3 @@ doc = false
 [dependencies]
 core = { path = "../../libcore" }
 compiler_builtins = { path = "../../rustc/compiler_builtins_shim" }
-alloc = { path = "../../liballoc" }

From ba7081a033de4981ccad1e1525c8b5191ce02208 Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Tue, 3 Apr 2018 15:41:09 +0200
Subject: [PATCH 11/27] Make AllocErr a zero-size unit struct

---
 src/liballoc/alloc.rs        | 32 ++++++++++----------
 src/liballoc/raw_vec.rs      |  2 +-
 src/liballoc_jemalloc/lib.rs | 25 ++--------------
 src/liballoc_system/lib.rs   | 24 ++++++---------
 src/libcore/alloc.rs         | 58 +++++-------------------------------
 src/libstd/alloc.rs          | 43 ++++++++------------------
 src/libstd/error.rs          |  2 +-
 7 files changed, 51 insertions(+), 135 deletions(-)

diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index 00a8b2c0e259c..b975ff6be5817 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -16,7 +16,7 @@
             issue = "32838")]
 
 use core::intrinsics::{min_align_of_val, size_of_val};
-use core::mem::{self, ManuallyDrop};
+use core::mem;
 use core::usize;
 
 #[doc(inline)]
@@ -86,12 +86,12 @@ pub const Heap: Global = Global;
 unsafe impl Alloc for Global {
     #[inline]
     unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
+        let mut err = AllocErr;
         let ptr = __rust_alloc(layout.size(),
                                layout.align(),
-                               &mut *err as *mut AllocErr as *mut u8);
+                               &mut err as *mut AllocErr as *mut u8);
         if ptr.is_null() {
-            Err(ManuallyDrop::into_inner(err))
+            Err(AllocErr)
         } else {
             Ok(ptr)
         }
@@ -129,15 +129,15 @@ unsafe impl Alloc for Global {
                       new_layout: Layout)
                       -> Result<*mut u8, AllocErr>
     {
-        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
+        let mut err = AllocErr;
         let ptr = __rust_realloc(ptr,
                                  layout.size(),
                                  layout.align(),
                                  new_layout.size(),
                                  new_layout.align(),
-                                 &mut *err as *mut AllocErr as *mut u8);
+                                 &mut err as *mut AllocErr as *mut u8);
         if ptr.is_null() {
-            Err(ManuallyDrop::into_inner(err))
+            Err(AllocErr)
         } else {
             mem::forget(err);
             Ok(ptr)
@@ -146,12 +146,12 @@ unsafe impl Alloc for Global {
 
     #[inline]
     unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
+        let mut err = AllocErr;
         let ptr = __rust_alloc_zeroed(layout.size(),
                                       layout.align(),
-                                      &mut *err as *mut AllocErr as *mut u8);
+                                      &mut err as *mut AllocErr as *mut u8);
         if ptr.is_null() {
-            Err(ManuallyDrop::into_inner(err))
+            Err(AllocErr)
         } else {
             Ok(ptr)
         }
@@ -159,14 +159,14 @@ unsafe impl Alloc for Global {
 
     #[inline]
     unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
-        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
+        let mut err = AllocErr;
         let mut size = 0;
         let ptr = __rust_alloc_excess(layout.size(),
                                       layout.align(),
                                       &mut size,
-                                      &mut *err as *mut AllocErr as *mut u8);
+                                      &mut err as *mut AllocErr as *mut u8);
         if ptr.is_null() {
-            Err(ManuallyDrop::into_inner(err))
+            Err(AllocErr)
         } else {
             Ok(Excess(ptr, size))
         }
@@ -177,7 +177,7 @@ unsafe impl Alloc for Global {
                              ptr: *mut u8,
                              layout: Layout,
                              new_layout: Layout) -> Result<Excess, AllocErr> {
-        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
+        let mut err = AllocErr;
         let mut size = 0;
         let ptr = __rust_realloc_excess(ptr,
                                         layout.size(),
@@ -185,9 +185,9 @@ unsafe impl Alloc for Global {
                                         new_layout.size(),
                                         new_layout.align(),
                                         &mut size,
-                                        &mut *err as *mut AllocErr as *mut u8);
+                                        &mut err as *mut AllocErr as *mut u8);
         if ptr.is_null() {
-            Err(ManuallyDrop::into_inner(err))
+            Err(AllocErr)
         } else {
             Ok(Excess(ptr, size))
         }
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 51f39dc6cc748..caedb971ddc6f 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -760,7 +760,7 @@ mod tests {
             unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
                 let size = layout.size();
                 if size > self.fuel {
-                    return Err(AllocErr::Unsupported { details: "fuel exhausted" });
+                    return Err(AllocErr);
                 }
                 match Global.alloc(layout) {
                     ok @ Ok(_) => { self.fuel -= size; ok }
diff --git a/src/liballoc_jemalloc/lib.rs b/src/liballoc_jemalloc/lib.rs
index 616181d99bcdc..59a7e87e1ec31 100644
--- a/src/liballoc_jemalloc/lib.rs
+++ b/src/liballoc_jemalloc/lib.rs
@@ -30,8 +30,6 @@ extern crate libc;
 pub use contents::*;
 #[cfg(not(dummy_jemalloc))]
 mod contents {
-    use core::ptr;
-
     use core::alloc::{Alloc, AllocErr, Layout};
     use alloc_system::System;
     use libc::{c_int, c_void, size_t};
@@ -106,14 +104,9 @@ mod contents {
     #[rustc_std_internal_symbol]
     pub unsafe extern fn __rde_alloc(size: usize,
                                      align: usize,
-                                     err: *mut u8) -> *mut u8 {
+                                     _err: *mut u8) -> *mut u8 {
         let flags = align_to_flags(align, size);
         let ptr = mallocx(size as size_t, flags) as *mut u8;
-        if ptr.is_null() {
-            let layout = Layout::from_size_align_unchecked(size, align);
-            ptr::write(err as *mut AllocErr,
-                       AllocErr::Exhausted { request: layout });
-        }
         ptr
     }
 
@@ -155,20 +148,13 @@ mod contents {
                                        old_align: usize,
                                        new_size: usize,
                                        new_align: usize,
-                                       err: *mut u8) -> *mut u8 {
+                                       _err: *mut u8) -> *mut u8 {
         if new_align != old_align {
-            ptr::write(err as *mut AllocErr,
-                       AllocErr::Unsupported { details: "can't change alignments" });
             return 0 as *mut u8
         }
 
         let flags = align_to_flags(new_align, new_size);
         let ptr = rallocx(ptr as *mut c_void, new_size, flags) as *mut u8;
-        if ptr.is_null() {
-            let layout = Layout::from_size_align_unchecked(new_size, new_align);
-            ptr::write(err as *mut AllocErr,
-                       AllocErr::Exhausted { request: layout });
-        }
         ptr
     }
 
@@ -176,18 +162,13 @@ mod contents {
     #[rustc_std_internal_symbol]
     pub unsafe extern fn __rde_alloc_zeroed(size: usize,
                                             align: usize,
-                                            err: *mut u8) -> *mut u8 {
+                                            _err: *mut u8) -> *mut u8 {
         let ptr = if align <= MIN_ALIGN && align <= size {
             calloc(size as size_t, 1) as *mut u8
         } else {
             let flags = align_to_flags(align, size) | MALLOCX_ZERO;
             mallocx(size as size_t, flags) as *mut u8
         };
-        if ptr.is_null() {
-            let layout = Layout::from_size_align_unchecked(size, align);
-            ptr::write(err as *mut AllocErr,
-                       AllocErr::Exhausted { request: layout });
-        }
         ptr
     }
 
diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs
index 6f928287ef24d..5dca05cf08537 100644
--- a/src/liballoc_system/lib.rs
+++ b/src/liballoc_system/lib.rs
@@ -133,9 +133,7 @@ mod platform {
                 #[cfg(target_os = "macos")]
                 {
                     if layout.align() > (1 << 31) {
-                        return Err(AllocErr::Unsupported {
-                            details: "requested alignment too large"
-                        })
+                        return Err(AllocErr)
                     }
                 }
                 aligned_malloc(&layout)
@@ -143,7 +141,7 @@ mod platform {
             if !ptr.is_null() {
                 Ok(ptr)
             } else {
-                Err(AllocErr::Exhausted { request: layout })
+                Err(AllocErr)
             }
         }
 
@@ -156,7 +154,7 @@ mod platform {
                 if !ptr.is_null() {
                     Ok(ptr)
                 } else {
-                    Err(AllocErr::Exhausted { request: layout })
+                    Err(AllocErr)
                 }
             } else {
                 let ret = self.alloc(layout.clone());
@@ -178,9 +176,7 @@ mod platform {
                           old_layout: Layout,
                           new_layout: Layout) -> Result<*mut u8, AllocErr> {
             if old_layout.align() != new_layout.align() {
-                return Err(AllocErr::Unsupported {
-                    details: "cannot change alignment on `realloc`",
-                })
+                return Err(AllocErr)
             }
 
             if new_layout.align() <= MIN_ALIGN  && new_layout.align() <= new_layout.size(){
@@ -188,7 +184,7 @@ mod platform {
                 if !ptr.is_null() {
                     Ok(ptr as *mut u8)
                 } else {
-                    Err(AllocErr::Exhausted { request: new_layout })
+                    Err(AllocErr)
                 }
             } else {
                 let res = self.alloc(new_layout.clone());
@@ -342,7 +338,7 @@ mod platform {
             }
         };
         if ptr.is_null() {
-            Err(AllocErr::Exhausted { request: layout })
+            Err(AllocErr)
         } else {
             Ok(ptr as *mut u8)
         }
@@ -382,9 +378,7 @@ mod platform {
                           old_layout: Layout,
                           new_layout: Layout) -> Result<*mut u8, AllocErr> {
             if old_layout.align() != new_layout.align() {
-                return Err(AllocErr::Unsupported {
-                    details: "cannot change alignment on `realloc`",
-                })
+                return Err(AllocErr)
             }
 
             if new_layout.align() <= MIN_ALIGN {
@@ -395,7 +389,7 @@ mod platform {
                 if !ptr.is_null() {
                     Ok(ptr as *mut u8)
                 } else {
-                    Err(AllocErr::Exhausted { request: new_layout })
+                    Err(AllocErr)
                 }
             } else {
                 let res = self.alloc(new_layout.clone());
@@ -505,7 +499,7 @@ mod platform {
         if !ptr.is_null() {
             Ok(ptr)
         } else {
-            Err(AllocErr::Unsupported { details: "" })
+            Err(AllocErr)
         }
     }
 
diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs
index 5c51bb2b51b9c..b6626ff9f26e8 100644
--- a/src/libcore/alloc.rs
+++ b/src/libcore/alloc.rs
@@ -320,50 +320,12 @@ impl Layout {
 /// something wrong when combining the given input arguments with this
 /// allocator.
 #[derive(Clone, PartialEq, Eq, Debug)]
-pub enum AllocErr {
-    /// Error due to hitting some resource limit or otherwise running
-    /// out of memory. This condition strongly implies that *some*
-    /// series of deallocations would allow a subsequent reissuing of
-    /// the original allocation request to succeed.
-    Exhausted { request: Layout },
-
-    /// Error due to allocator being fundamentally incapable of
-    /// satisfying the original request. This condition implies that
-    /// such an allocation request will never succeed on the given
-    /// allocator, regardless of environment, memory pressure, or
-    /// other contextual conditions.
-    ///
-    /// For example, an allocator that does not support requests for
-    /// large memory blocks might return this error variant.
-    Unsupported { details: &'static str },
-}
-
-impl AllocErr {
-    #[inline]
-    pub fn invalid_input(details: &'static str) -> Self {
-        AllocErr::Unsupported { details: details }
-    }
-    #[inline]
-    pub fn is_memory_exhausted(&self) -> bool {
-        if let AllocErr::Exhausted { .. } = *self { true } else { false }
-    }
-    #[inline]
-    pub fn is_request_unsupported(&self) -> bool {
-        if let AllocErr::Unsupported { .. } = *self { true } else { false }
-    }
-    #[inline]
-    pub fn description(&self) -> &str {
-        match *self {
-            AllocErr::Exhausted { .. } => "allocator memory exhausted",
-            AllocErr::Unsupported { .. } => "unsupported allocator request",
-        }
-    }
-}
+pub struct AllocErr;
 
 // (we need this for downstream impl of trait Error)
 impl fmt::Display for AllocErr {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "{}", self.description())
+        f.write_str("memory allocation failed")
     }
 }
 
@@ -592,12 +554,8 @@ pub unsafe trait Alloc {
     /// aborting.
     ///
     /// `oom` is meant to be used by clients unable to cope with an
-    /// unsatisfied allocation request (signaled by an error such as
-    /// `AllocErr::Exhausted`), and wish to abandon computation rather
-    /// than attempt to recover locally. Such clients should pass the
-    /// signaling error value back into `oom`, where the allocator
-    /// may incorporate that error value into its diagnostic report
-    /// before aborting.
+    /// unsatisfied allocation request, and wish to abandon
+    /// computation rather than attempt to recover locally.
     ///
     /// Implementations of the `oom` method are discouraged from
     /// infinitely regressing in nested calls to `oom`. In
@@ -963,7 +921,7 @@ pub unsafe trait Alloc {
         if k.size() > 0 {
             unsafe { self.alloc(k).map(|p| NonNull::new_unchecked(p as *mut T)) }
         } else {
-            Err(AllocErr::invalid_input("zero-sized type invalid for alloc_one"))
+            Err(AllocErr)
         }
     }
 
@@ -1036,7 +994,7 @@ pub unsafe trait Alloc {
                         })
                 }
             }
-            _ => Err(AllocErr::invalid_input("invalid layout for alloc_array")),
+            _ => Err(AllocErr),
         }
     }
 
@@ -1084,7 +1042,7 @@ pub unsafe trait Alloc {
                     .map(|p| NonNull::new_unchecked(p as *mut T))
             }
             _ => {
-                Err(AllocErr::invalid_input("invalid layout for realloc_array"))
+                Err(AllocErr)
             }
         }
     }
@@ -1118,7 +1076,7 @@ pub unsafe trait Alloc {
                 Ok(self.dealloc(raw_ptr, k.clone()))
             }
             _ => {
-                Err(AllocErr::invalid_input("invalid layout for dealloc_array"))
+                Err(AllocErr)
             }
         }
     }
diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs
index eb0c960732d3c..533ad3ad47332 100644
--- a/src/libstd/alloc.rs
+++ b/src/libstd/alloc.rs
@@ -21,9 +21,7 @@
 #[doc(hidden)]
 #[allow(unused_attributes)]
 pub mod __default_lib_allocator {
-    use super::{System, Layout, Alloc, AllocErr};
-    use ptr;
-
+    use super::{System, Layout, Alloc, AllocErr, CannotReallocInPlace};
     // for symbol names src/librustc/middle/allocator.rs
     // for signatures src/librustc_allocator/lib.rs
 
@@ -34,14 +32,11 @@ pub mod __default_lib_allocator {
     #[rustc_std_internal_symbol]
     pub unsafe extern fn __rdl_alloc(size: usize,
                                      align: usize,
-                                     err: *mut u8) -> *mut u8 {
+                                     _err: *mut u8) -> *mut u8 {
         let layout = Layout::from_size_align_unchecked(size, align);
         match System.alloc(layout) {
             Ok(p) => p,
-            Err(e) => {
-                ptr::write(err as *mut AllocErr, e);
-                0 as *mut u8
-            }
+            Err(AllocErr) => 0 as *mut u8,
         }
     }
 
@@ -76,15 +71,12 @@ pub mod __default_lib_allocator {
                                        old_align: usize,
                                        new_size: usize,
                                        new_align: usize,
-                                       err: *mut u8) -> *mut u8 {
+                                       _err: *mut u8) -> *mut u8 {
         let old_layout = Layout::from_size_align_unchecked(old_size, old_align);
         let new_layout = Layout::from_size_align_unchecked(new_size, new_align);
         match System.realloc(ptr, old_layout, new_layout) {
             Ok(p) => p,
-            Err(e) => {
-                ptr::write(err as *mut AllocErr, e);
-                0 as *mut u8
-            }
+            Err(AllocErr) => 0 as *mut u8,
         }
     }
 
@@ -92,14 +84,11 @@ pub mod __default_lib_allocator {
     #[rustc_std_internal_symbol]
     pub unsafe extern fn __rdl_alloc_zeroed(size: usize,
                                             align: usize,
-                                            err: *mut u8) -> *mut u8 {
+                                            _err: *mut u8) -> *mut u8 {
         let layout = Layout::from_size_align_unchecked(size, align);
         match System.alloc_zeroed(layout) {
             Ok(p) => p,
-            Err(e) => {
-                ptr::write(err as *mut AllocErr, e);
-                0 as *mut u8
-            }
+            Err(AllocErr) => 0 as *mut u8,
         }
     }
 
@@ -108,17 +97,14 @@ pub mod __default_lib_allocator {
     pub unsafe extern fn __rdl_alloc_excess(size: usize,
                                             align: usize,
                                             excess: *mut usize,
-                                            err: *mut u8) -> *mut u8 {
+                                            _err: *mut u8) -> *mut u8 {
         let layout = Layout::from_size_align_unchecked(size, align);
         match System.alloc_excess(layout) {
             Ok(p) => {
                 *excess = p.1;
                 p.0
             }
-            Err(e) => {
-                ptr::write(err as *mut AllocErr, e);
-                0 as *mut u8
-            }
+            Err(AllocErr) => 0 as *mut u8,
         }
     }
 
@@ -130,7 +116,7 @@ pub mod __default_lib_allocator {
                                               new_size: usize,
                                               new_align: usize,
                                               excess: *mut usize,
-                                              err: *mut u8) -> *mut u8 {
+                                              _err: *mut u8) -> *mut u8 {
         let old_layout = Layout::from_size_align_unchecked(old_size, old_align);
         let new_layout = Layout::from_size_align_unchecked(new_size, new_align);
         match System.realloc_excess(ptr, old_layout, new_layout) {
@@ -138,10 +124,7 @@ pub mod __default_lib_allocator {
                 *excess = p.1;
                 p.0
             }
-            Err(e) => {
-                ptr::write(err as *mut AllocErr, e);
-                0 as *mut u8
-            }
+            Err(AllocErr) => 0 as *mut u8,
         }
     }
 
@@ -156,7 +139,7 @@ pub mod __default_lib_allocator {
         let new_layout = Layout::from_size_align_unchecked(new_size, new_align);
         match System.grow_in_place(ptr, old_layout, new_layout) {
             Ok(()) => 1,
-            Err(_) => 0,
+            Err(CannotReallocInPlace) => 0,
         }
     }
 
@@ -171,7 +154,7 @@ pub mod __default_lib_allocator {
         let new_layout = Layout::from_size_align_unchecked(new_size, new_align);
         match System.shrink_in_place(ptr, old_layout, new_layout) {
             Ok(()) => 1,
-            Err(_) => 0,
+            Err(CannotReallocInPlace) => 0,
         }
     }
 }
diff --git a/src/libstd/error.rs b/src/libstd/error.rs
index 4edb897350efd..ec55a3c021a80 100644
--- a/src/libstd/error.rs
+++ b/src/libstd/error.rs
@@ -243,7 +243,7 @@ impl Error for ! {
            issue = "32838")]
 impl Error for AllocErr {
     fn description(&self) -> &str {
-        AllocErr::description(self)
+        "memory allocation failed"
     }
 }
 

From eb69593f73be1e41d9e2ef065010a47478c14924 Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Tue, 3 Apr 2018 17:51:03 +0200
Subject: [PATCH 12/27] Implement GlobalAlloc for System

---
 src/liballoc_system/lib.rs | 269 +++++++++++++++++++------------------
 src/libcore/alloc.rs       |   4 +
 2 files changed, 145 insertions(+), 128 deletions(-)
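
This patch makes `System` usable through the new `GlobalAlloc` trait (raw pointers in and out, null signalling failure) and rebuilds the existing `Alloc for &System` impls on top of it via the `alloc_methods_based_on_global_alloc!` macro. A rough usage sketch against the in-flight API; the `Void` pointee type and the `std::alloc` re-exports assumed here are as of this series, not the final shape:

    #![feature(allocator_api)]

    use std::alloc::{GlobalAlloc, Layout, System, Void};

    fn main() {
        unsafe {
            let layout = Layout::from_size_align(64, 8).unwrap();
            // GlobalAlloc reports failure with a null pointer, not a Result.
            let p: *mut Void = System.alloc(layout);
            if !p.is_null() {
                System.dealloc(p, layout);
            }
        }
    }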

diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs
index 5dca05cf08537..0480be8d913ae 100644
--- a/src/liballoc_system/lib.rs
+++ b/src/liballoc_system/lib.rs
@@ -50,19 +50,19 @@ pub struct System;
 unsafe impl Alloc for System {
     #[inline]
     unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        (&*self).alloc(layout)
+        Alloc::alloc(&mut &*self, layout)
     }
 
     #[inline]
     unsafe fn alloc_zeroed(&mut self, layout: Layout)
         -> Result<*mut u8, AllocErr>
     {
-        (&*self).alloc_zeroed(layout)
+        Alloc::alloc_zeroed(&mut &*self, layout)
     }
 
     #[inline]
     unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        (&*self).dealloc(ptr, layout)
+        Alloc::dealloc(&mut &*self, ptr, layout)
     }
 
     #[inline]
@@ -70,21 +70,21 @@ unsafe impl Alloc for System {
                       ptr: *mut u8,
                       old_layout: Layout,
                       new_layout: Layout) -> Result<*mut u8, AllocErr> {
-        (&*self).realloc(ptr, old_layout, new_layout)
+        Alloc::realloc(&mut &*self, ptr, old_layout, new_layout)
     }
 
     fn oom(&mut self, err: AllocErr) -> ! {
-        (&*self).oom(err)
+        Alloc::oom(&mut &*self, err)
     }
 
     #[inline]
     fn usable_size(&self, layout: &Layout) -> (usize, usize) {
-        (&self).usable_size(layout)
+        Alloc::usable_size(&mut &*self, layout)
     }
 
     #[inline]
     unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
-        (&*self).alloc_excess(layout)
+        Alloc::alloc_excess(&mut &*self, layout)
     }
 
     #[inline]
@@ -92,7 +92,7 @@ unsafe impl Alloc for System {
                              ptr: *mut u8,
                              layout: Layout,
                              new_layout: Layout) -> Result<Excess, AllocErr> {
-        (&*self).realloc_excess(ptr, layout, new_layout)
+        Alloc::realloc_excess(&mut &*self, ptr, layout, new_layout)
     }
 
     #[inline]
@@ -100,7 +100,7 @@ unsafe impl Alloc for System {
                             ptr: *mut u8,
                             layout: Layout,
                             new_layout: Layout) -> Result<(), CannotReallocInPlace> {
-        (&*self).grow_in_place(ptr, layout, new_layout)
+        Alloc::grow_in_place(&mut &*self, ptr, layout, new_layout)
     }
 
     #[inline]
@@ -108,7 +108,76 @@ unsafe impl Alloc for System {
                               ptr: *mut u8,
                               layout: Layout,
                               new_layout: Layout) -> Result<(), CannotReallocInPlace> {
-        (&*self).shrink_in_place(ptr, layout, new_layout)
+        Alloc::shrink_in_place(&mut &*self, ptr, layout, new_layout)
+    }
+}
+
+#[cfg(any(windows, unix, target_os = "cloudabi", target_os = "redox"))]
+mod realloc_fallback {
+    use core::alloc::{GlobalAlloc, Void, Layout};
+    use core::cmp;
+    use core::ptr;
+
+    impl super::System {
+        pub(crate) unsafe fn realloc_fallback(&self, ptr: *mut Void, old_layout: Layout,
+                                              new_size: usize) -> *mut Void {
+            // Docs for GlobalAlloc::realloc require this to be valid:
+            let new_layout = Layout::from_size_align_unchecked(new_size, old_layout.align());
+
+            let new_ptr = GlobalAlloc::alloc(self, new_layout);
+            if !new_ptr.is_null() {
+                let size = cmp::min(old_layout.size(), new_size);
+                ptr::copy_nonoverlapping(ptr as *mut u8, new_ptr as *mut u8, size);
+                GlobalAlloc::dealloc(self, ptr, old_layout);
+            }
+            new_ptr
+        }
+    }
+}
+
+macro_rules! alloc_methods_based_on_global_alloc {
+    () => {
+        #[inline]
+        unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+            let ptr = GlobalAlloc::alloc(*self, layout);
+            if !ptr.is_null() {
+                Ok(ptr as *mut u8)
+            } else {
+                Err(AllocErr)
+            }
+        }
+
+        #[inline]
+        unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+            let ptr = GlobalAlloc::alloc_zeroed(*self, layout);
+            if !ptr.is_null() {
+                Ok(ptr as *mut u8)
+            } else {
+                Err(AllocErr)
+            }
+        }
+
+        #[inline]
+        unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+            GlobalAlloc::dealloc(*self, ptr as *mut Void, layout)
+        }
+
+        #[inline]
+        unsafe fn realloc(&mut self,
+                          ptr: *mut u8,
+                          old_layout: Layout,
+                          new_layout: Layout) -> Result<*mut u8, AllocErr> {
+            if old_layout.align() != new_layout.align() {
+                return Err(AllocErr)
+            }
+
+            let ptr = GlobalAlloc::realloc(*self, ptr as *mut Void, old_layout, new_layout.size());
+            if !ptr.is_null() {
+                Ok(ptr as *mut u8)
+            } else {
+                Err(AllocErr)
+            }
+        }
     }
 }
 
@@ -116,86 +185,62 @@ unsafe impl Alloc for System {
 mod platform {
     extern crate libc;
 
-    use core::cmp;
     use core::ptr;
 
     use MIN_ALIGN;
     use System;
-    use core::alloc::{Alloc, AllocErr, Layout};
+    use core::alloc::{GlobalAlloc, Alloc, AllocErr, Layout, Void};
 
     #[unstable(feature = "allocator_api", issue = "32838")]
-    unsafe impl<'a> Alloc for &'a System {
+    unsafe impl GlobalAlloc for System {
         #[inline]
-        unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-            let ptr = if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() {
-                libc::malloc(layout.size()) as *mut u8
+        unsafe fn alloc(&self, layout: Layout) -> *mut Void {
+            if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() {
+                libc::malloc(layout.size()) as *mut Void
             } else {
                 #[cfg(target_os = "macos")]
                 {
                     if layout.align() > (1 << 31) {
-                        return Err(AllocErr)
+                        // FIXME: use Void::null_mut https://github.com/rust-lang/rust/issues/49659
+                        return 0 as *mut Void
                     }
                 }
                 aligned_malloc(&layout)
-            };
-            if !ptr.is_null() {
-                Ok(ptr)
-            } else {
-                Err(AllocErr)
             }
         }
 
         #[inline]
-        unsafe fn alloc_zeroed(&mut self, layout: Layout)
-            -> Result<*mut u8, AllocErr>
-        {
+        unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Void {
             if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() {
-                let ptr = libc::calloc(layout.size(), 1) as *mut u8;
-                if !ptr.is_null() {
-                    Ok(ptr)
-                } else {
-                    Err(AllocErr)
-                }
+                libc::calloc(layout.size(), 1) as *mut Void
             } else {
-                let ret = self.alloc(layout.clone());
-                if let Ok(ptr) = ret {
-                    ptr::write_bytes(ptr, 0, layout.size());
+                let ptr = self.alloc(layout.clone());
+                if !ptr.is_null() {
+                    ptr::write_bytes(ptr as *mut u8, 0, layout.size());
                 }
-                ret
+                ptr
             }
         }
 
         #[inline]
-        unsafe fn dealloc(&mut self, ptr: *mut u8, _layout: Layout) {
+        unsafe fn dealloc(&self, ptr: *mut Void, _layout: Layout) {
             libc::free(ptr as *mut libc::c_void)
         }
 
         #[inline]
-        unsafe fn realloc(&mut self,
-                          ptr: *mut u8,
-                          old_layout: Layout,
-                          new_layout: Layout) -> Result<*mut u8, AllocErr> {
-            if old_layout.align() != new_layout.align() {
-                return Err(AllocErr)
-            }
-
-            if new_layout.align() <= MIN_ALIGN  && new_layout.align() <= new_layout.size(){
-                let ptr = libc::realloc(ptr as *mut libc::c_void, new_layout.size());
-                if !ptr.is_null() {
-                    Ok(ptr as *mut u8)
-                } else {
-                    Err(AllocErr)
-                }
+        unsafe fn realloc(&self, ptr: *mut Void, old_layout: Layout, new_size: usize) -> *mut Void {
+            let align = old_layout.align();
+            if align <= MIN_ALIGN && align <= new_size {
+                libc::realloc(ptr as *mut libc::c_void, new_size) as *mut Void
             } else {
-                let res = self.alloc(new_layout.clone());
-                if let Ok(new_ptr) = res {
-                    let size = cmp::min(old_layout.size(), new_layout.size());
-                    ptr::copy_nonoverlapping(ptr, new_ptr, size);
-                    self.dealloc(ptr, old_layout);
-                }
-                res
+                self.realloc_fallback(ptr, old_layout, new_size)
             }
         }
+    }
+
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    unsafe impl<'a> Alloc for &'a System {
+        alloc_methods_based_on_global_alloc!();
 
         fn oom(&mut self, err: AllocErr) -> ! {
             use core::fmt::{self, Write};
@@ -237,7 +282,7 @@ mod platform {
 
     #[cfg(any(target_os = "android", target_os = "redox", target_os = "solaris"))]
     #[inline]
-    unsafe fn aligned_malloc(layout: &Layout) -> *mut u8 {
+    unsafe fn aligned_malloc(layout: &Layout) -> *mut Void {
         // On android we currently target API level 9 which unfortunately
         // doesn't have the `posix_memalign` API used below. Instead we use
         // `memalign`, but this unfortunately has the property on some systems
@@ -255,18 +300,18 @@ mod platform {
         // [3]: https://bugs.chromium.org/p/chromium/issues/detail?id=138579
         // [4]: https://chromium.googlesource.com/chromium/src/base/+/master/
         //                                       /memory/aligned_memory.cc
-        libc::memalign(layout.align(), layout.size()) as *mut u8
+        libc::memalign(layout.align(), layout.size()) as *mut Void
     }
 
     #[cfg(not(any(target_os = "android", target_os = "redox", target_os = "solaris")))]
     #[inline]
-    unsafe fn aligned_malloc(layout: &Layout) -> *mut u8 {
+    unsafe fn aligned_malloc(layout: &Layout) -> *mut Void {
         let mut out = ptr::null_mut();
         let ret = libc::posix_memalign(&mut out, layout.align(), layout.size());
         if ret != 0 {
-            ptr::null_mut()
+            0 as *mut Void
         } else {
-            out as *mut u8
+            out as *mut Void
         }
     }
 }
@@ -274,12 +319,11 @@ mod platform {
 #[cfg(windows)]
 #[allow(bad_style)]
 mod platform {
-    use core::cmp;
     use core::ptr;
 
     use MIN_ALIGN;
     use System;
-    use core::alloc::{Alloc, AllocErr, Layout, CannotReallocInPlace};
+    use core::alloc::{GlobalAlloc, Alloc, Void, AllocErr, Layout, CannotReallocInPlace};
 
     type LPVOID = *mut u8;
     type HANDLE = LPVOID;
@@ -323,9 +367,7 @@ mod platform {
     }
 
     #[inline]
-    unsafe fn allocate_with_flags(layout: Layout, flags: DWORD)
-        -> Result<*mut u8, AllocErr>
-    {
+    unsafe fn allocate_with_flags(layout: Layout, flags: DWORD) -> *mut Void {
         let ptr = if layout.align() <= MIN_ALIGN {
             HeapAlloc(GetProcessHeap(), flags, layout.size())
         } else {
@@ -337,35 +379,29 @@ mod platform {
                 align_ptr(ptr, layout.align())
             }
         };
-        if ptr.is_null() {
-            Err(AllocErr)
-        } else {
-            Ok(ptr as *mut u8)
-        }
+        ptr as *mut Void
     }
 
     #[unstable(feature = "allocator_api", issue = "32838")]
-    unsafe impl<'a> Alloc for &'a System {
+    unsafe impl GlobalAlloc for System {
         #[inline]
-        unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        unsafe fn alloc(&self, layout: Layout) -> *mut Void {
             allocate_with_flags(layout, 0)
         }
 
         #[inline]
-        unsafe fn alloc_zeroed(&mut self, layout: Layout)
-            -> Result<*mut u8, AllocErr>
-        {
+        unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Void {
             allocate_with_flags(layout, HEAP_ZERO_MEMORY)
         }
 
         #[inline]
-        unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+        unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
             if layout.align() <= MIN_ALIGN {
                 let err = HeapFree(GetProcessHeap(), 0, ptr as LPVOID);
                 debug_assert!(err != 0, "Failed to free heap memory: {}",
                               GetLastError());
             } else {
-                let header = get_header(ptr);
+                let header = get_header(ptr as *mut u8);
                 let err = HeapFree(GetProcessHeap(), 0, header.0 as LPVOID);
                 debug_assert!(err != 0, "Failed to free heap memory: {}",
                               GetLastError());
@@ -373,34 +409,19 @@ mod platform {
         }
 
         #[inline]
-        unsafe fn realloc(&mut self,
-                          ptr: *mut u8,
-                          old_layout: Layout,
-                          new_layout: Layout) -> Result<*mut u8, AllocErr> {
-            if old_layout.align() != new_layout.align() {
-                return Err(AllocErr)
-            }
-
-            if new_layout.align() <= MIN_ALIGN {
-                let ptr = HeapReAlloc(GetProcessHeap(),
-                                      0,
-                                      ptr as LPVOID,
-                                      new_layout.size());
-                if !ptr.is_null() {
-                    Ok(ptr as *mut u8)
-                } else {
-                    Err(AllocErr)
-                }
+        unsafe fn realloc(&self, ptr: *mut Void, old_layout: Layout, new_size: usize) -> *mut Void {
+            let align = old_layout.align();
+            if align <= MIN_ALIGN {
+                HeapReAlloc(GetProcessHeap(), 0, ptr as LPVOID, new_size) as *mut Void
             } else {
-                let res = self.alloc(new_layout.clone());
-                if let Ok(new_ptr) = res {
-                    let size = cmp::min(old_layout.size(), new_layout.size());
-                    ptr::copy_nonoverlapping(ptr, new_ptr, size);
-                    self.dealloc(ptr, old_layout);
-                }
-                res
+                self.realloc_fallback(ptr, old_layout, new_size)
             }
         }
+    }
+
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    unsafe impl<'a> Alloc for &'a System {
+        alloc_methods_based_on_global_alloc!();
 
         #[inline]
         unsafe fn grow_in_place(&mut self,
@@ -489,45 +510,37 @@ mod platform {
 mod platform {
     extern crate dlmalloc;
 
-    use core::alloc::{Alloc, AllocErr, Layout};
+    use core::alloc::{GlobalAlloc, Alloc, AllocErr, Layout, Void};
     use System;
 
     // No need for synchronization here as wasm is currently single-threaded
     static mut DLMALLOC: dlmalloc::Dlmalloc = dlmalloc::DLMALLOC_INIT;
 
-    fn to_result(ptr: *mut u8) -> Result<*mut u8, AllocErr> {
-        if !ptr.is_null() {
-            Ok(ptr)
-        } else {
-            Err(AllocErr)
-        }
-    }
-
     #[unstable(feature = "allocator_api", issue = "32838")]
-    unsafe impl<'a> Alloc for &'a System {
+    unsafe impl GlobalAlloc for System {
         #[inline]
-        unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-            to_result(DLMALLOC.malloc(layout.size(), layout.align()))
+        unsafe fn alloc(&self, layout: Layout) -> *mut Void {
+            DLMALLOC.malloc(layout.size(), layout.align()) as *mut Void
         }
 
         #[inline]
-        unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-            to_result(DLMALLOC.calloc(layout.size(), layout.align()))
+        unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Void {
+            DLMALLOC.calloc(layout.size(), layout.align()) as *mut Void
         }
 
         #[inline]
-        unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-            DLMALLOC.free(ptr, layout.size(), layout.align())
+        unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
+            DLMALLOC.free(ptr as *mut u8, layout.size(), layout.align())
         }
 
         #[inline]
-        unsafe fn realloc(&mut self,
-                          ptr: *mut u8,
-                          old_layout: Layout,
-                          new_layout: Layout) -> Result<*mut u8, AllocErr> {
-            to_result(DLMALLOC.realloc(
-                ptr, old_layout.size(), old_layout.align(), new_layout.size(),
-            ))
+        unsafe fn realloc(&self, ptr: *mut Void, layout: Layout, new_size: usize) -> *mut Void {
+            DLMALLOC.realloc(ptr as *mut u8, layout.size(), layout.align(), new_size) as *mut Void
         }
     }
+
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    unsafe impl<'a> Alloc for &'a System {
+        alloc_methods_based_on_global_alloc!();
+    }
 }
diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs
index b6626ff9f26e8..1c764dab000f8 100644
--- a/src/libcore/alloc.rs
+++ b/src/libcore/alloc.rs
@@ -381,6 +381,10 @@ pub unsafe trait GlobalAlloc {
         ptr
     }
 
+    /// # Safety
+    ///
+    /// `new_size`, when rounded up to the nearest multiple of `old_layout.align()`,
+    /// must not overflow (i.e. the rounded value must be less than `usize::MAX`).
     unsafe fn realloc(&self, ptr: *mut Void, old_layout: Layout, new_size: usize) -> *mut Void {
         let new_layout = Layout::from_size_align_unchecked(new_size, old_layout.align());
         let new_ptr = self.alloc(new_layout);
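
The safety note added to the default `realloc` above boils down to: `new_size`, rounded up to a multiple of `old_layout.align()`, must still fit in a `usize`. A small, hypothetical helper (not part of the patch) that checks exactly that condition:

    fn rounds_up_without_overflow(new_size: usize, align: usize) -> Option<usize> {
        // `align` is a power of two, as Layout guarantees.
        new_size.checked_add(align - 1).map(|padded| padded & !(align - 1))
    }

    fn main() {
        assert_eq!(rounds_up_without_overflow(30, 8), Some(32));
        // A request this large would violate the documented contract.
        assert_eq!(rounds_up_without_overflow(usize::max_value(), 8), None);
    }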

From 86753ce1cc520bfe50ae89f09ec47f313ce900eb Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Tue, 3 Apr 2018 17:12:57 +0200
Subject: [PATCH 13/27] Use the GlobalAlloc trait for #[global_allocator]

---
 src/Cargo.lock                                |   1 -
 .../src/language-features/global-allocator.md |   8 +-
 src/liballoc/alloc.rs                         | 188 +++---------
 src/liballoc_jemalloc/Cargo.toml              |   1 -
 src/liballoc_jemalloc/lib.rs                  | 110 +------
 src/librustc_allocator/expand.rs              | 283 ++----------------
 src/librustc_allocator/lib.rs                 |  39 +--
 src/librustc_trans/allocator.rs               |  28 +-
 src/libstd/alloc.rs                           | 153 ++++------
 src/llvm                                      |   2 +-
 src/rustllvm/llvm-rebuild-trigger             |   2 +-
 .../allocator/not-an-allocator.rs             |  14 +-
 .../run-make-fulldeps/std-core-cycle/bar.rs   |   8 +-
 .../run-pass/allocator/auxiliary/custom.rs    |   8 +-
 src/test/run-pass/allocator/custom.rs         |  12 +-
 src/test/run-pass/allocator/xcrate-use.rs     |   6 +-
 src/test/run-pass/allocator/xcrate-use2.rs    |  12 +-
 17 files changed, 168 insertions(+), 707 deletions(-)
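
The net effect of this patch is that `#[global_allocator]` now accepts any type implementing `GlobalAlloc` rather than `Alloc for &T`. A self-contained sketch of the usage the updated unstable-book example below is moving towards, including the `#[global_allocator]` static that sits outside the hunk shown there; the feature names and the type name here are assumptions for illustration:

    #![feature(global_allocator, allocator_api)]

    use std::alloc::{GlobalAlloc, Layout, System, Void};

    struct ForwardToSystem;

    unsafe impl GlobalAlloc for ForwardToSystem {
        unsafe fn alloc(&self, layout: Layout) -> *mut Void {
            System.alloc(layout)
        }

        unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
            System.dealloc(ptr, layout)
        }
    }

    #[global_allocator]
    static GLOBAL: ForwardToSystem = ForwardToSystem;

    fn main() {
        // Every heap allocation in the program, including this Vec,
        // now goes through GLOBAL and therefore through System.
        let v = vec![1, 2, 3];
        assert_eq!(v.len(), 3);
    }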

diff --git a/src/Cargo.lock b/src/Cargo.lock
index e5297d1482e8b..2e969f4ec2bbc 100644
--- a/src/Cargo.lock
+++ b/src/Cargo.lock
@@ -19,7 +19,6 @@ dependencies = [
 name = "alloc_jemalloc"
 version = "0.0.0"
 dependencies = [
- "alloc_system 0.0.0",
  "build_helper 0.1.0",
  "cc 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
  "compiler_builtins 0.0.0",
diff --git a/src/doc/unstable-book/src/language-features/global-allocator.md b/src/doc/unstable-book/src/language-features/global-allocator.md
index b3e6925b666b7..6ce12ba684ddc 100644
--- a/src/doc/unstable-book/src/language-features/global-allocator.md
+++ b/src/doc/unstable-book/src/language-features/global-allocator.md
@@ -29,16 +29,16 @@ looks like:
 ```rust
 #![feature(global_allocator, allocator_api, heap_api)]
 
-use std::heap::{Alloc, System, Layout, AllocErr};
+use std::alloc::{GlobalAlloc, System, Layout, Void};
 
 struct MyAllocator;
 
-unsafe impl<'a> Alloc for &'a MyAllocator {
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+unsafe impl GlobalAlloc for MyAllocator {
+    unsafe fn alloc(&self, layout: Layout) -> *mut Void {
         System.alloc(layout)
     }
 
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+    unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
         System.dealloc(ptr, layout)
     }
 }
diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index b975ff6be5817..73bc78eb8a2e8 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -16,26 +16,19 @@
             issue = "32838")]
 
 use core::intrinsics::{min_align_of_val, size_of_val};
-use core::mem;
 use core::usize;
 
 #[doc(inline)]
 pub use core::alloc::*;
 
+#[cfg(stage0)]
 extern "Rust" {
     #[allocator]
     #[rustc_allocator_nounwind]
     fn __rust_alloc(size: usize, align: usize, err: *mut u8) -> *mut u8;
-    #[cold]
-    #[rustc_allocator_nounwind]
-    fn __rust_oom(err: *const u8) -> !;
     #[rustc_allocator_nounwind]
     fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
     #[rustc_allocator_nounwind]
-    fn __rust_usable_size(layout: *const u8,
-                          min: *mut usize,
-                          max: *mut usize);
-    #[rustc_allocator_nounwind]
     fn __rust_realloc(ptr: *mut u8,
                       old_size: usize,
                       old_align: usize,
@@ -44,31 +37,22 @@ extern "Rust" {
                       err: *mut u8) -> *mut u8;
     #[rustc_allocator_nounwind]
     fn __rust_alloc_zeroed(size: usize, align: usize, err: *mut u8) -> *mut u8;
+}
+
+#[cfg(not(stage0))]
+extern "Rust" {
+    #[allocator]
     #[rustc_allocator_nounwind]
-    fn __rust_alloc_excess(size: usize,
-                           align: usize,
-                           excess: *mut usize,
-                           err: *mut u8) -> *mut u8;
+    fn __rust_alloc(size: usize, align: usize) -> *mut u8;
     #[rustc_allocator_nounwind]
-    fn __rust_realloc_excess(ptr: *mut u8,
-                             old_size: usize,
-                             old_align: usize,
-                             new_size: usize,
-                             new_align: usize,
-                             excess: *mut usize,
-                             err: *mut u8) -> *mut u8;
+    fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
     #[rustc_allocator_nounwind]
-    fn __rust_grow_in_place(ptr: *mut u8,
-                            old_size: usize,
-                            old_align: usize,
-                            new_size: usize,
-                            new_align: usize) -> u8;
+    fn __rust_realloc(ptr: *mut u8,
+                      old_size: usize,
+                      align: usize,
+                      new_size: usize) -> *mut u8;
     #[rustc_allocator_nounwind]
-    fn __rust_shrink_in_place(ptr: *mut u8,
-                              old_size: usize,
-                              old_align: usize,
-                              new_size: usize,
-                              new_align: usize) -> u8;
+    fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;
 }
 
 #[derive(Copy, Clone, Default, Debug)]
@@ -86,22 +70,15 @@ pub const Heap: Global = Global;
 unsafe impl Alloc for Global {
     #[inline]
     unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        let mut err = AllocErr;
-        let ptr = __rust_alloc(layout.size(),
-                               layout.align(),
-                               &mut err as *mut AllocErr as *mut u8);
-        if ptr.is_null() {
-            Err(AllocErr)
-        } else {
-            Ok(ptr)
-        }
-    }
+        #[cfg(not(stage0))]
+        let ptr = __rust_alloc(layout.size(), layout.align());
+        #[cfg(stage0)]
+        let ptr = __rust_alloc(layout.size(), layout.align(), &mut 0);
 
-    #[inline]
-    #[cold]
-    fn oom(&mut self, err: AllocErr) -> ! {
-        unsafe {
-            __rust_oom(&err as *const AllocErr as *const u8)
+        if !ptr.is_null() {
+            Ok(ptr)
+        } else {
+            Err(AllocErr)
         }
     }
 
@@ -110,18 +87,6 @@ unsafe impl Alloc for Global {
         __rust_dealloc(ptr, layout.size(), layout.align())
     }
 
-    #[inline]
-    fn usable_size(&self, layout: &Layout) -> (usize, usize) {
-        let mut min = 0;
-        let mut max = 0;
-        unsafe {
-            __rust_usable_size(layout as *const Layout as *const u8,
-                               &mut min,
-                               &mut max);
-        }
-        (min, max)
-    }
-
     #[inline]
     unsafe fn realloc(&mut self,
                       ptr: *mut u8,
@@ -129,107 +94,34 @@ unsafe impl Alloc for Global {
                       new_layout: Layout)
                       -> Result<*mut u8, AllocErr>
     {
-        let mut err = AllocErr;
-        let ptr = __rust_realloc(ptr,
-                                 layout.size(),
-                                 layout.align(),
-                                 new_layout.size(),
-                                 new_layout.align(),
-                                 &mut err as *mut AllocErr as *mut u8);
-        if ptr.is_null() {
-            Err(AllocErr)
+        if layout.align() == new_layout.align() {
+            #[cfg(not(stage0))]
+            let ptr = __rust_realloc(ptr, layout.size(), layout.align(), new_layout.size());
+            #[cfg(stage0)]
+            let ptr = __rust_realloc(ptr, layout.size(), layout.align(),
+                                     new_layout.size(), new_layout.align(), &mut 0);
+
+            if !ptr.is_null() {
+                Ok(ptr)
+            } else {
+                Err(AllocErr)
+            }
         } else {
-            mem::forget(err);
-            Ok(ptr)
+            Err(AllocErr)
         }
     }
 
     #[inline]
     unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        let mut err = AllocErr;
-        let ptr = __rust_alloc_zeroed(layout.size(),
-                                      layout.align(),
-                                      &mut err as *mut AllocErr as *mut u8);
-        if ptr.is_null() {
-            Err(AllocErr)
-        } else {
-            Ok(ptr)
-        }
-    }
+        #[cfg(not(stage0))]
+        let ptr = __rust_alloc_zeroed(layout.size(), layout.align());
+        #[cfg(stage0)]
+        let ptr = __rust_alloc_zeroed(layout.size(), layout.align(), &mut 0);
 
-    #[inline]
-    unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
-        let mut err = AllocErr;
-        let mut size = 0;
-        let ptr = __rust_alloc_excess(layout.size(),
-                                      layout.align(),
-                                      &mut size,
-                                      &mut err as *mut AllocErr as *mut u8);
-        if ptr.is_null() {
-            Err(AllocErr)
+        if !ptr.is_null() {
+            Ok(ptr)
         } else {
-            Ok(Excess(ptr, size))
-        }
-    }
-
-    #[inline]
-    unsafe fn realloc_excess(&mut self,
-                             ptr: *mut u8,
-                             layout: Layout,
-                             new_layout: Layout) -> Result<Excess, AllocErr> {
-        let mut err = AllocErr;
-        let mut size = 0;
-        let ptr = __rust_realloc_excess(ptr,
-                                        layout.size(),
-                                        layout.align(),
-                                        new_layout.size(),
-                                        new_layout.align(),
-                                        &mut size,
-                                        &mut err as *mut AllocErr as *mut u8);
-        if ptr.is_null() {
             Err(AllocErr)
-        } else {
-            Ok(Excess(ptr, size))
-        }
-    }
-
-    #[inline]
-    unsafe fn grow_in_place(&mut self,
-                            ptr: *mut u8,
-                            layout: Layout,
-                            new_layout: Layout)
-                            -> Result<(), CannotReallocInPlace>
-    {
-        debug_assert!(new_layout.size() >= layout.size());
-        debug_assert!(new_layout.align() == layout.align());
-        let ret = __rust_grow_in_place(ptr,
-                                       layout.size(),
-                                       layout.align(),
-                                       new_layout.size(),
-                                       new_layout.align());
-        if ret != 0 {
-            Ok(())
-        } else {
-            Err(CannotReallocInPlace)
-        }
-    }
-
-    #[inline]
-    unsafe fn shrink_in_place(&mut self,
-                              ptr: *mut u8,
-                              layout: Layout,
-                              new_layout: Layout) -> Result<(), CannotReallocInPlace> {
-        debug_assert!(new_layout.size() <= layout.size());
-        debug_assert!(new_layout.align() == layout.align());
-        let ret = __rust_shrink_in_place(ptr,
-                                         layout.size(),
-                                         layout.align(),
-                                         new_layout.size(),
-                                         new_layout.align());
-        if ret != 0 {
-            Ok(())
-        } else {
-            Err(CannotReallocInPlace)
         }
     }
 }
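
One caller-visible consequence of the rewritten `Global` impl above: `realloc` through `Global` now fails up front when the requested alignment changes, instead of passing both alignments down to the shim. A sketch using the in-flight `Alloc` API (`Global` is shown under `std::alloc` for brevity; its exact re-export location is an assumption at this point in the series):

    #![feature(allocator_api)]

    use std::alloc::{Alloc, Global, Layout};

    fn main() {
        unsafe {
            let old = Layout::from_size_align(64, 8).unwrap();
            let bigger = Layout::from_size_align(128, 8).unwrap();
            let realigned = Layout::from_size_align(64, 16).unwrap();

            if let Ok(p) = Global.alloc(old) {
                // Different alignment: Err(AllocErr) without calling __rust_realloc.
                assert!(Global.realloc(p, old, realigned).is_err());
                // Same alignment: forwarded to __rust_realloc.
                match Global.realloc(p, old, bigger) {
                    Ok(p2) => Global.dealloc(p2, bigger),
                    Err(_) => Global.dealloc(p, old),
                }
            }
        }
    }
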
diff --git a/src/liballoc_jemalloc/Cargo.toml b/src/liballoc_jemalloc/Cargo.toml
index 02435170374c5..7986d5dd2eb54 100644
--- a/src/liballoc_jemalloc/Cargo.toml
+++ b/src/liballoc_jemalloc/Cargo.toml
@@ -12,7 +12,6 @@ test = false
 doc = false
 
 [dependencies]
-alloc_system = { path = "../liballoc_system" }
 core = { path = "../libcore" }
 libc = { path = "../rustc/libc_shim" }
 compiler_builtins = { path = "../rustc/compiler_builtins_shim" }
diff --git a/src/liballoc_jemalloc/lib.rs b/src/liballoc_jemalloc/lib.rs
index 59a7e87e1ec31..661d7ab78da01 100644
--- a/src/liballoc_jemalloc/lib.rs
+++ b/src/liballoc_jemalloc/lib.rs
@@ -14,7 +14,6 @@
             reason = "this library is unlikely to be stabilized in its current \
                       form or name",
             issue = "27783")]
-#![feature(alloc_system)]
 #![feature(libc)]
 #![feature(linkage)]
 #![feature(staged_api)]
@@ -23,15 +22,12 @@
 #![cfg_attr(not(dummy_jemalloc), feature(allocator_api))]
 #![rustc_alloc_kind = "exe"]
 
-extern crate alloc_system;
 extern crate libc;
 
 #[cfg(not(dummy_jemalloc))]
 pub use contents::*;
 #[cfg(not(dummy_jemalloc))]
 mod contents {
-    use core::alloc::{Alloc, AllocErr, Layout};
-    use alloc_system::System;
     use libc::{c_int, c_void, size_t};
 
     // Note that the symbols here are prefixed by default on macOS and Windows (we
@@ -50,18 +46,10 @@ mod contents {
                        target_os = "dragonfly", target_os = "windows", target_env = "musl"),
                    link_name = "je_rallocx")]
         fn rallocx(ptr: *mut c_void, size: size_t, flags: c_int) -> *mut c_void;
-        #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios",
-                       target_os = "dragonfly", target_os = "windows", target_env = "musl"),
-                   link_name = "je_xallocx")]
-        fn xallocx(ptr: *mut c_void, size: size_t, extra: size_t, flags: c_int) -> size_t;
         #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios",
                        target_os = "dragonfly", target_os = "windows", target_env = "musl"),
                    link_name = "je_sdallocx")]
         fn sdallocx(ptr: *mut c_void, size: size_t, flags: c_int);
-        #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios",
-                       target_os = "dragonfly", target_os = "windows", target_env = "musl"),
-                   link_name = "je_nallocx")]
-        fn nallocx(size: size_t, flags: c_int) -> size_t;
     }
 
     const MALLOCX_ZERO: c_int = 0x40;
@@ -102,20 +90,12 @@ mod contents {
 
     #[no_mangle]
     #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rde_alloc(size: usize,
-                                     align: usize,
-                                     _err: *mut u8) -> *mut u8 {
+    pub unsafe extern fn __rde_alloc(size: usize, align: usize) -> *mut u8 {
         let flags = align_to_flags(align, size);
         let ptr = mallocx(size as size_t, flags) as *mut u8;
         ptr
     }
 
-    #[no_mangle]
-    #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rde_oom(err: *const u8) -> ! {
-        System.oom((*(err as *const AllocErr)).clone())
-    }
-
     #[no_mangle]
     #[rustc_std_internal_symbol]
     pub unsafe extern fn __rde_dealloc(ptr: *mut u8,
@@ -125,44 +105,20 @@ mod contents {
         sdallocx(ptr as *mut c_void, size, flags);
     }
 
-    #[no_mangle]
-    #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rde_usable_size(layout: *const u8,
-                                           min: *mut usize,
-                                           max: *mut usize) {
-        let layout = &*(layout as *const Layout);
-        let flags = align_to_flags(layout.align(), layout.size());
-        let size = nallocx(layout.size(), flags) as usize;
-        *min = layout.size();
-        if size > 0 {
-            *max = size;
-        } else {
-            *max = layout.size();
-        }
-    }
-
     #[no_mangle]
     #[rustc_std_internal_symbol]
     pub unsafe extern fn __rde_realloc(ptr: *mut u8,
                                        _old_size: usize,
-                                       old_align: usize,
-                                       new_size: usize,
-                                       new_align: usize,
-                                       _err: *mut u8) -> *mut u8 {
-        if new_align != old_align {
-            return 0 as *mut u8
-        }
-
-        let flags = align_to_flags(new_align, new_size);
+                                       align: usize,
+                                       new_size: usize) -> *mut u8 {
+        let flags = align_to_flags(align, new_size);
         let ptr = rallocx(ptr as *mut c_void, new_size, flags) as *mut u8;
         ptr
     }
 
     #[no_mangle]
     #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rde_alloc_zeroed(size: usize,
-                                            align: usize,
-                                            _err: *mut u8) -> *mut u8 {
+    pub unsafe extern fn __rde_alloc_zeroed(size: usize, align: usize) -> *mut u8 {
         let ptr = if align <= MIN_ALIGN && align <= size {
             calloc(size as size_t, 1) as *mut u8
         } else {
@@ -171,60 +127,4 @@ mod contents {
         };
         ptr
     }
-
-    #[no_mangle]
-    #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rde_alloc_excess(size: usize,
-                                            align: usize,
-                                            excess: *mut usize,
-                                            err: *mut u8) -> *mut u8 {
-        let p = __rde_alloc(size, align, err);
-        if !p.is_null() {
-            let flags = align_to_flags(align, size);
-            *excess = nallocx(size, flags) as usize;
-        }
-        return p
-    }
-
-    #[no_mangle]
-    #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rde_realloc_excess(ptr: *mut u8,
-                                              old_size: usize,
-                                              old_align: usize,
-                                              new_size: usize,
-                                              new_align: usize,
-                                              excess: *mut usize,
-                                              err: *mut u8) -> *mut u8 {
-        let p = __rde_realloc(ptr, old_size, old_align, new_size, new_align, err);
-        if !p.is_null() {
-            let flags = align_to_flags(new_align, new_size);
-            *excess = nallocx(new_size, flags) as usize;
-        }
-        p
-    }
-
-    #[no_mangle]
-    #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rde_grow_in_place(ptr: *mut u8,
-                                             old_size: usize,
-                                             old_align: usize,
-                                             new_size: usize,
-                                             new_align: usize) -> u8 {
-        __rde_shrink_in_place(ptr, old_size, old_align, new_size, new_align)
-    }
-
-    #[no_mangle]
-    #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rde_shrink_in_place(ptr: *mut u8,
-                                               _old_size: usize,
-                                               old_align: usize,
-                                               new_size: usize,
-                                               new_align: usize) -> u8 {
-        if old_align == new_align {
-            let flags = align_to_flags(new_align, new_size);
-            (xallocx(ptr as *mut c_void, new_size, 0, flags) == new_size) as u8
-        } else {
-            0
-        }
-    }
 }
diff --git a/src/librustc_allocator/expand.rs b/src/librustc_allocator/expand.rs
index ee38cca7828be..ce41fe1f3bc9e 100644
--- a/src/librustc_allocator/expand.rs
+++ b/src/librustc_allocator/expand.rs
@@ -11,7 +11,7 @@
 use rustc::middle::allocator::AllocatorKind;
 use rustc_errors;
 use syntax::abi::Abi;
-use syntax::ast::{Crate, Attribute, LitKind, StrStyle, ExprKind};
+use syntax::ast::{Crate, Attribute, LitKind, StrStyle};
 use syntax::ast::{Unsafety, Constness, Generics, Mutability, Ty, Mac, Arg};
 use syntax::ast::{self, Ident, Item, ItemKind, TyKind, VisibilityKind, Expr};
 use syntax::attr;
@@ -88,7 +88,7 @@ impl<'a> Folder for ExpandAllocatorDirectives<'a> {
             span,
             kind: AllocatorKind::Global,
             global: item.ident,
-            alloc: Ident::from_str("alloc"),
+            core: Ident::from_str("core"),
             cx: ExtCtxt::new(self.sess, ecfg, self.resolver),
         };
         let super_path = f.cx.path(f.span, vec![
@@ -96,7 +96,7 @@ impl<'a> Folder for ExpandAllocatorDirectives<'a> {
             f.global,
         ]);
         let mut items = vec![
-            f.cx.item_extern_crate(f.span, f.alloc),
+            f.cx.item_extern_crate(f.span, f.core),
             f.cx.item_use_simple(
                 f.span,
                 respan(f.span.shrink_to_lo(), VisibilityKind::Inherited),
@@ -126,7 +126,7 @@ struct AllocFnFactory<'a> {
     span: Span,
     kind: AllocatorKind,
     global: Ident,
-    alloc: Ident,
+    core: Ident,
     cx: ExtCtxt<'a>,
 }
 
@@ -143,8 +143,7 @@ impl<'a> AllocFnFactory<'a> {
             self.arg_ty(ty, &mut abi_args, mk)
         }).collect();
         let result = self.call_allocator(method.name, args);
-        let (output_ty, output_expr) =
-            self.ret_ty(&method.output, &mut abi_args, mk, result);
+        let (output_ty, output_expr) = self.ret_ty(&method.output, result);
         let kind = ItemKind::Fn(self.cx.fn_decl(abi_args, ast::FunctionRetTy::Ty(output_ty)),
                                 Unsafety::Unsafe,
                                 dummy_spanned(Constness::NotConst),
@@ -159,16 +158,15 @@ impl<'a> AllocFnFactory<'a> {
 
     fn call_allocator(&self, method: &str, mut args: Vec<P<Expr>>) -> P<Expr> {
         let method = self.cx.path(self.span, vec![
-            self.alloc,
-            Ident::from_str("heap"),
-            Ident::from_str("Alloc"),
+            self.core,
+            Ident::from_str("alloc"),
+            Ident::from_str("GlobalAlloc"),
             Ident::from_str(method),
         ]);
         let method = self.cx.expr_path(method);
         let allocator = self.cx.path_ident(self.span, self.global);
         let allocator = self.cx.expr_path(allocator);
         let allocator = self.cx.expr_addr_of(self.span, allocator);
-        let allocator = self.cx.expr_mut_addr_of(self.span, allocator);
         args.insert(0, allocator);
 
         self.cx.expr_call(self.span, method, args)
@@ -205,8 +203,8 @@ impl<'a> AllocFnFactory<'a> {
                 args.push(self.cx.arg(self.span, align, ty_usize));
 
                 let layout_new = self.cx.path(self.span, vec![
-                    self.alloc,
-                    Ident::from_str("heap"),
+                    self.core,
+                    Ident::from_str("alloc"),
                     Ident::from_str("Layout"),
                     Ident::from_str("from_size_align_unchecked"),
                 ]);
@@ -219,286 +217,67 @@ impl<'a> AllocFnFactory<'a> {
                 layout
             }
 
-            AllocatorTy::LayoutRef => {
-                let ident = ident();
-                args.push(self.cx.arg(self.span, ident, self.ptr_u8()));
-
-                // Convert our `arg: *const u8` via:
-                //
-                //      &*(arg as *const Layout)
-                let expr = self.cx.expr_ident(self.span, ident);
-                let expr = self.cx.expr_cast(self.span, expr, self.layout_ptr());
-                let expr = self.cx.expr_deref(self.span, expr);
-                self.cx.expr_addr_of(self.span, expr)
-            }
-
-            AllocatorTy::AllocErr => {
-                // We're creating:
-                //
-                //      (*(arg as *const AllocErr)).clone()
+            AllocatorTy::Ptr => {
                 let ident = ident();
                 args.push(self.cx.arg(self.span, ident, self.ptr_u8()));
-                let expr = self.cx.expr_ident(self.span, ident);
-                let expr = self.cx.expr_cast(self.span, expr, self.alloc_err_ptr());
-                let expr = self.cx.expr_deref(self.span, expr);
-                self.cx.expr_method_call(
-                    self.span,
-                    expr,
-                    Ident::from_str("clone"),
-                    Vec::new()
-                )
+                let arg = self.cx.expr_ident(self.span, ident);
+                self.cx.expr_cast(self.span, arg, self.ptr_void())
             }
 
-            AllocatorTy::Ptr => {
+            AllocatorTy::Usize => {
                 let ident = ident();
-                args.push(self.cx.arg(self.span, ident, self.ptr_u8()));
+                args.push(self.cx.arg(self.span, ident, self.usize()));
                 self.cx.expr_ident(self.span, ident)
             }
 
             AllocatorTy::ResultPtr |
-            AllocatorTy::ResultExcess |
-            AllocatorTy::ResultUnit |
-            AllocatorTy::Bang |
-            AllocatorTy::UsizePair |
             AllocatorTy::Unit => {
                 panic!("can't convert AllocatorTy to an argument")
             }
         }
     }
 
-    fn ret_ty(&self,
-              ty: &AllocatorTy,
-              args: &mut Vec<Arg>,
-              ident: &mut FnMut() -> Ident,
-              expr: P<Expr>) -> (P<Ty>, P<Expr>)
-    {
+    fn ret_ty(&self, ty: &AllocatorTy, expr: P<Expr>) -> (P<Ty>, P<Expr>) {
         match *ty {
-            AllocatorTy::UsizePair => {
-                // We're creating:
-                //
-                //      let arg = #expr;
-                //      *min = arg.0;
-                //      *max = arg.1;
-
-                let min = ident();
-                let max = ident();
-
-                args.push(self.cx.arg(self.span, min, self.ptr_usize()));
-                args.push(self.cx.arg(self.span, max, self.ptr_usize()));
-
-                let ident = ident();
-                let stmt = self.cx.stmt_let(self.span, false, ident, expr);
-                let min = self.cx.expr_ident(self.span, min);
-                let max = self.cx.expr_ident(self.span, max);
-                let layout = self.cx.expr_ident(self.span, ident);
-                let assign_min = self.cx.expr(self.span, ExprKind::Assign(
-                    self.cx.expr_deref(self.span, min),
-                    self.cx.expr_tup_field_access(self.span, layout.clone(), 0),
-                ));
-                let assign_min = self.cx.stmt_semi(assign_min);
-                let assign_max = self.cx.expr(self.span, ExprKind::Assign(
-                    self.cx.expr_deref(self.span, max),
-                    self.cx.expr_tup_field_access(self.span, layout.clone(), 1),
-                ));
-                let assign_max = self.cx.stmt_semi(assign_max);
-
-                let stmts = vec![stmt, assign_min, assign_max];
-                let block = self.cx.block(self.span, stmts);
-                let ty_unit = self.cx.ty(self.span, TyKind::Tup(Vec::new()));
-                (ty_unit, self.cx.expr_block(block))
-            }
-
-            AllocatorTy::ResultExcess => {
-                // We're creating:
-                //
-                //      match #expr {
-                //          Ok(ptr) => {
-                //              *excess = ptr.1;
-                //              ptr.0
-                //          }
-                //          Err(e) => {
-                //              ptr::write(err_ptr, e);
-                //              0 as *mut u8
-                //          }
-                //      }
-
-                let excess_ptr = ident();
-                args.push(self.cx.arg(self.span, excess_ptr, self.ptr_usize()));
-                let excess_ptr = self.cx.expr_ident(self.span, excess_ptr);
-
-                let err_ptr = ident();
-                args.push(self.cx.arg(self.span, err_ptr, self.ptr_u8()));
-                let err_ptr = self.cx.expr_ident(self.span, err_ptr);
-                let err_ptr = self.cx.expr_cast(self.span,
-                                                err_ptr,
-                                                self.alloc_err_ptr());
-
-                let name = ident();
-                let ok_expr = {
-                    let ptr = self.cx.expr_ident(self.span, name);
-                    let write = self.cx.expr(self.span, ExprKind::Assign(
-                        self.cx.expr_deref(self.span, excess_ptr),
-                        self.cx.expr_tup_field_access(self.span, ptr.clone(), 1),
-                    ));
-                    let write = self.cx.stmt_semi(write);
-                    let ret = self.cx.expr_tup_field_access(self.span,
-                                                            ptr.clone(),
-                                                            0);
-                    let ret = self.cx.stmt_expr(ret);
-                    let block = self.cx.block(self.span, vec![write, ret]);
-                    self.cx.expr_block(block)
-                };
-                let pat = self.cx.pat_ident(self.span, name);
-                let ok = self.cx.path_ident(self.span, Ident::from_str("Ok"));
-                let ok = self.cx.pat_tuple_struct(self.span, ok, vec![pat]);
-                let ok = self.cx.arm(self.span, vec![ok], ok_expr);
-
-                let name = ident();
-                let err_expr = {
-                    let err = self.cx.expr_ident(self.span, name);
-                    let write = self.cx.path(self.span, vec![
-                        self.alloc,
-                        Ident::from_str("heap"),
-                        Ident::from_str("__core"),
-                        Ident::from_str("ptr"),
-                        Ident::from_str("write"),
-                    ]);
-                    let write = self.cx.expr_path(write);
-                    let write = self.cx.expr_call(self.span, write,
-                                                  vec![err_ptr, err]);
-                    let write = self.cx.stmt_semi(write);
-                    let null = self.cx.expr_usize(self.span, 0);
-                    let null = self.cx.expr_cast(self.span, null, self.ptr_u8());
-                    let null = self.cx.stmt_expr(null);
-                    let block = self.cx.block(self.span, vec![write, null]);
-                    self.cx.expr_block(block)
-                };
-                let pat = self.cx.pat_ident(self.span, name);
-                let err = self.cx.path_ident(self.span, Ident::from_str("Err"));
-                let err = self.cx.pat_tuple_struct(self.span, err, vec![pat]);
-                let err = self.cx.arm(self.span, vec![err], err_expr);
-
-                let expr = self.cx.expr_match(self.span, expr, vec![ok, err]);
-                (self.ptr_u8(), expr)
-            }
-
             AllocatorTy::ResultPtr => {
                 // We're creating:
                 //
-                //      match #expr {
-                //          Ok(ptr) => ptr,
-                //          Err(e) => {
-                //              ptr::write(err_ptr, e);
-                //              0 as *mut u8
-                //          }
-                //      }
-
-                let err_ptr = ident();
-                args.push(self.cx.arg(self.span, err_ptr, self.ptr_u8()));
-                let err_ptr = self.cx.expr_ident(self.span, err_ptr);
-                let err_ptr = self.cx.expr_cast(self.span,
-                                                err_ptr,
-                                                self.alloc_err_ptr());
+                //      #expr as *mut u8
 
-                let name = ident();
-                let ok_expr = self.cx.expr_ident(self.span, name);
-                let pat = self.cx.pat_ident(self.span, name);
-                let ok = self.cx.path_ident(self.span, Ident::from_str("Ok"));
-                let ok = self.cx.pat_tuple_struct(self.span, ok, vec![pat]);
-                let ok = self.cx.arm(self.span, vec![ok], ok_expr);
-
-                let name = ident();
-                let err_expr = {
-                    let err = self.cx.expr_ident(self.span, name);
-                    let write = self.cx.path(self.span, vec![
-                        self.alloc,
-                        Ident::from_str("heap"),
-                        Ident::from_str("__core"),
-                        Ident::from_str("ptr"),
-                        Ident::from_str("write"),
-                    ]);
-                    let write = self.cx.expr_path(write);
-                    let write = self.cx.expr_call(self.span, write,
-                                                  vec![err_ptr, err]);
-                    let write = self.cx.stmt_semi(write);
-                    let null = self.cx.expr_usize(self.span, 0);
-                    let null = self.cx.expr_cast(self.span, null, self.ptr_u8());
-                    let null = self.cx.stmt_expr(null);
-                    let block = self.cx.block(self.span, vec![write, null]);
-                    self.cx.expr_block(block)
-                };
-                let pat = self.cx.pat_ident(self.span, name);
-                let err = self.cx.path_ident(self.span, Ident::from_str("Err"));
-                let err = self.cx.pat_tuple_struct(self.span, err, vec![pat]);
-                let err = self.cx.arm(self.span, vec![err], err_expr);
-
-                let expr = self.cx.expr_match(self.span, expr, vec![ok, err]);
+                let expr = self.cx.expr_cast(self.span, expr, self.ptr_u8());
                 (self.ptr_u8(), expr)
             }
 
-            AllocatorTy::ResultUnit => {
-                // We're creating:
-                //
-                //      #expr.is_ok() as u8
-
-                let cast = self.cx.expr_method_call(
-                    self.span,
-                    expr,
-                    Ident::from_str("is_ok"),
-                    Vec::new()
-                );
-                let u8 = self.cx.path_ident(self.span, Ident::from_str("u8"));
-                let u8 = self.cx.ty_path(u8);
-                let cast = self.cx.expr_cast(self.span, cast, u8.clone());
-                (u8, cast)
-            }
-
-            AllocatorTy::Bang => {
-                (self.cx.ty(self.span, TyKind::Never), expr)
-            }
-
             AllocatorTy::Unit => {
                 (self.cx.ty(self.span, TyKind::Tup(Vec::new())), expr)
             }
 
-            AllocatorTy::AllocErr |
             AllocatorTy::Layout |
-            AllocatorTy::LayoutRef |
+            AllocatorTy::Usize |
             AllocatorTy::Ptr => {
                 panic!("can't convert AllocatorTy to an output")
             }
         }
     }
 
+    fn usize(&self) -> P<Ty> {
+        let usize = self.cx.path_ident(self.span, Ident::from_str("usize"));
+        self.cx.ty_path(usize)
+    }
+
     fn ptr_u8(&self) -> P<Ty> {
         let u8 = self.cx.path_ident(self.span, Ident::from_str("u8"));
         let ty_u8 = self.cx.ty_path(u8);
         self.cx.ty_ptr(self.span, ty_u8, Mutability::Mutable)
     }
 
-    fn ptr_usize(&self) -> P<Ty> {
-        let usize = self.cx.path_ident(self.span, Ident::from_str("usize"));
-        let ty_usize = self.cx.ty_path(usize);
-        self.cx.ty_ptr(self.span, ty_usize, Mutability::Mutable)
-    }
-
-    fn layout_ptr(&self) -> P<Ty> {
-        let layout = self.cx.path(self.span, vec![
-            self.alloc,
-            Ident::from_str("heap"),
-            Ident::from_str("Layout"),
-        ]);
-        let layout = self.cx.ty_path(layout);
-        self.cx.ty_ptr(self.span, layout, Mutability::Mutable)
-    }
-
-    fn alloc_err_ptr(&self) -> P<Ty> {
-        let err = self.cx.path(self.span, vec![
-            self.alloc,
-            Ident::from_str("heap"),
-            Ident::from_str("AllocErr"),
+    fn ptr_void(&self) -> P<Ty> {
+        let void = self.cx.path(self.span, vec![
+            self.core,
+            Ident::from_str("alloc"),
+            Ident::from_str("Void"),
         ]);
-        let err = self.cx.ty_path(err);
-        self.cx.ty_ptr(self.span, err, Mutability::Mutable)
+        let ty_void = self.cx.ty_path(void);
+        self.cx.ty_ptr(self.span, ty_void, Mutability::Mutable)
     }
 }
diff --git a/src/librustc_allocator/lib.rs b/src/librustc_allocator/lib.rs
index 0c7a9a91711b7..969086815ded4 100644
--- a/src/librustc_allocator/lib.rs
+++ b/src/librustc_allocator/lib.rs
@@ -23,24 +23,14 @@ pub static ALLOCATOR_METHODS: &[AllocatorMethod] = &[
         inputs: &[AllocatorTy::Layout],
         output: AllocatorTy::ResultPtr,
     },
-    AllocatorMethod {
-        name: "oom",
-        inputs: &[AllocatorTy::AllocErr],
-        output: AllocatorTy::Bang,
-    },
     AllocatorMethod {
         name: "dealloc",
         inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout],
         output: AllocatorTy::Unit,
     },
-    AllocatorMethod {
-        name: "usable_size",
-        inputs: &[AllocatorTy::LayoutRef],
-        output: AllocatorTy::UsizePair,
-    },
     AllocatorMethod {
         name: "realloc",
-        inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout, AllocatorTy::Layout],
+        inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout, AllocatorTy::Usize],
         output: AllocatorTy::ResultPtr,
     },
     AllocatorMethod {
@@ -48,26 +38,6 @@ pub static ALLOCATOR_METHODS: &[AllocatorMethod] = &[
         inputs: &[AllocatorTy::Layout],
         output: AllocatorTy::ResultPtr,
     },
-    AllocatorMethod {
-        name: "alloc_excess",
-        inputs: &[AllocatorTy::Layout],
-        output: AllocatorTy::ResultExcess,
-    },
-    AllocatorMethod {
-        name: "realloc_excess",
-        inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout, AllocatorTy::Layout],
-        output: AllocatorTy::ResultExcess,
-    },
-    AllocatorMethod {
-        name: "grow_in_place",
-        inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout, AllocatorTy::Layout],
-        output: AllocatorTy::ResultUnit,
-    },
-    AllocatorMethod {
-        name: "shrink_in_place",
-        inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout, AllocatorTy::Layout],
-        output: AllocatorTy::ResultUnit,
-    },
 ];
 
 pub struct AllocatorMethod {
@@ -77,14 +47,9 @@ pub struct AllocatorMethod {
 }
 
 pub enum AllocatorTy {
-    AllocErr,
-    Bang,
     Layout,
-    LayoutRef,
     Ptr,
-    ResultExcess,
     ResultPtr,
-    ResultUnit,
     Unit,
-    UsizePair,
+    Usize,
 }
diff --git a/src/librustc_trans/allocator.rs b/src/librustc_trans/allocator.rs
index e1c145b122d76..ffebb959ebfde 100644
--- a/src/librustc_trans/allocator.rs
+++ b/src/librustc_trans/allocator.rs
@@ -30,7 +30,6 @@ pub(crate) unsafe fn trans(tcx: TyCtxt, mods: &ModuleLlvm, kind: AllocatorKind)
     };
     let i8 = llvm::LLVMInt8TypeInContext(llcx);
     let i8p = llvm::LLVMPointerType(i8, 0);
-    let usizep = llvm::LLVMPointerType(usize, 0);
     let void = llvm::LLVMVoidTypeInContext(llcx);
 
     for method in ALLOCATOR_METHODS {
@@ -41,40 +40,19 @@ pub(crate) unsafe fn trans(tcx: TyCtxt, mods: &ModuleLlvm, kind: AllocatorKind)
                     args.push(usize); // size
                     args.push(usize); // align
                 }
-                AllocatorTy::LayoutRef => args.push(i8p),
                 AllocatorTy::Ptr => args.push(i8p),
-                AllocatorTy::AllocErr => args.push(i8p),
+                AllocatorTy::Usize => args.push(usize),
 
-                AllocatorTy::Bang |
-                AllocatorTy::ResultExcess |
                 AllocatorTy::ResultPtr |
-                AllocatorTy::ResultUnit |
-                AllocatorTy::UsizePair |
                 AllocatorTy::Unit => panic!("invalid allocator arg"),
             }
         }
         let output = match method.output {
-            AllocatorTy::UsizePair => {
-                args.push(usizep); // min
-                args.push(usizep); // max
-                None
-            }
-            AllocatorTy::Bang => None,
-            AllocatorTy::ResultExcess => {
-                args.push(i8p); // excess_ptr
-                args.push(i8p); // err_ptr
-                Some(i8p)
-            }
-            AllocatorTy::ResultPtr => {
-                args.push(i8p); // err_ptr
-                Some(i8p)
-            }
-            AllocatorTy::ResultUnit => Some(i8),
+            AllocatorTy::ResultPtr => Some(i8p),
             AllocatorTy::Unit => None,
 
-            AllocatorTy::AllocErr |
             AllocatorTy::Layout |
-            AllocatorTy::LayoutRef |
+            AllocatorTy::Usize |
             AllocatorTy::Ptr => panic!("invalid allocator output"),
         };
         let ty = llvm::LLVMFunctionType(output.unwrap_or(void),
diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs
index 533ad3ad47332..335dc7e041232 100644
--- a/src/libstd/alloc.rs
+++ b/src/libstd/alloc.rs
@@ -21,7 +21,7 @@
 #[doc(hidden)]
 #[allow(unused_attributes)]
 pub mod __default_lib_allocator {
-    use super::{System, Layout, Alloc, AllocErr, CannotReallocInPlace};
+    use super::{System, Layout, GlobalAlloc, Void};
     // for symbol names src/librustc/middle/allocator.rs
     // for signatures src/librustc_allocator/lib.rs
 
@@ -30,20 +30,9 @@ pub mod __default_lib_allocator {
 
     #[no_mangle]
     #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rdl_alloc(size: usize,
-                                     align: usize,
-                                     _err: *mut u8) -> *mut u8 {
+    pub unsafe extern fn __rdl_alloc(size: usize, align: usize) -> *mut u8 {
         let layout = Layout::from_size_align_unchecked(size, align);
-        match System.alloc(layout) {
-            Ok(p) => p,
-            Err(AllocErr) => 0 as *mut u8,
-        }
-    }
-
-    #[no_mangle]
-    #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rdl_oom(err: *const u8) -> ! {
-        System.oom((*(err as *const AllocErr)).clone())
+        System.alloc(layout) as *mut u8
     }
 
     #[no_mangle]
@@ -51,110 +40,76 @@ pub mod __default_lib_allocator {
     pub unsafe extern fn __rdl_dealloc(ptr: *mut u8,
                                        size: usize,
                                        align: usize) {
-        System.dealloc(ptr, Layout::from_size_align_unchecked(size, align))
-    }
-
-    #[no_mangle]
-    #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rdl_usable_size(layout: *const u8,
-                                           min: *mut usize,
-                                           max: *mut usize) {
-        let pair = System.usable_size(&*(layout as *const Layout));
-        *min = pair.0;
-        *max = pair.1;
+        System.dealloc(ptr as *mut Void, Layout::from_size_align_unchecked(size, align))
     }
 
     #[no_mangle]
     #[rustc_std_internal_symbol]
     pub unsafe extern fn __rdl_realloc(ptr: *mut u8,
                                        old_size: usize,
-                                       old_align: usize,
-                                       new_size: usize,
-                                       new_align: usize,
-                                       _err: *mut u8) -> *mut u8 {
-        let old_layout = Layout::from_size_align_unchecked(old_size, old_align);
-        let new_layout = Layout::from_size_align_unchecked(new_size, new_align);
-        match System.realloc(ptr, old_layout, new_layout) {
-            Ok(p) => p,
-            Err(AllocErr) => 0 as *mut u8,
-        }
+                                       align: usize,
+                                       new_size: usize) -> *mut u8 {
+        let old_layout = Layout::from_size_align_unchecked(old_size, align);
+        System.realloc(ptr as *mut Void, old_layout, new_size) as *mut u8
     }
 
     #[no_mangle]
     #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rdl_alloc_zeroed(size: usize,
-                                            align: usize,
-                                            _err: *mut u8) -> *mut u8 {
+    pub unsafe extern fn __rdl_alloc_zeroed(size: usize, align: usize) -> *mut u8 {
         let layout = Layout::from_size_align_unchecked(size, align);
-        match System.alloc_zeroed(layout) {
-            Ok(p) => p,
-            Err(AllocErr) => 0 as *mut u8,
-        }
+        System.alloc_zeroed(layout) as *mut u8
     }
 
-    #[no_mangle]
-    #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rdl_alloc_excess(size: usize,
-                                            align: usize,
-                                            excess: *mut usize,
-                                            _err: *mut u8) -> *mut u8 {
-        let layout = Layout::from_size_align_unchecked(size, align);
-        match System.alloc_excess(layout) {
-            Ok(p) => {
-                *excess = p.1;
-                p.0
-            }
-            Err(AllocErr) => 0 as *mut u8,
+    #[cfg(stage0)]
+    pub mod stage0 {
+        #[no_mangle]
+        #[rustc_std_internal_symbol]
+        pub unsafe extern fn __rdl_usable_size(_layout: *const u8,
+                                               _min: *mut usize,
+                                               _max: *mut usize) {
+            unimplemented!()
         }
-    }
 
-    #[no_mangle]
-    #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rdl_realloc_excess(ptr: *mut u8,
-                                              old_size: usize,
-                                              old_align: usize,
-                                              new_size: usize,
-                                              new_align: usize,
-                                              excess: *mut usize,
-                                              _err: *mut u8) -> *mut u8 {
-        let old_layout = Layout::from_size_align_unchecked(old_size, old_align);
-        let new_layout = Layout::from_size_align_unchecked(new_size, new_align);
-        match System.realloc_excess(ptr, old_layout, new_layout) {
-            Ok(p) => {
-                *excess = p.1;
-                p.0
-            }
-            Err(AllocErr) => 0 as *mut u8,
+        #[no_mangle]
+        #[rustc_std_internal_symbol]
+        pub unsafe extern fn __rdl_alloc_excess(_size: usize,
+                                                _align: usize,
+                                                _excess: *mut usize,
+                                                _err: *mut u8) -> *mut u8 {
+            unimplemented!()
         }
-    }
 
-    #[no_mangle]
-    #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rdl_grow_in_place(ptr: *mut u8,
-                                             old_size: usize,
-                                             old_align: usize,
-                                             new_size: usize,
-                                             new_align: usize) -> u8 {
-        let old_layout = Layout::from_size_align_unchecked(old_size, old_align);
-        let new_layout = Layout::from_size_align_unchecked(new_size, new_align);
-        match System.grow_in_place(ptr, old_layout, new_layout) {
-            Ok(()) => 1,
-            Err(CannotReallocInPlace) => 0,
+        #[no_mangle]
+        #[rustc_std_internal_symbol]
+        pub unsafe extern fn __rdl_realloc_excess(_ptr: *mut u8,
+                                                  _old_size: usize,
+                                                  _old_align: usize,
+                                                  _new_size: usize,
+                                                  _new_align: usize,
+                                                  _excess: *mut usize,
+                                                  _err: *mut u8) -> *mut u8 {
+            unimplemented!()
         }
-    }
 
-    #[no_mangle]
-    #[rustc_std_internal_symbol]
-    pub unsafe extern fn __rdl_shrink_in_place(ptr: *mut u8,
-                                               old_size: usize,
-                                               old_align: usize,
-                                               new_size: usize,
-                                               new_align: usize) -> u8 {
-        let old_layout = Layout::from_size_align_unchecked(old_size, old_align);
-        let new_layout = Layout::from_size_align_unchecked(new_size, new_align);
-        match System.shrink_in_place(ptr, old_layout, new_layout) {
-            Ok(()) => 1,
-            Err(CannotReallocInPlace) => 0,
+        #[no_mangle]
+        #[rustc_std_internal_symbol]
+        pub unsafe extern fn __rdl_grow_in_place(_ptr: *mut u8,
+                                                 _old_size: usize,
+                                                 _old_align: usize,
+                                                 _new_size: usize,
+                                                 _new_align: usize) -> u8 {
+            unimplemented!()
         }
+
+        #[no_mangle]
+        #[rustc_std_internal_symbol]
+        pub unsafe extern fn __rdl_shrink_in_place(_ptr: *mut u8,
+                                                   _old_size: usize,
+                                                   _old_align: usize,
+                                                   _new_size: usize,
+                                                   _new_align: usize) -> u8 {
+            unimplemented!()
+        }
+
     }
 }
diff --git a/src/llvm b/src/llvm
index 6ceaaa4b0176a..7243155b1c3da 160000
--- a/src/llvm
+++ b/src/llvm
@@ -1 +1 @@
-Subproject commit 6ceaaa4b0176a200e4bbd347d6a991ab6c776ede
+Subproject commit 7243155b1c3da0a980c868a87adebf00e0b33989
diff --git a/src/rustllvm/llvm-rebuild-trigger b/src/rustllvm/llvm-rebuild-trigger
index c4c0f1ab6e609..c3fc3e5452c4f 100644
--- a/src/rustllvm/llvm-rebuild-trigger
+++ b/src/rustllvm/llvm-rebuild-trigger
@@ -1,4 +1,4 @@
 # If this file is modified, then llvm will be (optionally) cleaned and then rebuilt.
 # The actual contents of this file do not matter, but to trigger a change on the
 # build bots then the contents should be changed so git updates the mtime.
-2018-03-10
+2018-04-05
diff --git a/src/test/compile-fail/allocator/not-an-allocator.rs b/src/test/compile-fail/allocator/not-an-allocator.rs
index e430143506346..140cad22f34e4 100644
--- a/src/test/compile-fail/allocator/not-an-allocator.rs
+++ b/src/test/compile-fail/allocator/not-an-allocator.rs
@@ -12,15 +12,9 @@
 
 #[global_allocator]
 static A: usize = 0;
-//~^ the trait bound `&usize:
-//~| the trait bound `&usize:
-//~| the trait bound `&usize:
-//~| the trait bound `&usize:
-//~| the trait bound `&usize:
-//~| the trait bound `&usize:
-//~| the trait bound `&usize:
-//~| the trait bound `&usize:
-//~| the trait bound `&usize:
-//~| the trait bound `&usize:
+//~^ the trait bound `usize:
+//~| the trait bound `usize:
+//~| the trait bound `usize:
+//~| the trait bound `usize:
 
 fn main() {}
diff --git a/src/test/run-make-fulldeps/std-core-cycle/bar.rs b/src/test/run-make-fulldeps/std-core-cycle/bar.rs
index 6def5b6f5e181..20b87028fd1b3 100644
--- a/src/test/run-make-fulldeps/std-core-cycle/bar.rs
+++ b/src/test/run-make-fulldeps/std-core-cycle/bar.rs
@@ -11,16 +11,16 @@
 #![feature(allocator_api)]
 #![crate_type = "rlib"]
 
-use std::heap::*;
+use std::alloc::*;
 
 pub struct A;
 
-unsafe impl<'a> Alloc for &'a A {
-    unsafe fn alloc(&mut self, _: Layout) -> Result<*mut u8, AllocErr> {
+unsafe impl GlobalAlloc for A {
+    unsafe fn alloc(&self, _: Layout) -> *mut Void {
         loop {}
     }
 
-    unsafe fn dealloc(&mut self, _ptr: *mut u8, _: Layout) {
+    unsafe fn dealloc(&self, _ptr: *mut Void, _: Layout) {
         loop {}
     }
 }
diff --git a/src/test/run-pass/allocator/auxiliary/custom.rs b/src/test/run-pass/allocator/auxiliary/custom.rs
index 8f4fbcd5ab1c1..95096efc7ef68 100644
--- a/src/test/run-pass/allocator/auxiliary/custom.rs
+++ b/src/test/run-pass/allocator/auxiliary/custom.rs
@@ -13,18 +13,18 @@
 #![feature(heap_api, allocator_api)]
 #![crate_type = "rlib"]
 
-use std::heap::{Alloc, System, AllocErr, Layout};
+use std::heap::{GlobalAlloc, System, Layout, Void};
 use std::sync::atomic::{AtomicUsize, Ordering};
 
 pub struct A(pub AtomicUsize);
 
-unsafe impl<'a> Alloc for &'a A {
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+unsafe impl GlobalAlloc for A {
+    unsafe fn alloc(&self, layout: Layout) -> *mut Void {
         self.0.fetch_add(1, Ordering::SeqCst);
         System.alloc(layout)
     }
 
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+    unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
         self.0.fetch_add(1, Ordering::SeqCst);
         System.dealloc(ptr, layout)
     }
diff --git a/src/test/run-pass/allocator/custom.rs b/src/test/run-pass/allocator/custom.rs
index 22081678fb999..f7b2fd73c87b9 100644
--- a/src/test/run-pass/allocator/custom.rs
+++ b/src/test/run-pass/allocator/custom.rs
@@ -15,20 +15,20 @@
 
 extern crate helper;
 
-use std::heap::{Heap, Alloc, System, Layout, AllocErr};
+use std::alloc::{self, Global, Alloc, System, Layout, Void};
 use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
 
 static HITS: AtomicUsize = ATOMIC_USIZE_INIT;
 
 struct A;
 
-unsafe impl<'a> Alloc for &'a A {
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+unsafe impl alloc::GlobalAlloc for A {
+    unsafe fn alloc(&self, layout: Layout) -> *mut Void {
         HITS.fetch_add(1, Ordering::SeqCst);
         System.alloc(layout)
     }
 
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+    unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
         HITS.fetch_add(1, Ordering::SeqCst);
         System.dealloc(ptr, layout)
     }
@@ -45,10 +45,10 @@ fn main() {
     unsafe {
         let layout = Layout::from_size_align(4, 2).unwrap();
 
-        let ptr = Heap.alloc(layout.clone()).unwrap();
+        let ptr = Global.alloc(layout.clone()).unwrap();
         helper::work_with(&ptr);
         assert_eq!(HITS.load(Ordering::SeqCst), n + 1);
-        Heap.dealloc(ptr, layout.clone());
+        Global.dealloc(ptr, layout.clone());
         assert_eq!(HITS.load(Ordering::SeqCst), n + 2);
 
         let s = String::with_capacity(10);
diff --git a/src/test/run-pass/allocator/xcrate-use.rs b/src/test/run-pass/allocator/xcrate-use.rs
index 04d2ef466e73d..78d604a710857 100644
--- a/src/test/run-pass/allocator/xcrate-use.rs
+++ b/src/test/run-pass/allocator/xcrate-use.rs
@@ -17,7 +17,7 @@
 extern crate custom;
 extern crate helper;
 
-use std::heap::{Heap, Alloc, System, Layout};
+use std::alloc::{Global, Alloc, System, Layout};
 use std::sync::atomic::{Ordering, ATOMIC_USIZE_INIT};
 
 #[global_allocator]
@@ -28,10 +28,10 @@ fn main() {
         let n = GLOBAL.0.load(Ordering::SeqCst);
         let layout = Layout::from_size_align(4, 2).unwrap();
 
-        let ptr = Heap.alloc(layout.clone()).unwrap();
+        let ptr = Global.alloc(layout.clone()).unwrap();
         helper::work_with(&ptr);
         assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 1);
-        Heap.dealloc(ptr, layout.clone());
+        Global.dealloc(ptr, layout.clone());
         assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
 
         let ptr = System.alloc(layout.clone()).unwrap();
diff --git a/src/test/run-pass/allocator/xcrate-use2.rs b/src/test/run-pass/allocator/xcrate-use2.rs
index 155fb5d6c5de9..52eb963efdb74 100644
--- a/src/test/run-pass/allocator/xcrate-use2.rs
+++ b/src/test/run-pass/allocator/xcrate-use2.rs
@@ -19,7 +19,7 @@ extern crate custom;
 extern crate custom_as_global;
 extern crate helper;
 
-use std::heap::{Heap, Alloc, System, Layout};
+use std::alloc::{Global, Alloc, GlobalAlloc, System, Layout};
 use std::sync::atomic::{Ordering, ATOMIC_USIZE_INIT};
 
 static GLOBAL: custom::A = custom::A(ATOMIC_USIZE_INIT);
@@ -30,25 +30,25 @@ fn main() {
         let layout = Layout::from_size_align(4, 2).unwrap();
 
         // Global allocator routes to the `custom_as_global` global
-        let ptr = Heap.alloc(layout.clone()).unwrap();
+        let ptr = Global.alloc(layout.clone()).unwrap();
         helper::work_with(&ptr);
         assert_eq!(custom_as_global::get(), n + 1);
-        Heap.dealloc(ptr, layout.clone());
+        Global.dealloc(ptr, layout.clone());
         assert_eq!(custom_as_global::get(), n + 2);
 
         // Usage of the system allocator avoids all globals
-        let ptr = System.alloc(layout.clone()).unwrap();
+        let ptr = System.alloc(layout.clone());
         helper::work_with(&ptr);
         assert_eq!(custom_as_global::get(), n + 2);
         System.dealloc(ptr, layout.clone());
         assert_eq!(custom_as_global::get(), n + 2);
 
         // Usage of our personal allocator doesn't affect other instances
-        let ptr = (&GLOBAL).alloc(layout.clone()).unwrap();
+        let ptr = GLOBAL.alloc(layout.clone());
         helper::work_with(&ptr);
         assert_eq!(custom_as_global::get(), n + 2);
         assert_eq!(GLOBAL.0.load(Ordering::SeqCst), 1);
-        (&GLOBAL).dealloc(ptr, layout);
+        GLOBAL.dealloc(ptr, layout);
         assert_eq!(custom_as_global::get(), n + 2);
         assert_eq!(GLOBAL.0.load(Ordering::SeqCst), 2);
     }

From 157ff8cd0562eefdd7aa296395c38a7bc259a4b9 Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Tue, 3 Apr 2018 16:00:04 +0200
Subject: [PATCH 14/27] Remove the now-unit-struct AllocErr parameter of oom()

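AllocErr no longer carries any information, so there is nothing useful to
pass into oom(): the trait method loses its parameter and callers simply
drop the error value. A minimal sketch of the resulting calling pattern
(the allocator `a`, the `layout` binding and the surrounding code are
illustrative only, not code from this patch):

    // `a` is any allocator implementing the Alloc trait.
    let ptr = match a.alloc(layout) {
        Ok(ptr) => ptr,
        // AllocErr is a unit struct now, so there is nothing to forward;
        // oom() is called without an argument.
        Err(_) => a.oom(),
    };
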
---
 src/liballoc/alloc.rs                    |  6 +++---
 src/liballoc/arc.rs                      |  2 +-
 src/liballoc/heap.rs                     |  4 ++--
 src/liballoc/raw_vec.rs                  | 12 ++++++------
 src/liballoc/rc.rs                       |  2 +-
 src/liballoc_system/lib.rs               | 12 ++++++------
 src/libcore/alloc.rs                     |  2 +-
 src/libstd/collections/hash/map.rs       |  2 +-
 src/libstd/collections/hash/table.rs     |  4 ++--
 src/test/run-pass/allocator-alloc-one.rs |  4 ++--
 src/test/run-pass/realloc-16687.rs       |  4 ++--
 src/test/run-pass/regions-mock-trans.rs  |  2 +-
 12 files changed, 28 insertions(+), 28 deletions(-)

diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index 73bc78eb8a2e8..a7b5864016c2b 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -136,8 +136,8 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
         align as *mut u8
     } else {
         let layout = Layout::from_size_align_unchecked(size, align);
-        Global.alloc(layout).unwrap_or_else(|err| {
-            Global.oom(err)
+        Global.alloc(layout).unwrap_or_else(|_| {
+            Global.oom()
         })
     }
 }
@@ -166,7 +166,7 @@ mod tests {
         unsafe {
             let layout = Layout::from_size_align(1024, 1).unwrap();
             let ptr = Global.alloc_zeroed(layout.clone())
-                .unwrap_or_else(|e| Global.oom(e));
+                .unwrap_or_else(|_| Global.oom());
 
             let end = ptr.offset(layout.size() as isize);
             let mut i = ptr;
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index d63ed24aa4f69..f0a325530ba1e 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -555,7 +555,7 @@ impl<T: ?Sized> Arc<T> {
         let layout = Layout::for_value(&*fake_ptr);
 
         let mem = Global.alloc(layout)
-            .unwrap_or_else(|e| Global.oom(e));
+            .unwrap_or_else(|_| Global.oom());
 
         // Initialize the real ArcInner
         let inner = set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>;
diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs
index a44ff04bd1b2f..765fb8458d11d 100644
--- a/src/liballoc/heap.rs
+++ b/src/liballoc/heap.rs
@@ -52,8 +52,8 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
         CoreAlloc::dealloc(self, ptr, layout)
     }
 
-    fn oom(&mut self, err: AllocErr) -> ! {
-        CoreAlloc::oom(self, err)
+    fn oom(&mut self, _: AllocErr) -> ! {
+        CoreAlloc::oom(self)
     }
 
     fn usable_size(&self, layout: &Layout) -> (usize, usize) {
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index caedb971ddc6f..25d759764a5a6 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -100,7 +100,7 @@ impl<T, A: Alloc> RawVec<T, A> {
                 };
                 match result {
                     Ok(ptr) => ptr,
-                    Err(err) => a.oom(err),
+                    Err(_) => a.oom(),
                 }
             };
 
@@ -316,7 +316,7 @@ impl<T, A: Alloc> RawVec<T, A> {
                                                  new_layout);
                     match ptr_res {
                         Ok(ptr) => (new_cap, Unique::new_unchecked(ptr as *mut T)),
-                        Err(e) => self.a.oom(e),
+                        Err(_) => self.a.oom(),
                     }
                 }
                 None => {
@@ -325,7 +325,7 @@ impl<T, A: Alloc> RawVec<T, A> {
                     let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
                     match self.a.alloc_array::<T>(new_cap) {
                         Ok(ptr) => (new_cap, ptr.into()),
-                        Err(e) => self.a.oom(e),
+                        Err(_) => self.a.oom(),
                     }
                 }
             };
@@ -444,7 +444,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) {
         match self.try_reserve_exact(used_cap, needed_extra_cap) {
             Err(CapacityOverflow) => panic!("capacity overflow"),
-            Err(AllocErr(e)) => self.a.oom(e),
+            Err(AllocErr(_)) => self.a.oom(),
             Ok(()) => { /* yay */ }
          }
      }
@@ -554,7 +554,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) {
         match self.try_reserve(used_cap, needed_extra_cap) {
             Err(CapacityOverflow) => panic!("capacity overflow"),
-            Err(AllocErr(e)) => self.a.oom(e),
+            Err(AllocErr(_)) => self.a.oom(),
             Ok(()) => { /* yay */ }
          }
      }
@@ -669,7 +669,7 @@ impl<T, A: Alloc> RawVec<T, A> {
                                      old_layout,
                                      new_layout) {
                     Ok(p) => self.ptr = Unique::new_unchecked(p as *mut T),
-                    Err(err) => self.a.oom(err),
+                    Err(_) => self.a.oom(),
                 }
             }
             self.cap = amount;
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index c134b181158f5..3c0b11bfe747f 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -668,7 +668,7 @@ impl<T: ?Sized> Rc<T> {
         let layout = Layout::for_value(&*fake_ptr);
 
         let mem = Global.alloc(layout)
-            .unwrap_or_else(|e| Global.oom(e));
+            .unwrap_or_else(|_| Global.oom());
 
         // Initialize the real RcBox
         let inner = set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>;
diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs
index 0480be8d913ae..5e6b3b5ca1129 100644
--- a/src/liballoc_system/lib.rs
+++ b/src/liballoc_system/lib.rs
@@ -73,8 +73,8 @@ unsafe impl Alloc for System {
         Alloc::realloc(&mut &*self, ptr, old_layout, new_layout)
     }
 
-    fn oom(&mut self, err: AllocErr) -> ! {
-        Alloc::oom(&mut &*self, err)
+    fn oom(&mut self) -> ! {
+        Alloc::oom(&mut &*self)
     }
 
     #[inline]
@@ -242,7 +242,7 @@ mod platform {
     unsafe impl<'a> Alloc for &'a System {
         alloc_methods_based_on_global_alloc!();
 
-        fn oom(&mut self, err: AllocErr) -> ! {
+        fn oom(&mut self) -> ! {
             use core::fmt::{self, Write};
 
             // Print a message to stderr before aborting to assist with
@@ -250,7 +250,7 @@ mod platform {
             // memory since we are in an OOM situation. Any errors are ignored
             // while printing since there's nothing we can do about them and we
             // are about to exit anyways.
-            drop(writeln!(Stderr, "fatal runtime error: {}", err));
+            drop(writeln!(Stderr, "fatal runtime error: {}", AllocErr));
             unsafe {
                 ::core::intrinsics::abort();
             }
@@ -459,11 +459,11 @@ mod platform {
             }
         }
 
-        fn oom(&mut self, err: AllocErr) -> ! {
+        fn oom(&mut self) -> ! {
             use core::fmt::{self, Write};
 
             // Same as with unix we ignore all errors here
-            drop(writeln!(Stderr, "fatal runtime error: {}", err));
+            drop(writeln!(Stderr, "fatal runtime error: {}", AllocErr));
             unsafe {
                 ::core::intrinsics::abort();
             }
diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs
index 1c764dab000f8..1ba4c641065ff 100644
--- a/src/libcore/alloc.rs
+++ b/src/libcore/alloc.rs
@@ -572,7 +572,7 @@ pub unsafe trait Alloc {
     /// instead they should return an appropriate error from the
     /// invoked method, and let the client decide whether to invoke
     /// this `oom` method in response.
-    fn oom(&mut self, _: AllocErr) -> ! {
+    fn oom(&mut self) -> ! {
         unsafe { ::intrinsics::abort() }
     }
 
diff --git a/src/libstd/collections/hash/map.rs b/src/libstd/collections/hash/map.rs
index c4ef9e62577cd..2a00241afc603 100644
--- a/src/libstd/collections/hash/map.rs
+++ b/src/libstd/collections/hash/map.rs
@@ -784,7 +784,7 @@ impl<K, V, S> HashMap<K, V, S>
     pub fn reserve(&mut self, additional: usize) {
         match self.try_reserve(additional) {
             Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-            Err(CollectionAllocErr::AllocErr(e)) => Global.oom(e),
+            Err(CollectionAllocErr::AllocErr(_)) => Global.oom(),
             Ok(()) => { /* yay */ }
          }
     }
diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs
index 10bab5df8b543..fcc2eb8fef2bb 100644
--- a/src/libstd/collections/hash/table.rs
+++ b/src/libstd/collections/hash/table.rs
@@ -772,7 +772,7 @@ impl<K, V> RawTable<K, V> {
     unsafe fn new_uninitialized(capacity: usize) -> RawTable<K, V> {
         match Self::try_new_uninitialized(capacity) {
             Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-            Err(CollectionAllocErr::AllocErr(e)) => Global.oom(e),
+            Err(CollectionAllocErr::AllocErr(_)) => Global.oom(),
             Ok(table) => { table }
         }
     }
@@ -811,7 +811,7 @@ impl<K, V> RawTable<K, V> {
     pub fn new(capacity: usize) -> RawTable<K, V> {
         match Self::try_new(capacity) {
             Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-            Err(CollectionAllocErr::AllocErr(e)) => Global.oom(e),
+            Err(CollectionAllocErr::AllocErr(_)) => Global.oom(),
             Ok(table) => { table }
         }
     }
diff --git a/src/test/run-pass/allocator-alloc-one.rs b/src/test/run-pass/allocator-alloc-one.rs
index eaa5bc9080576..38b8ab50cc713 100644
--- a/src/test/run-pass/allocator-alloc-one.rs
+++ b/src/test/run-pass/allocator-alloc-one.rs
@@ -14,8 +14,8 @@ use std::heap::{Heap, Alloc};
 
 fn main() {
     unsafe {
-        let ptr = Heap.alloc_one::<i32>().unwrap_or_else(|e| {
-            Heap.oom(e)
+        let ptr = Heap.alloc_one::<i32>().unwrap_or_else(|_| {
+            Heap.oom()
         });
         *ptr.as_ptr() = 4;
         assert_eq!(*ptr.as_ptr(), 4);
diff --git a/src/test/run-pass/realloc-16687.rs b/src/test/run-pass/realloc-16687.rs
index eddcd5a584a5d..a562165d21b7d 100644
--- a/src/test/run-pass/realloc-16687.rs
+++ b/src/test/run-pass/realloc-16687.rs
@@ -50,7 +50,7 @@ unsafe fn test_triangle() -> bool {
             println!("allocate({:?})", layout);
         }
 
-        let ret = Heap.alloc(layout.clone()).unwrap_or_else(|e| Heap.oom(e));
+        let ret = Heap.alloc(layout.clone()).unwrap_or_else(|_| Heap.oom());
 
         if PRINT {
             println!("allocate({:?}) = {:?}", layout, ret);
@@ -73,7 +73,7 @@ unsafe fn test_triangle() -> bool {
         }
 
         let ret = Heap.realloc(ptr, old.clone(), new.clone())
-            .unwrap_or_else(|e| Heap.oom(e));
+            .unwrap_or_else(|_| Heap.oom());
 
         if PRINT {
             println!("reallocate({:?}, old={:?}, new={:?}) = {:?}",
diff --git a/src/test/run-pass/regions-mock-trans.rs b/src/test/run-pass/regions-mock-trans.rs
index 8f278a315d1af..7d34b8fd00fc8 100644
--- a/src/test/run-pass/regions-mock-trans.rs
+++ b/src/test/run-pass/regions-mock-trans.rs
@@ -32,7 +32,7 @@ struct Ccx {
 fn alloc<'a>(_bcx : &'a arena) -> &'a Bcx<'a> {
     unsafe {
         let ptr = Heap.alloc(Layout::new::<Bcx>())
-            .unwrap_or_else(|e| Heap.oom(e));
+            .unwrap_or_else(|_| Heap.oom());
         &*(ptr as *const _)
     }
 }

From 93a9ad4897e560ccd5ebc3397afb7d83d990ef42 Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Tue, 3 Apr 2018 16:01:29 +0200
Subject: [PATCH 15/27] Remove the now-unit-struct AllocErr field inside
 CollectionAllocErr

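With AllocErr now a unit struct, keeping a copy of it inside
CollectionAllocErr::AllocErr adds nothing, so that variant becomes a unit
variant as well and call sites match it without destructuring. A small
sketch of the resulting pattern (the `vec` and `additional` names are
illustrative only, not code from this patch):

    match vec.try_reserve(additional) {
        Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
        // Unit variant: there is no inner AllocErr left to bind.
        Err(CollectionAllocErr::AllocErr) => panic!("allocation failed"),
        Ok(()) => {}
    }
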
---
 src/liballoc/raw_vec.rs              |  4 ++--
 src/liballoc/tests/string.rs         | 12 ++++++------
 src/liballoc/tests/vec.rs            | 16 ++++++++--------
 src/liballoc/tests/vec_deque.rs      | 12 ++++++------
 src/libcore/alloc.rs                 |  6 +++---
 src/libstd/collections/hash/map.rs   |  4 ++--
 src/libstd/collections/hash/table.rs |  4 ++--
 7 files changed, 29 insertions(+), 29 deletions(-)

diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 25d759764a5a6..d7c30925f1a6a 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -444,7 +444,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) {
         match self.try_reserve_exact(used_cap, needed_extra_cap) {
             Err(CapacityOverflow) => panic!("capacity overflow"),
-            Err(AllocErr(_)) => self.a.oom(),
+            Err(AllocErr) => self.a.oom(),
             Ok(()) => { /* yay */ }
          }
      }
@@ -554,7 +554,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) {
         match self.try_reserve(used_cap, needed_extra_cap) {
             Err(CapacityOverflow) => panic!("capacity overflow"),
-            Err(AllocErr(_)) => self.a.oom(),
+            Err(AllocErr) => self.a.oom(),
             Ok(()) => { /* yay */ }
          }
      }
diff --git a/src/liballoc/tests/string.rs b/src/liballoc/tests/string.rs
index 17d53e4cf3e09..befb36baeef1e 100644
--- a/src/liballoc/tests/string.rs
+++ b/src/liballoc/tests/string.rs
@@ -575,11 +575,11 @@ fn test_try_reserve() {
             } else { panic!("usize::MAX should trigger an overflow!") }
         } else {
             // Check isize::MAX + 1 is an OOM
-            if let Err(AllocErr(_)) = empty_string.try_reserve(MAX_CAP + 1) {
+            if let Err(AllocErr) = empty_string.try_reserve(MAX_CAP + 1) {
             } else { panic!("isize::MAX + 1 should trigger an OOM!") }
 
             // Check usize::MAX is an OOM
-            if let Err(AllocErr(_)) = empty_string.try_reserve(MAX_USIZE) {
+            if let Err(AllocErr) = empty_string.try_reserve(MAX_USIZE) {
             } else { panic!("usize::MAX should trigger an OOM!") }
         }
     }
@@ -599,7 +599,7 @@ fn test_try_reserve() {
             if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 9) {
             } else { panic!("isize::MAX + 1 should trigger an overflow!"); }
         } else {
-            if let Err(AllocErr(_)) = ten_bytes.try_reserve(MAX_CAP - 9) {
+            if let Err(AllocErr) = ten_bytes.try_reserve(MAX_CAP - 9) {
             } else { panic!("isize::MAX + 1 should trigger an OOM!") }
         }
         // Should always overflow in the add-to-len
@@ -637,10 +637,10 @@ fn test_try_reserve_exact() {
             if let Err(CapacityOverflow) = empty_string.try_reserve_exact(MAX_USIZE) {
             } else { panic!("usize::MAX should trigger an overflow!") }
         } else {
-            if let Err(AllocErr(_)) = empty_string.try_reserve_exact(MAX_CAP + 1) {
+            if let Err(AllocErr) = empty_string.try_reserve_exact(MAX_CAP + 1) {
             } else { panic!("isize::MAX + 1 should trigger an OOM!") }
 
-            if let Err(AllocErr(_)) = empty_string.try_reserve_exact(MAX_USIZE) {
+            if let Err(AllocErr) = empty_string.try_reserve_exact(MAX_USIZE) {
             } else { panic!("usize::MAX should trigger an OOM!") }
         }
     }
@@ -659,7 +659,7 @@ fn test_try_reserve_exact() {
             if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
             } else { panic!("isize::MAX + 1 should trigger an overflow!"); }
         } else {
-            if let Err(AllocErr(_)) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
+            if let Err(AllocErr) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
             } else { panic!("isize::MAX + 1 should trigger an OOM!") }
         }
         if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_USIZE) {
diff --git a/src/liballoc/tests/vec.rs b/src/liballoc/tests/vec.rs
index 2895c53009d9a..e329b45a6175d 100644
--- a/src/liballoc/tests/vec.rs
+++ b/src/liballoc/tests/vec.rs
@@ -1016,11 +1016,11 @@ fn test_try_reserve() {
             } else { panic!("usize::MAX should trigger an overflow!") }
         } else {
             // Check isize::MAX + 1 is an OOM
-            if let Err(AllocErr(_)) = empty_bytes.try_reserve(MAX_CAP + 1) {
+            if let Err(AllocErr) = empty_bytes.try_reserve(MAX_CAP + 1) {
             } else { panic!("isize::MAX + 1 should trigger an OOM!") }
 
             // Check usize::MAX is an OOM
-            if let Err(AllocErr(_)) = empty_bytes.try_reserve(MAX_USIZE) {
+            if let Err(AllocErr) = empty_bytes.try_reserve(MAX_USIZE) {
             } else { panic!("usize::MAX should trigger an OOM!") }
         }
     }
@@ -1040,7 +1040,7 @@ fn test_try_reserve() {
             if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 9) {
             } else { panic!("isize::MAX + 1 should trigger an overflow!"); }
         } else {
-            if let Err(AllocErr(_)) = ten_bytes.try_reserve(MAX_CAP - 9) {
+            if let Err(AllocErr) = ten_bytes.try_reserve(MAX_CAP - 9) {
             } else { panic!("isize::MAX + 1 should trigger an OOM!") }
         }
         // Should always overflow in the add-to-len
@@ -1063,7 +1063,7 @@ fn test_try_reserve() {
             if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP/4 - 9) {
             } else { panic!("isize::MAX + 1 should trigger an overflow!"); }
         } else {
-            if let Err(AllocErr(_)) = ten_u32s.try_reserve(MAX_CAP/4 - 9) {
+            if let Err(AllocErr) = ten_u32s.try_reserve(MAX_CAP/4 - 9) {
             } else { panic!("isize::MAX + 1 should trigger an OOM!") }
         }
         // Should fail in the mul-by-size
@@ -1103,10 +1103,10 @@ fn test_try_reserve_exact() {
             if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_USIZE) {
             } else { panic!("usize::MAX should trigger an overflow!") }
         } else {
-            if let Err(AllocErr(_)) = empty_bytes.try_reserve_exact(MAX_CAP + 1) {
+            if let Err(AllocErr) = empty_bytes.try_reserve_exact(MAX_CAP + 1) {
             } else { panic!("isize::MAX + 1 should trigger an OOM!") }
 
-            if let Err(AllocErr(_)) = empty_bytes.try_reserve_exact(MAX_USIZE) {
+            if let Err(AllocErr) = empty_bytes.try_reserve_exact(MAX_USIZE) {
             } else { panic!("usize::MAX should trigger an OOM!") }
         }
     }
@@ -1125,7 +1125,7 @@ fn test_try_reserve_exact() {
             if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
             } else { panic!("isize::MAX + 1 should trigger an overflow!"); }
         } else {
-            if let Err(AllocErr(_)) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
+            if let Err(AllocErr) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
             } else { panic!("isize::MAX + 1 should trigger an OOM!") }
         }
         if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_USIZE) {
@@ -1146,7 +1146,7 @@ fn test_try_reserve_exact() {
             if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 9) {
             } else { panic!("isize::MAX + 1 should trigger an overflow!"); }
         } else {
-            if let Err(AllocErr(_)) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 9) {
+            if let Err(AllocErr) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 9) {
             } else { panic!("isize::MAX + 1 should trigger an OOM!") }
         }
         if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_USIZE - 20) {
diff --git a/src/liballoc/tests/vec_deque.rs b/src/liballoc/tests/vec_deque.rs
index 75d3f01f8b601..4d55584e2f4df 100644
--- a/src/liballoc/tests/vec_deque.rs
+++ b/src/liballoc/tests/vec_deque.rs
@@ -1073,7 +1073,7 @@ fn test_try_reserve() {
             // VecDeque starts with capacity 7, always adds 1 to the capacity
             // and also rounds the number to next power of 2 so this is the
             // furthest we can go without triggering CapacityOverflow
-            if let Err(AllocErr(_)) = empty_bytes.try_reserve(MAX_CAP) {
+            if let Err(AllocErr) = empty_bytes.try_reserve(MAX_CAP) {
             } else { panic!("isize::MAX + 1 should trigger an OOM!") }
         }
     }
@@ -1093,7 +1093,7 @@ fn test_try_reserve() {
             if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 9) {
             } else { panic!("isize::MAX + 1 should trigger an overflow!"); }
         } else {
-            if let Err(AllocErr(_)) = ten_bytes.try_reserve(MAX_CAP - 9) {
+            if let Err(AllocErr) = ten_bytes.try_reserve(MAX_CAP - 9) {
             } else { panic!("isize::MAX + 1 should trigger an OOM!") }
         }
         // Should always overflow in the add-to-len
@@ -1116,7 +1116,7 @@ fn test_try_reserve() {
             if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP/4 - 9) {
             } else { panic!("isize::MAX + 1 should trigger an overflow!"); }
         } else {
-            if let Err(AllocErr(_)) = ten_u32s.try_reserve(MAX_CAP/4 - 9) {
+            if let Err(AllocErr) = ten_u32s.try_reserve(MAX_CAP/4 - 9) {
             } else { panic!("isize::MAX + 1 should trigger an OOM!") }
         }
         // Should fail in the mul-by-size
@@ -1160,7 +1160,7 @@ fn test_try_reserve_exact() {
             // VecDeque starts with capacity 7, always adds 1 to the capacity
             // and also rounds the number to next power of 2 so this is the
             // furthest we can go without triggering CapacityOverflow
-            if let Err(AllocErr(_)) = empty_bytes.try_reserve_exact(MAX_CAP) {
+            if let Err(AllocErr) = empty_bytes.try_reserve_exact(MAX_CAP) {
             } else { panic!("isize::MAX + 1 should trigger an OOM!") }
         }
     }
@@ -1179,7 +1179,7 @@ fn test_try_reserve_exact() {
             if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
             } else { panic!("isize::MAX + 1 should trigger an overflow!"); }
         } else {
-            if let Err(AllocErr(_)) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
+            if let Err(AllocErr) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
             } else { panic!("isize::MAX + 1 should trigger an OOM!") }
         }
         if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_USIZE) {
@@ -1200,7 +1200,7 @@ fn test_try_reserve_exact() {
             if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 9) {
             } else { panic!("isize::MAX + 1 should trigger an overflow!"); }
         } else {
-            if let Err(AllocErr(_)) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 9) {
+            if let Err(AllocErr) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 9) {
             } else { panic!("isize::MAX + 1 should trigger an OOM!") }
         }
         if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_USIZE - 20) {
diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs
index 1ba4c641065ff..23532c6172189 100644
--- a/src/libcore/alloc.rs
+++ b/src/libcore/alloc.rs
@@ -356,13 +356,13 @@ pub enum CollectionAllocErr {
     /// (usually `isize::MAX` bytes).
     CapacityOverflow,
     /// Error due to the allocator (see the `AllocErr` type's docs).
-    AllocErr(AllocErr),
+    AllocErr,
 }
 
 #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
 impl From<AllocErr> for CollectionAllocErr {
-    fn from(err: AllocErr) -> Self {
-        CollectionAllocErr::AllocErr(err)
+    fn from(AllocErr: AllocErr) -> Self {
+        CollectionAllocErr::AllocErr
     }
 }
 
diff --git a/src/libstd/collections/hash/map.rs b/src/libstd/collections/hash/map.rs
index 2a00241afc603..20a4f9b508d24 100644
--- a/src/libstd/collections/hash/map.rs
+++ b/src/libstd/collections/hash/map.rs
@@ -784,7 +784,7 @@ impl<K, V, S> HashMap<K, V, S>
     pub fn reserve(&mut self, additional: usize) {
         match self.try_reserve(additional) {
             Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-            Err(CollectionAllocErr::AllocErr(_)) => Global.oom(),
+            Err(CollectionAllocErr::AllocErr) => Global.oom(),
             Ok(()) => { /* yay */ }
          }
     }
@@ -3634,7 +3634,7 @@ mod test_map {
             if let Err(CapacityOverflow) = empty_bytes.try_reserve(max_no_ovf) {
             } else { panic!("isize::MAX + 1 should trigger a CapacityOverflow!") }
         } else {
-            if let Err(AllocErr(_)) = empty_bytes.try_reserve(max_no_ovf) {
+            if let Err(AllocErr) = empty_bytes.try_reserve(max_no_ovf) {
             } else { panic!("isize::MAX + 1 should trigger an OOM!") }
         }
     }
diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs
index fcc2eb8fef2bb..e9bdd4e7d07ea 100644
--- a/src/libstd/collections/hash/table.rs
+++ b/src/libstd/collections/hash/table.rs
@@ -772,7 +772,7 @@ impl<K, V> RawTable<K, V> {
     unsafe fn new_uninitialized(capacity: usize) -> RawTable<K, V> {
         match Self::try_new_uninitialized(capacity) {
             Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-            Err(CollectionAllocErr::AllocErr(_)) => Global.oom(),
+            Err(CollectionAllocErr::AllocErr) => Global.oom(),
             Ok(table) => { table }
         }
     }
@@ -811,7 +811,7 @@ impl<K, V> RawTable<K, V> {
     pub fn new(capacity: usize) -> RawTable<K, V> {
         match Self::try_new(capacity) {
             Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
-            Err(CollectionAllocErr::AllocErr(_)) => Global.oom(),
+            Err(CollectionAllocErr::AllocErr) => Global.oom(),
             Ok(table) => { table }
         }
     }

From f9c96d70bd1471f662aa2ffdfe30ab6df69629d1 Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Wed, 4 Apr 2018 12:10:34 +0200
Subject: [PATCH 16/27] Add FIXME comments for Void::null_mut usage

---
 src/liballoc_system/lib.rs | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs
index 5e6b3b5ca1129..48a6c1e150a55 100644
--- a/src/liballoc_system/lib.rs
+++ b/src/liballoc_system/lib.rs
@@ -309,6 +309,7 @@ mod platform {
         let mut out = ptr::null_mut();
         let ret = libc::posix_memalign(&mut out, layout.align(), layout.size());
         if ret != 0 {
+            // FIXME: use Void::null_mut https://github.com/rust-lang/rust/issues/49659
             0 as *mut Void
         } else {
             out as *mut Void

From b017742136a5d02b6ba0f2080e97d18a8bfeba4b Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Wed, 4 Apr 2018 16:03:46 +0200
Subject: [PATCH 17/27] Return Result instead of Option in alloc::Layout
 constructors

---
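A minimal caller-side sketch, not part of this patch, assuming the post-patch
`core::alloc` exports on a nightly of this era with `#![feature(allocator_api)]`:
fallible `Layout` construction now composes with `?` over `LayoutErr` instead of
`ok_or(...)` over `Option`.

    use core::alloc::{Layout, LayoutErr};

    // Errors from Layout construction propagate with `?` (Result, not Option).
    fn layout_for_pair(n: usize) -> Result<Layout, LayoutErr> {
        let elems = Layout::array::<u32>(n)?;
        let (pair, _offset) = elems.extend(Layout::new::<u64>())?;
        Ok(pair)
    }
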
 src/liballoc/raw_vec.rs              |  4 +-
 src/libcore/alloc.rs                 | 63 ++++++++++++++++++----------
 src/libstd/collections/hash/table.rs |  2 +-
 src/libstd/error.rs                  | 11 ++++-
 4 files changed, 54 insertions(+), 26 deletions(-)

diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index d7c30925f1a6a..18aaf1de08e9c 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -422,7 +422,7 @@ impl<T, A: Alloc> RawVec<T, A> {
 
             // Nothing we can really do about these checks :(
             let new_cap = used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?;
-            let new_layout = Layout::array::<T>(new_cap).ok_or(CapacityOverflow)?;
+            let new_layout = Layout::array::<T>(new_cap).map_err(|_| CapacityOverflow)?;
 
             alloc_guard(new_layout.size())?;
 
@@ -530,7 +530,7 @@ impl<T, A: Alloc> RawVec<T, A> {
             }
 
             let new_cap = self.amortized_new_size(used_cap, needed_extra_cap)?;
-            let new_layout = Layout::array::<T>(new_cap).ok_or(CapacityOverflow)?;
+            let new_layout = Layout::array::<T>(new_cap).map_err(|_| CapacityOverflow)?;
 
              // FIXME: may crash and burn on over-reserve
             alloc_guard(new_layout.size())?;
diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs
index 23532c6172189..0acaf54e0d92b 100644
--- a/src/libcore/alloc.rs
+++ b/src/libcore/alloc.rs
@@ -94,9 +94,9 @@ impl Layout {
     ///    must not overflow (i.e. the rounded value must be less than
     ///    `usize::MAX`).
     #[inline]
-    pub fn from_size_align(size: usize, align: usize) -> Option<Layout> {
+    pub fn from_size_align(size: usize, align: usize) -> Result<Self, LayoutErr> {
         if !align.is_power_of_two() {
-            return None;
+            return Err(LayoutErr { private: () });
         }
 
         // (power-of-two implies align != 0.)
@@ -114,11 +114,11 @@ impl Layout {
         // Above implies that checking for summation overflow is both
         // necessary and sufficient.
         if size > usize::MAX - (align - 1) {
-            return None;
+            return Err(LayoutErr { private: () });
         }
 
         unsafe {
-            Some(Layout::from_size_align_unchecked(size, align))
+            Ok(Layout::from_size_align_unchecked(size, align))
         }
     }
 
@@ -130,7 +130,7 @@ impl Layout {
     /// a power-of-two nor `size` aligned to `align` fits within the
     /// address space (i.e. the `Layout::from_size_align` preconditions).
     #[inline]
-    pub unsafe fn from_size_align_unchecked(size: usize, align: usize) -> Layout {
+    pub unsafe fn from_size_align_unchecked(size: usize, align: usize) -> Self {
         Layout { size: size, align: align }
     }
 
@@ -229,15 +229,17 @@ impl Layout {
     ///
     /// On arithmetic overflow, returns `None`.
     #[inline]
-    pub fn repeat(&self, n: usize) -> Option<(Self, usize)> {
-        let padded_size = self.size.checked_add(self.padding_needed_for(self.align))?;
-        let alloc_size = padded_size.checked_mul(n)?;
+    pub fn repeat(&self, n: usize) -> Result<(Self, usize), LayoutErr> {
+        let padded_size = self.size.checked_add(self.padding_needed_for(self.align))
+            .ok_or(LayoutErr { private: () })?;
+        let alloc_size = padded_size.checked_mul(n)
+            .ok_or(LayoutErr { private: () })?;
 
         // We can assume that `self.align` is a power-of-two.
         // Furthermore, `alloc_size` has already been rounded up
         // to a multiple of `self.align`; therefore, the call to
         // `Layout::from_size_align` below should never panic.
-        Some((Layout::from_size_align(alloc_size, self.align).unwrap(), padded_size))
+        Ok((Layout::from_size_align(alloc_size, self.align).unwrap(), padded_size))
     }
 
     /// Creates a layout describing the record for `self` followed by
@@ -251,17 +253,19 @@ impl Layout {
     /// (assuming that the record itself starts at offset 0).
     ///
     /// On arithmetic overflow, returns `None`.
-    pub fn extend(&self, next: Self) -> Option<(Self, usize)> {
+    pub fn extend(&self, next: Self) -> Result<(Self, usize), LayoutErr> {
         let new_align = cmp::max(self.align, next.align);
         let realigned = Layout::from_size_align(self.size, new_align)?;
 
         let pad = realigned.padding_needed_for(next.align);
 
-        let offset = self.size.checked_add(pad)?;
-        let new_size = offset.checked_add(next.size)?;
+        let offset = self.size.checked_add(pad)
+            .ok_or(LayoutErr { private: () })?;
+        let new_size = offset.checked_add(next.size)
+            .ok_or(LayoutErr { private: () })?;
 
         let layout = Layout::from_size_align(new_size, new_align)?;
-        Some((layout, offset))
+        Ok((layout, offset))
     }
 
     /// Creates a layout describing the record for `n` instances of
@@ -276,8 +280,8 @@ impl Layout {
     /// aligned.
     ///
     /// On arithmetic overflow, returns `None`.
-    pub fn repeat_packed(&self, n: usize) -> Option<Self> {
-        let size = self.size().checked_mul(n)?;
+    pub fn repeat_packed(&self, n: usize) -> Result<Self, LayoutErr> {
+        let size = self.size().checked_mul(n).ok_or(LayoutErr { private: () })?;
         Layout::from_size_align(size, self.align)
     }
 
@@ -296,16 +300,17 @@ impl Layout {
     ///  `extend`.)
     ///
     /// On arithmetic overflow, returns `None`.
-    pub fn extend_packed(&self, next: Self) -> Option<(Self, usize)> {
-        let new_size = self.size().checked_add(next.size())?;
+    pub fn extend_packed(&self, next: Self) -> Result<(Self, usize), LayoutErr> {
+        let new_size = self.size().checked_add(next.size())
+            .ok_or(LayoutErr { private: () })?;
         let layout = Layout::from_size_align(new_size, self.align)?;
-        Some((layout, self.size()))
+        Ok((layout, self.size()))
     }
 
     /// Creates a layout describing the record for a `[T; n]`.
     ///
     /// On arithmetic overflow, returns `None`.
-    pub fn array<T>(n: usize) -> Option<Self> {
+    pub fn array<T>(n: usize) -> Result<Self, LayoutErr> {
         Layout::new::<T>()
             .repeat(n)
             .map(|(k, offs)| {
@@ -315,6 +320,20 @@ impl Layout {
     }
 }
 
+/// The parameters given to `Layout::from_size_align` do not satisfy
+/// its documented constraints.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct LayoutErr {
+    private: ()
+}
+
+// (we need this for downstream impl of trait Error)
+impl fmt::Display for LayoutErr {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.write_str("invalid parameters to Layout::from_size_align")
+    }
+}
+
 /// The `AllocErr` error specifies whether an allocation failure is
 /// specifically due to resource exhaustion or if it is due to
 /// something wrong when combining the given input arguments with this
@@ -990,7 +1009,7 @@ pub unsafe trait Alloc {
         where Self: Sized
     {
         match Layout::array::<T>(n) {
-            Some(ref layout) if layout.size() > 0 => {
+            Ok(ref layout) if layout.size() > 0 => {
                 unsafe {
                     self.alloc(layout.clone())
                         .map(|p| {
@@ -1041,7 +1060,7 @@ pub unsafe trait Alloc {
         where Self: Sized
     {
         match (Layout::array::<T>(n_old), Layout::array::<T>(n_new), ptr.as_ptr()) {
-            (Some(ref k_old), Some(ref k_new), ptr) if k_old.size() > 0 && k_new.size() > 0 => {
+            (Ok(ref k_old), Ok(ref k_new), ptr) if k_old.size() > 0 && k_new.size() > 0 => {
                 self.realloc(ptr as *mut u8, k_old.clone(), k_new.clone())
                     .map(|p| NonNull::new_unchecked(p as *mut T))
             }
@@ -1076,7 +1095,7 @@ pub unsafe trait Alloc {
     {
         let raw_ptr = ptr.as_ptr() as *mut u8;
         match Layout::array::<T>(n) {
-            Some(ref k) if k.size() > 0 => {
+            Ok(ref k) if k.size() > 0 => {
                 Ok(self.dealloc(raw_ptr, k.clone()))
             }
             _ => {
diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs
index e9bdd4e7d07ea..502637051434e 100644
--- a/src/libstd/collections/hash/table.rs
+++ b/src/libstd/collections/hash/table.rs
@@ -755,7 +755,7 @@ impl<K, V> RawTable<K, V> {
         }
 
         let buffer = Global.alloc(Layout::from_size_align(size, alignment)
-            .ok_or(CollectionAllocErr::CapacityOverflow)?)?;
+            .map_err(|_| CollectionAllocErr::CapacityOverflow)?)?;
 
         let hashes = buffer as *mut HashUint;
 
diff --git a/src/libstd/error.rs b/src/libstd/error.rs
index ec55a3c021a80..3c209928d432a 100644
--- a/src/libstd/error.rs
+++ b/src/libstd/error.rs
@@ -57,7 +57,7 @@ use cell;
 use char;
 use core::array;
 use fmt::{self, Debug, Display};
-use heap::{AllocErr, CannotReallocInPlace};
+use heap::{AllocErr, LayoutErr, CannotReallocInPlace};
 use mem::transmute;
 use num;
 use str;
@@ -247,6 +247,15 @@ impl Error for AllocErr {
     }
 }
 
+#[unstable(feature = "allocator_api",
+           reason = "the precise API and guarantees it provides may be tweaked.",
+           issue = "32838")]
+impl Error for LayoutErr {
+    fn description(&self) -> &str {
+        "invalid parameters to Layout::from_size_align"
+    }
+}
+
 #[unstable(feature = "allocator_api",
            reason = "the precise API and guarantees it provides may be tweaked.",
            issue = "32838")]

From c957e99b305ecee113442a7ce0edd6b565200ca9 Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Wed, 4 Apr 2018 17:19:16 +0200
Subject: [PATCH 18/27] realloc with a new size only, not a full new layout.

Changing the alignment with realloc is not supported.
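
A hypothetical caller-side sketch of the new call shape, not part of this patch
(assumes the post-patch `core::alloc::Alloc` trait on a nightly with
`#![feature(allocator_api)]`): the reallocated block keeps `layout.align()`,
and only the size is renegotiated.

    use core::alloc::{Alloc, Layout};

    // Doubles an allocation; the resulting block keeps layout.align().
    unsafe fn grow_by_double<A: Alloc>(a: &mut A, ptr: *mut u8, layout: Layout) -> *mut u8 {
        let new_size = layout.size() * 2;
        match a.realloc(ptr, layout, new_size) {
            Ok(new_ptr) => new_ptr,
            Err(_) => a.oom(),
        }
    }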
---
 src/liballoc/alloc.rs      | 22 ++++------
 src/liballoc/heap.rs       |  8 ++--
 src/liballoc/raw_vec.rs    | 17 ++++----
 src/liballoc_system/lib.rs | 42 ++++++++----------
 src/libcore/alloc.rs       | 87 ++++++++++++++++----------------------
 5 files changed, 74 insertions(+), 102 deletions(-)

diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index a7b5864016c2b..a6fc8d5004c29 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -91,21 +91,17 @@ unsafe impl Alloc for Global {
     unsafe fn realloc(&mut self,
                       ptr: *mut u8,
                       layout: Layout,
-                      new_layout: Layout)
+                      new_size: usize)
                       -> Result<*mut u8, AllocErr>
     {
-        if layout.align() == new_layout.align() {
-            #[cfg(not(stage0))]
-            let ptr = __rust_realloc(ptr, layout.size(), layout.align(), new_layout.size());
-            #[cfg(stage0)]
-            let ptr = __rust_realloc(ptr, layout.size(), layout.align(),
-                                     new_layout.size(), new_layout.align(), &mut 0);
-
-            if !ptr.is_null() {
-                Ok(ptr)
-            } else {
-                Err(AllocErr)
-            }
+        #[cfg(not(stage0))]
+        let ptr = __rust_realloc(ptr, layout.size(), layout.align(), new_size);
+        #[cfg(stage0)]
+        let ptr = __rust_realloc(ptr, layout.size(), layout.align(),
+                                 new_size, layout.align(), &mut 0);
+
+        if !ptr.is_null() {
+            Ok(ptr)
         } else {
             Err(AllocErr)
         }
diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs
index 765fb8458d11d..e79383331e180 100644
--- a/src/liballoc/heap.rs
+++ b/src/liballoc/heap.rs
@@ -64,7 +64,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
                       ptr: *mut u8,
                       layout: Layout,
                       new_layout: Layout) -> Result<*mut u8, AllocErr> {
-        CoreAlloc::realloc(self, ptr, layout, new_layout)
+        CoreAlloc::realloc(self, ptr, layout, new_layout.size())
     }
 
     unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
@@ -79,20 +79,20 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
                              ptr: *mut u8,
                              layout: Layout,
                              new_layout: Layout) -> Result<Excess, AllocErr> {
-        CoreAlloc::realloc_excess(self, ptr, layout, new_layout)
+        CoreAlloc::realloc_excess(self, ptr, layout, new_layout.size())
     }
 
     unsafe fn grow_in_place(&mut self,
                             ptr: *mut u8,
                             layout: Layout,
                             new_layout: Layout) -> Result<(), CannotReallocInPlace> {
-        CoreAlloc::grow_in_place(self, ptr, layout, new_layout)
+        CoreAlloc::grow_in_place(self, ptr, layout, new_layout.size())
     }
 
     unsafe fn shrink_in_place(&mut self,
                               ptr: *mut u8,
                               layout: Layout,
                               new_layout: Layout) -> Result<(), CannotReallocInPlace> {
-        CoreAlloc::shrink_in_place(self, ptr, layout, new_layout)
+        CoreAlloc::shrink_in_place(self, ptr, layout, new_layout.size())
     }
 }
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 18aaf1de08e9c..80b816878fb37 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -309,11 +309,10 @@ impl<T, A: Alloc> RawVec<T, A> {
                     // `from_size_align_unchecked`.
                     let new_cap = 2 * self.cap;
                     let new_size = new_cap * elem_size;
-                    let new_layout = Layout::from_size_align_unchecked(new_size, cur.align());
                     alloc_guard(new_size).expect("capacity overflow");
                     let ptr_res = self.a.realloc(self.ptr.as_ptr() as *mut u8,
                                                  cur,
-                                                 new_layout);
+                                                 new_size);
                     match ptr_res {
                         Ok(ptr) => (new_cap, Unique::new_unchecked(ptr as *mut T)),
                         Err(_) => self.a.oom(),
@@ -371,8 +370,7 @@ impl<T, A: Alloc> RawVec<T, A> {
             let new_size = new_cap * elem_size;
             alloc_guard(new_size).expect("capacity overflow");
             let ptr = self.ptr() as *mut _;
-            let new_layout = Layout::from_size_align_unchecked(new_size, old_layout.align());
-            match self.a.grow_in_place(ptr, old_layout, new_layout) {
+            match self.a.grow_in_place(ptr, old_layout, new_size) {
                 Ok(_) => {
                     // We can't directly divide `size`.
                     self.cap = new_cap;
@@ -428,8 +426,9 @@ impl<T, A: Alloc> RawVec<T, A> {
 
             let res = match self.current_layout() {
                 Some(layout) => {
+                    debug_assert!(new_layout.align() == layout.align());
                     let old_ptr = self.ptr.as_ptr() as *mut u8;
-                    self.a.realloc(old_ptr, layout, new_layout)
+                    self.a.realloc(old_ptr, layout, new_layout.size())
                 }
                 None => self.a.alloc(new_layout),
             };
@@ -537,8 +536,9 @@ impl<T, A: Alloc> RawVec<T, A> {
 
             let res = match self.current_layout() {
                 Some(layout) => {
+                    debug_assert!(new_layout.align() == layout.align());
                     let old_ptr = self.ptr.as_ptr() as *mut u8;
-                    self.a.realloc(old_ptr, layout, new_layout)
+                    self.a.realloc(old_ptr, layout, new_layout.size())
                 }
                 None => self.a.alloc(new_layout),
             };
@@ -604,7 +604,7 @@ impl<T, A: Alloc> RawVec<T, A> {
             let new_layout = Layout::new::<T>().repeat(new_cap).unwrap().0;
             // FIXME: may crash and burn on over-reserve
             alloc_guard(new_layout.size()).expect("capacity overflow");
-            match self.a.grow_in_place(ptr, old_layout, new_layout) {
+            match self.a.grow_in_place(ptr, old_layout, new_layout.size()) {
                 Ok(_) => {
                     self.cap = new_cap;
                     true
@@ -664,10 +664,9 @@ impl<T, A: Alloc> RawVec<T, A> {
                 let new_size = elem_size * amount;
                 let align = mem::align_of::<T>();
                 let old_layout = Layout::from_size_align_unchecked(old_size, align);
-                let new_layout = Layout::from_size_align_unchecked(new_size, align);
                 match self.a.realloc(self.ptr.as_ptr() as *mut u8,
                                      old_layout,
-                                     new_layout) {
+                                     new_size) {
                     Ok(p) => self.ptr = Unique::new_unchecked(p as *mut T),
                     Err(_) => self.a.oom(),
                 }
diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs
index 48a6c1e150a55..7b788a5f9898d 100644
--- a/src/liballoc_system/lib.rs
+++ b/src/liballoc_system/lib.rs
@@ -69,8 +69,8 @@ unsafe impl Alloc for System {
     unsafe fn realloc(&mut self,
                       ptr: *mut u8,
                       old_layout: Layout,
-                      new_layout: Layout) -> Result<*mut u8, AllocErr> {
-        Alloc::realloc(&mut &*self, ptr, old_layout, new_layout)
+                      new_size: usize) -> Result<*mut u8, AllocErr> {
+        Alloc::realloc(&mut &*self, ptr, old_layout, new_size)
     }
 
     fn oom(&mut self) -> ! {
@@ -91,24 +91,24 @@ unsafe impl Alloc for System {
     unsafe fn realloc_excess(&mut self,
                              ptr: *mut u8,
                              layout: Layout,
-                             new_layout: Layout) -> Result<Excess, AllocErr> {
-        Alloc::realloc_excess(&mut &*self, ptr, layout, new_layout)
+                             new_size: usize) -> Result<Excess, AllocErr> {
+        Alloc::realloc_excess(&mut &*self, ptr, layout, new_size)
     }
 
     #[inline]
     unsafe fn grow_in_place(&mut self,
                             ptr: *mut u8,
                             layout: Layout,
-                            new_layout: Layout) -> Result<(), CannotReallocInPlace> {
-        Alloc::grow_in_place(&mut &*self, ptr, layout, new_layout)
+                            new_size: usize) -> Result<(), CannotReallocInPlace> {
+        Alloc::grow_in_place(&mut &*self, ptr, layout, new_size)
     }
 
     #[inline]
     unsafe fn shrink_in_place(&mut self,
                               ptr: *mut u8,
                               layout: Layout,
-                              new_layout: Layout) -> Result<(), CannotReallocInPlace> {
-        Alloc::shrink_in_place(&mut &*self, ptr, layout, new_layout)
+                              new_size: usize) -> Result<(), CannotReallocInPlace> {
+        Alloc::shrink_in_place(&mut &*self, ptr, layout, new_size)
     }
 }
 
@@ -166,12 +166,8 @@ macro_rules! alloc_methods_based_on_global_alloc {
         unsafe fn realloc(&mut self,
                           ptr: *mut u8,
                           old_layout: Layout,
-                          new_layout: Layout) -> Result<*mut u8, AllocErr> {
-            if old_layout.align() != new_layout.align() {
-                return Err(AllocErr)
-            }
-
-            let ptr = GlobalAlloc::realloc(*self, ptr as *mut Void, old_layout, new_layout.size());
+                          new_size: usize) -> Result<*mut u8, AllocErr> {
+            let ptr = GlobalAlloc::realloc(*self, ptr as *mut Void, old_layout, new_size);
             if !ptr.is_null() {
                 Ok(ptr as *mut u8)
             } else {
@@ -428,30 +424,26 @@ mod platform {
         unsafe fn grow_in_place(&mut self,
                                 ptr: *mut u8,
                                 layout: Layout,
-                                new_layout: Layout) -> Result<(), CannotReallocInPlace> {
-            self.shrink_in_place(ptr, layout, new_layout)
+                                new_size: usize) -> Result<(), CannotReallocInPlace> {
+            self.shrink_in_place(ptr, layout, new_size)
         }
 
         #[inline]
         unsafe fn shrink_in_place(&mut self,
                                   ptr: *mut u8,
-                                  old_layout: Layout,
-                                  new_layout: Layout) -> Result<(), CannotReallocInPlace> {
-            if old_layout.align() != new_layout.align() {
-                return Err(CannotReallocInPlace)
-            }
-
-            let new = if new_layout.align() <= MIN_ALIGN {
+                                  layout: Layout,
+                                  new_size: usize) -> Result<(), CannotReallocInPlace> {
+            let new = if layout.align() <= MIN_ALIGN {
                 HeapReAlloc(GetProcessHeap(),
                             HEAP_REALLOC_IN_PLACE_ONLY,
                             ptr as LPVOID,
-                            new_layout.size())
+                            new_size)
             } else {
                 let header = get_header(ptr);
                 HeapReAlloc(GetProcessHeap(),
                             HEAP_REALLOC_IN_PLACE_ONLY,
                             header.0 as LPVOID,
-                            new_layout.size() + new_layout.align())
+                            new_size + layout.align())
             };
             if new.is_null() {
                 Err(CannotReallocInPlace)
diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs
index 0acaf54e0d92b..757f06e731ffc 100644
--- a/src/libcore/alloc.rs
+++ b/src/libcore/alloc.rs
@@ -633,9 +633,10 @@ pub unsafe trait Alloc {
     // realloc. alloc_excess, realloc_excess
 
     /// Returns a pointer suitable for holding data described by
-    /// `new_layout`, meeting its size and alignment guarantees. To
+    /// a new layout with `layout`’s alignment and a size given
+    /// by `new_size`. To
     /// accomplish this, this may extend or shrink the allocation
-    /// referenced by `ptr` to fit `new_layout`.
+    /// referenced by `ptr` to fit the new layout.
     ///
     /// If this returns `Ok`, then ownership of the memory block
     /// referenced by `ptr` has been transferred to this
@@ -648,12 +649,6 @@ pub unsafe trait Alloc {
     /// block has not been transferred to this allocator, and the
     /// contents of the memory block are unaltered.
     ///
-    /// For best results, `new_layout` should not impose a different
-    /// alignment constraint than `layout`. (In other words,
-    /// `new_layout.align()` should equal `layout.align()`.) However,
-    /// behavior is well-defined (though underspecified) when this
-    /// constraint is violated; further discussion below.
-    ///
     /// # Safety
     ///
     /// This function is unsafe because undefined behavior can result
@@ -661,12 +656,13 @@ pub unsafe trait Alloc {
     ///
     /// * `ptr` must be currently allocated via this allocator,
     ///
-    /// * `layout` must *fit* the `ptr` (see above). (The `new_layout`
+    /// * `layout` must *fit* the `ptr` (see above). (The `new_size`
     ///   argument need not fit it.)
     ///
-    /// * `new_layout` must have size greater than zero.
+    /// * `new_size` must be greater than zero.
     ///
-    /// * the alignment of `new_layout` is non-zero.
+    /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`,
+    ///   must not overflow (i.e. the rounded value must be less than `usize::MAX`).
     ///
     /// (Extension subtraits might provide more specific bounds on
     /// behavior, e.g. guarantee a sentinel address or a null pointer
@@ -674,18 +670,11 @@ pub unsafe trait Alloc {
     ///
     /// # Errors
     ///
-    /// Returns `Err` only if `new_layout` does not match the
-    /// alignment of `layout`, or does not meet the allocator's size
+    /// Returns `Err` only if the new layout
+    /// does not meet the size
     /// and alignment constraints of the allocator, or if reallocation
     /// otherwise fails.
     ///
-    /// (Note the previous sentence did not say "if and only if" -- in
-    /// particular, an implementation of this method *can* return `Ok`
-    /// if `new_layout.align() != old_layout.align()`; or it can
-    /// return `Err` in that scenario, depending on whether this
-    /// allocator can dynamically adjust the alignment constraint for
-    /// the block.)
-    ///
     /// Implementations are encouraged to return `Err` on memory
     /// exhaustion rather than panicking or aborting, but this is not
     /// a strict requirement. (Specifically: it is *legal* to
@@ -698,22 +687,21 @@ pub unsafe trait Alloc {
     unsafe fn realloc(&mut self,
                       ptr: *mut u8,
                       layout: Layout,
-                      new_layout: Layout) -> Result<*mut u8, AllocErr> {
-        let new_size = new_layout.size();
+                      new_size: usize) -> Result<*mut u8, AllocErr> {
         let old_size = layout.size();
-        let aligns_match = layout.align == new_layout.align;
 
-        if new_size >= old_size && aligns_match {
-            if let Ok(()) = self.grow_in_place(ptr, layout.clone(), new_layout.clone()) {
+        if new_size >= old_size {
+            if let Ok(()) = self.grow_in_place(ptr, layout.clone(), new_size) {
                 return Ok(ptr);
             }
-        } else if new_size < old_size && aligns_match {
-            if let Ok(()) = self.shrink_in_place(ptr, layout.clone(), new_layout.clone()) {
+        } else if new_size < old_size {
+            if let Ok(()) = self.shrink_in_place(ptr, layout.clone(), new_size) {
                 return Ok(ptr);
             }
         }
 
         // otherwise, fall back on alloc + copy + dealloc.
+        let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
         let result = self.alloc(new_layout);
         if let Ok(new_ptr) = result {
             ptr::copy_nonoverlapping(ptr as *const u8, new_ptr, cmp::min(old_size, new_size));
@@ -789,17 +777,19 @@ pub unsafe trait Alloc {
     unsafe fn realloc_excess(&mut self,
                              ptr: *mut u8,
                              layout: Layout,
-                             new_layout: Layout) -> Result<Excess, AllocErr> {
+                             new_size: usize) -> Result<Excess, AllocErr> {
+        let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
         let usable_size = self.usable_size(&new_layout);
-        self.realloc(ptr, layout, new_layout)
+        self.realloc(ptr, layout, new_size)
             .map(|p| Excess(p, usable_size.1))
     }
 
-    /// Attempts to extend the allocation referenced by `ptr` to fit `new_layout`.
+    /// Attempts to extend the allocation referenced by `ptr` to fit `new_size`.
     ///
     /// If this returns `Ok`, then the allocator has asserted that the
-    /// memory block referenced by `ptr` now fits `new_layout`, and thus can
-    /// be used to carry data of that layout. (The allocator is allowed to
+    /// memory block referenced by `ptr` now fits `new_size`, and thus can
+    /// be used to carry data of a layout with that size and the same alignment as
+    /// `layout`. (The allocator is allowed to
     /// expend effort to accomplish this, such as extending the memory block to
     /// include successor blocks, or virtual memory tricks.)
     ///
@@ -815,11 +805,9 @@ pub unsafe trait Alloc {
     /// * `ptr` must be currently allocated via this allocator,
     ///
     /// * `layout` must *fit* the `ptr` (see above); note the
-    ///   `new_layout` argument need not fit it,
+    ///   `new_size` argument need not fit it,
     ///
-    /// * `new_layout.size()` must not be less than `layout.size()`,
-    ///
-    /// * `new_layout.align()` must equal `layout.align()`.
+    /// * `new_size` must not be less than `layout.size()`,
     ///
     /// # Errors
     ///
@@ -834,24 +822,23 @@ pub unsafe trait Alloc {
     unsafe fn grow_in_place(&mut self,
                             ptr: *mut u8,
                             layout: Layout,
-                            new_layout: Layout) -> Result<(), CannotReallocInPlace> {
+                            new_size: usize) -> Result<(), CannotReallocInPlace> {
         let _ = ptr; // this default implementation doesn't care about the actual address.
-        debug_assert!(new_layout.size >= layout.size);
-        debug_assert!(new_layout.align == layout.align);
+        debug_assert!(new_size >= layout.size);
         let (_l, u) = self.usable_size(&layout);
         // _l <= layout.size()                       [guaranteed by usable_size()]
         //       layout.size() <= new_layout.size()  [required by this method]
-        if new_layout.size <= u {
+        if new_size <= u {
             return Ok(());
         } else {
             return Err(CannotReallocInPlace);
         }
     }
 
-    /// Attempts to shrink the allocation referenced by `ptr` to fit `new_layout`.
+    /// Attempts to shrink the allocation referenced by `ptr` to fit `new_size`.
     ///
     /// If this returns `Ok`, then the allocator has asserted that the
-    /// memory block referenced by `ptr` now fits `new_layout`, and
+    /// memory block referenced by `ptr` now fits `new_size`, and
     /// thus can only be used to carry data of that smaller
     /// layout. (The allocator is allowed to take advantage of this,
     /// carving off portions of the block for reuse elsewhere.) The
@@ -872,13 +859,11 @@ pub unsafe trait Alloc {
     /// * `ptr` must be currently allocated via this allocator,
     ///
     /// * `layout` must *fit* the `ptr` (see above); note the
-    ///   `new_layout` argument need not fit it,
+    ///   `new_size` argument need not fit it,
     ///
-    /// * `new_layout.size()` must not be greater than `layout.size()`
+    /// * `new_size` must not be greater than `layout.size()`
     ///   (and must be greater than zero),
     ///
-    /// * `new_layout.align()` must equal `layout.align()`.
-    ///
     /// # Errors
     ///
     /// Returns `Err(CannotReallocInPlace)` when the allocator is
@@ -892,14 +877,13 @@ pub unsafe trait Alloc {
     unsafe fn shrink_in_place(&mut self,
                               ptr: *mut u8,
                               layout: Layout,
-                              new_layout: Layout) -> Result<(), CannotReallocInPlace> {
+                              new_size: usize) -> Result<(), CannotReallocInPlace> {
         let _ = ptr; // this default implementation doesn't care about the actual address.
-        debug_assert!(new_layout.size <= layout.size);
-        debug_assert!(new_layout.align == layout.align);
+        debug_assert!(new_size <= layout.size);
         let (l, _u) = self.usable_size(&layout);
         //                      layout.size() <= _u  [guaranteed by usable_size()]
         // new_layout.size() <= layout.size()        [required by this method]
-        if l <= new_layout.size {
+        if l <= new_size {
             return Ok(());
         } else {
             return Err(CannotReallocInPlace);
@@ -1061,7 +1045,8 @@ pub unsafe trait Alloc {
     {
         match (Layout::array::<T>(n_old), Layout::array::<T>(n_new), ptr.as_ptr()) {
             (Ok(ref k_old), Ok(ref k_new), ptr) if k_old.size() > 0 && k_new.size() > 0 => {
-                self.realloc(ptr as *mut u8, k_old.clone(), k_new.clone())
+                debug_assert!(k_old.align() == k_new.align());
+                self.realloc(ptr as *mut u8, k_old.clone(), k_new.size())
                     .map(|p| NonNull::new_unchecked(p as *mut T))
             }
             _ => {

From 747cc749430d66bd2fca8e81fd8a1c994e36dcf1 Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Wed, 4 Apr 2018 18:09:39 +0200
Subject: [PATCH 19/27] Conversions between Result<*mut u8, AllocErr> and *mut
 Void

---
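A hypothetical sketch, not part of this patch, of what the two `From` impls buy:
`Alloc`-style wrappers over a `GlobalAlloc` reduce to a single `.into()`, with a
null pointer mapping to `Err(AllocErr)` and, in the other direction, `Err`
mapping back to `Void::null_mut()`.

    use core::alloc::{AllocErr, GlobalAlloc, Layout};

    // Null from GlobalAlloc::alloc becomes Err(AllocErr); any other pointer is Ok.
    unsafe fn alloc_checked<G: GlobalAlloc>(g: &G, layout: Layout)
                                            -> Result<*mut u8, AllocErr> {
        GlobalAlloc::alloc(g, layout).into()
    }
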
 src/liballoc_system/lib.rs | 21 +++------------------
 src/libcore/alloc.rs       | 21 +++++++++++++++++++++
 2 files changed, 24 insertions(+), 18 deletions(-)

diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs
index 7b788a5f9898d..6ffbd029281c4 100644
--- a/src/liballoc_system/lib.rs
+++ b/src/liballoc_system/lib.rs
@@ -139,22 +139,12 @@ macro_rules! alloc_methods_based_on_global_alloc {
     () => {
         #[inline]
         unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-            let ptr = GlobalAlloc::alloc(*self, layout);
-            if !ptr.is_null() {
-                Ok(ptr as *mut u8)
-            } else {
-                Err(AllocErr)
-            }
+            GlobalAlloc::alloc(*self, layout).into()
         }
 
         #[inline]
         unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-            let ptr = GlobalAlloc::alloc_zeroed(*self, layout);
-            if !ptr.is_null() {
-                Ok(ptr as *mut u8)
-            } else {
-                Err(AllocErr)
-            }
+            GlobalAlloc::alloc_zeroed(*self, layout).into()
         }
 
         #[inline]
@@ -167,12 +157,7 @@ macro_rules! alloc_methods_based_on_global_alloc {
                           ptr: *mut u8,
                           old_layout: Layout,
                           new_size: usize) -> Result<*mut u8, AllocErr> {
-            let ptr = GlobalAlloc::realloc(*self, ptr as *mut Void, old_layout, new_size);
-            if !ptr.is_null() {
-                Ok(ptr as *mut u8)
-            } else {
-                Err(AllocErr)
-            }
+            GlobalAlloc::realloc(*self, ptr as *mut Void, old_layout, new_size).into()
         }
     }
 }
diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs
index 757f06e731ffc..cfa7df06a40e1 100644
--- a/src/libcore/alloc.rs
+++ b/src/libcore/alloc.rs
@@ -41,6 +41,27 @@ impl Void {
     }
 }
 
+/// Convert from a return value of GlobalAlloc::alloc to that of Alloc::alloc
+impl From<*mut Void> for Result<*mut u8, AllocErr> {
+    fn from(ptr: *mut Void) -> Self {
+        if !ptr.is_null() {
+            Ok(ptr as *mut u8)
+        } else {
+            Err(AllocErr)
+        }
+    }
+}
+
+/// Convert from a return value of Alloc::alloc to that of GlobalAlloc::alloc
+impl From<Result<*mut u8, AllocErr>> for *mut Void {
+    fn from(result: Result<*mut u8, AllocErr>) -> Self {
+        match result {
+            Ok(ptr) => ptr as *mut Void,
+            Err(_) => Void::null_mut(),
+        }
+    }
+}
+
 /// Represents the combination of a starting address and
 /// a total capacity of the returned block.
 #[derive(Debug)]

From c033f1ff5f29ecac41dee668e373c9fc870f2d43 Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Wed, 4 Apr 2018 18:43:28 +0200
Subject: [PATCH 20/27] Move platform-specific OOM handling to functions

---
 src/liballoc_system/lib.rs | 131 ++++++++++++++++---------------------
 1 file changed, 58 insertions(+), 73 deletions(-)

diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs
index 6ffbd029281c4..8cb5c5d5be9f8 100644
--- a/src/liballoc_system/lib.rs
+++ b/src/liballoc_system/lib.rs
@@ -224,40 +224,7 @@ mod platform {
         alloc_methods_based_on_global_alloc!();
 
         fn oom(&mut self) -> ! {
-            use core::fmt::{self, Write};
-
-            // Print a message to stderr before aborting to assist with
-            // debugging. It is critical that this code does not allocate any
-            // memory since we are in an OOM situation. Any errors are ignored
-            // while printing since there's nothing we can do about them and we
-            // are about to exit anyways.
-            drop(writeln!(Stderr, "fatal runtime error: {}", AllocErr));
-            unsafe {
-                ::core::intrinsics::abort();
-            }
-
-            struct Stderr;
-
-            impl Write for Stderr {
-                #[cfg(target_os = "cloudabi")]
-                fn write_str(&mut self, _: &str) -> fmt::Result {
-                    // CloudABI does not have any reserved file descriptor
-                    // numbers. We should not attempt to write to file
-                    // descriptor #2, as it may be associated with any kind of
-                    // resource.
-                    Ok(())
-                }
-
-                #[cfg(not(target_os = "cloudabi"))]
-                fn write_str(&mut self, s: &str) -> fmt::Result {
-                    unsafe {
-                        libc::write(libc::STDERR_FILENO,
-                                    s.as_ptr() as *const libc::c_void,
-                                    s.len());
-                    }
-                    Ok(())
-                }
-            }
+            ::oom()
         }
     }
 
@@ -301,8 +268,6 @@ mod platform {
 #[cfg(windows)]
 #[allow(bad_style)]
 mod platform {
-    use core::ptr;
-
     use MIN_ALIGN;
     use System;
     use core::alloc::{GlobalAlloc, Alloc, Void, AllocErr, Layout, CannotReallocInPlace};
@@ -312,10 +277,6 @@ mod platform {
     type SIZE_T = usize;
     type DWORD = u32;
     type BOOL = i32;
-    type LPDWORD = *mut DWORD;
-    type LPOVERLAPPED = *mut u8;
-
-    const STD_ERROR_HANDLE: DWORD = -12i32 as DWORD;
 
     extern "system" {
         fn GetProcessHeap() -> HANDLE;
@@ -323,20 +284,12 @@ mod platform {
         fn HeapReAlloc(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID, dwBytes: SIZE_T) -> LPVOID;
         fn HeapFree(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID) -> BOOL;
         fn GetLastError() -> DWORD;
-        fn WriteFile(hFile: HANDLE,
-                     lpBuffer: LPVOID,
-                     nNumberOfBytesToWrite: DWORD,
-                     lpNumberOfBytesWritten: LPDWORD,
-                     lpOverlapped: LPOVERLAPPED)
-                     -> BOOL;
-        fn GetStdHandle(which: DWORD) -> HANDLE;
     }
 
     #[repr(C)]
     struct Header(*mut u8);
 
     const HEAP_ZERO_MEMORY: DWORD = 0x00000008;
-    const HEAP_REALLOC_IN_PLACE_ONLY: DWORD = 0x00000010;
 
     unsafe fn get_header<'a>(ptr: *mut u8) -> &'a mut Header {
         &mut *(ptr as *mut Header).offset(-1)
@@ -438,31 +391,7 @@ mod platform {
         }
 
         fn oom(&mut self) -> ! {
-            use core::fmt::{self, Write};
-
-            // Same as with unix we ignore all errors here
-            drop(writeln!(Stderr, "fatal runtime error: {}", AllocErr));
-            unsafe {
-                ::core::intrinsics::abort();
-            }
-
-            struct Stderr;
-
-            impl Write for Stderr {
-                fn write_str(&mut self, s: &str) -> fmt::Result {
-                    unsafe {
-                        // WriteFile silently fails if it is passed an invalid
-                        // handle, so there is no need to check the result of
-                        // GetStdHandle.
-                        WriteFile(GetStdHandle(STD_ERROR_HANDLE),
-                                  s.as_ptr() as LPVOID,
-                                  s.len() as DWORD,
-                                  ptr::null_mut(),
-                                  ptr::null_mut());
-                    }
-                    Ok(())
-                }
-            }
+            ::oom()
         }
     }
 }
@@ -522,3 +451,59 @@ mod platform {
         alloc_methods_based_on_global_alloc!();
     }
 }
+
+fn oom() -> ! {
+    write_to_stderr("fatal runtime error: memory allocation failed");
+    unsafe {
+        ::core::intrinsics::abort();
+    }
+}
+
+#[cfg(any(unix, target_os = "redox"))]
+fn write_to_stderr(s: &str) {
+    extern crate libc;
+
+    unsafe {
+        libc::write(libc::STDERR_FILENO,
+                    s.as_ptr() as *const libc::c_void,
+                    s.len());
+    }
+}
+
+#[cfg(windows)]
+fn write_to_stderr(s: &str) {
+    use core::ptr;
+
+    type LPVOID = *mut u8;
+    type HANDLE = LPVOID;
+    type DWORD = u32;
+    type BOOL = i32;
+    type LPDWORD = *mut DWORD;
+    type LPOVERLAPPED = *mut u8;
+
+    const STD_ERROR_HANDLE: DWORD = -12i32 as DWORD;
+
+    extern "system" {
+        fn WriteFile(hFile: HANDLE,
+                     lpBuffer: LPVOID,
+                     nNumberOfBytesToWrite: DWORD,
+                     lpNumberOfBytesWritten: LPDWORD,
+                     lpOverlapped: LPOVERLAPPED)
+                     -> BOOL;
+        fn GetStdHandle(which: DWORD) -> HANDLE;
+    }
+
+    unsafe {
+        // WriteFile silently fails if it is passed an invalid
+        // handle, so there is no need to check the result of
+        // GetStdHandle.
+        WriteFile(GetStdHandle(STD_ERROR_HANDLE),
+                  s.as_ptr() as LPVOID,
+                  s.len() as DWORD,
+                  ptr::null_mut(),
+                  ptr::null_mut());
+    }
+}
+
+#[cfg(not(any(windows, unix, target_os = "redox")))]
+fn write_to_stderr(_: &str) {}

From 96c9d225a9667bc5ffcbc1594d44c29b201e999c Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Wed, 4 Apr 2018 18:50:25 +0200
Subject: [PATCH 21/27] Remove `impl Alloc for &'a System`

This was relevant to `#[global_allocator]`,
which is now based on the `GlobalAlloc` trait instead.
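
A hypothetical downstream sketch, not part of this patch; the exact `System`
re-export path moved between `std::heap` and `std::alloc` on nightlies of this
era, and the attribute still required `#![feature(global_allocator)]`.

    use std::alloc::System;

    // Registration now only needs System's GlobalAlloc impl; the removed
    // `impl Alloc for &System` is no longer consulted by the expansion.
    #[global_allocator]
    static GLOBAL: System = System;

    fn main() {
        // Heap allocations in Vec/Box/etc. route through GlobalAlloc::alloc on System.
        let v = vec![1u8, 2, 3];
        assert_eq!(v.len(), 3);
    }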
---
 src/liballoc_system/lib.rs | 141 ++++++++-----------------------------
 1 file changed, 28 insertions(+), 113 deletions(-)

diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs
index 8cb5c5d5be9f8..4516664e97c55 100644
--- a/src/liballoc_system/lib.rs
+++ b/src/liballoc_system/lib.rs
@@ -41,7 +41,7 @@ const MIN_ALIGN: usize = 8;
 #[allow(dead_code)]
 const MIN_ALIGN: usize = 16;
 
-use core::alloc::{Alloc, AllocErr, Layout, Excess, CannotReallocInPlace};
+use core::alloc::{Alloc, GlobalAlloc, AllocErr, Layout, Void};
 
 #[unstable(feature = "allocator_api", issue = "32838")]
 pub struct System;
@@ -50,19 +50,17 @@ pub struct System;
 unsafe impl Alloc for System {
     #[inline]
     unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        Alloc::alloc(&mut &*self, layout)
+        GlobalAlloc::alloc(self, layout).into()
     }
 
     #[inline]
-    unsafe fn alloc_zeroed(&mut self, layout: Layout)
-        -> Result<*mut u8, AllocErr>
-    {
-        Alloc::alloc_zeroed(&mut &*self, layout)
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        GlobalAlloc::alloc_zeroed(self, layout).into()
     }
 
     #[inline]
     unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        Alloc::dealloc(&mut &*self, ptr, layout)
+        GlobalAlloc::dealloc(self, ptr as *mut Void, layout)
     }
 
     #[inline]
@@ -70,45 +68,44 @@ unsafe impl Alloc for System {
                       ptr: *mut u8,
                       old_layout: Layout,
                       new_size: usize) -> Result<*mut u8, AllocErr> {
-        Alloc::realloc(&mut &*self, ptr, old_layout, new_size)
+        GlobalAlloc::realloc(self, ptr as *mut Void, old_layout, new_size).into()
     }
 
+    #[inline]
     fn oom(&mut self) -> ! {
-        Alloc::oom(&mut &*self)
+        ::oom()
     }
+}
 
+#[cfg(stage0)]
+#[unstable(feature = "allocator_api", issue = "32838")]
+unsafe impl<'a> Alloc for &'a System {
     #[inline]
-    fn usable_size(&self, layout: &Layout) -> (usize, usize) {
-        Alloc::usable_size(&mut &*self, layout)
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        GlobalAlloc::alloc(*self, layout).into()
     }
 
     #[inline]
-    unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
-        Alloc::alloc_excess(&mut &*self, layout)
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        GlobalAlloc::alloc_zeroed(*self, layout).into()
     }
 
     #[inline]
-    unsafe fn realloc_excess(&mut self,
-                             ptr: *mut u8,
-                             layout: Layout,
-                             new_size: usize) -> Result<Excess, AllocErr> {
-        Alloc::realloc_excess(&mut &*self, ptr, layout, new_size)
+    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+        GlobalAlloc::dealloc(*self, ptr as *mut Void, layout)
     }
 
     #[inline]
-    unsafe fn grow_in_place(&mut self,
-                            ptr: *mut u8,
-                            layout: Layout,
-                            new_size: usize) -> Result<(), CannotReallocInPlace> {
-        Alloc::grow_in_place(&mut &*self, ptr, layout, new_size)
+    unsafe fn realloc(&mut self,
+                      ptr: *mut u8,
+                      old_layout: Layout,
+                      new_size: usize) -> Result<*mut u8, AllocErr> {
+        GlobalAlloc::realloc(*self, ptr as *mut Void, old_layout, new_size).into()
     }
 
     #[inline]
-    unsafe fn shrink_in_place(&mut self,
-                              ptr: *mut u8,
-                              layout: Layout,
-                              new_size: usize) -> Result<(), CannotReallocInPlace> {
-        Alloc::shrink_in_place(&mut &*self, ptr, layout, new_size)
+    fn oom(&mut self) -> ! {
+        ::oom()
     }
 }
 
@@ -135,33 +132,6 @@ mod realloc_fallback {
     }
 }
 
-macro_rules! alloc_methods_based_on_global_alloc {
-    () => {
-        #[inline]
-        unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-            GlobalAlloc::alloc(*self, layout).into()
-        }
-
-        #[inline]
-        unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-            GlobalAlloc::alloc_zeroed(*self, layout).into()
-        }
-
-        #[inline]
-        unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-            GlobalAlloc::dealloc(*self, ptr as *mut Void, layout)
-        }
-
-        #[inline]
-        unsafe fn realloc(&mut self,
-                          ptr: *mut u8,
-                          old_layout: Layout,
-                          new_size: usize) -> Result<*mut u8, AllocErr> {
-            GlobalAlloc::realloc(*self, ptr as *mut Void, old_layout, new_size).into()
-        }
-    }
-}
-
 #[cfg(any(unix, target_os = "cloudabi", target_os = "redox"))]
 mod platform {
     extern crate libc;
@@ -170,7 +140,7 @@ mod platform {
 
     use MIN_ALIGN;
     use System;
-    use core::alloc::{GlobalAlloc, Alloc, AllocErr, Layout, Void};
+    use core::alloc::{GlobalAlloc, Layout, Void};
 
     #[unstable(feature = "allocator_api", issue = "32838")]
     unsafe impl GlobalAlloc for System {
@@ -219,15 +189,6 @@ mod platform {
         }
     }
 
-    #[unstable(feature = "allocator_api", issue = "32838")]
-    unsafe impl<'a> Alloc for &'a System {
-        alloc_methods_based_on_global_alloc!();
-
-        fn oom(&mut self) -> ! {
-            ::oom()
-        }
-    }
-
     #[cfg(any(target_os = "android", target_os = "redox", target_os = "solaris"))]
     #[inline]
     unsafe fn aligned_malloc(layout: &Layout) -> *mut Void {
@@ -270,7 +231,7 @@ mod platform {
 mod platform {
     use MIN_ALIGN;
     use System;
-    use core::alloc::{GlobalAlloc, Alloc, Void, AllocErr, Layout, CannotReallocInPlace};
+    use core::alloc::{GlobalAlloc, Void, Layout};
 
     type LPVOID = *mut u8;
     type HANDLE = LPVOID;
@@ -353,47 +314,6 @@ mod platform {
             }
         }
     }
-
-    #[unstable(feature = "allocator_api", issue = "32838")]
-    unsafe impl<'a> Alloc for &'a System {
-        alloc_methods_based_on_global_alloc!();
-
-        #[inline]
-        unsafe fn grow_in_place(&mut self,
-                                ptr: *mut u8,
-                                layout: Layout,
-                                new_size: usize) -> Result<(), CannotReallocInPlace> {
-            self.shrink_in_place(ptr, layout, new_size)
-        }
-
-        #[inline]
-        unsafe fn shrink_in_place(&mut self,
-                                  ptr: *mut u8,
-                                  layout: Layout,
-                                  new_size: usize) -> Result<(), CannotReallocInPlace> {
-            let new = if layout.align() <= MIN_ALIGN {
-                HeapReAlloc(GetProcessHeap(),
-                            HEAP_REALLOC_IN_PLACE_ONLY,
-                            ptr as LPVOID,
-                            new_size)
-            } else {
-                let header = get_header(ptr);
-                HeapReAlloc(GetProcessHeap(),
-                            HEAP_REALLOC_IN_PLACE_ONLY,
-                            header.0 as LPVOID,
-                            new_size + layout.align())
-            };
-            if new.is_null() {
-                Err(CannotReallocInPlace)
-            } else {
-                Ok(())
-            }
-        }
-
-        fn oom(&mut self) -> ! {
-            ::oom()
-        }
-    }
 }
 
 // This is an implementation of a global allocator on the wasm32 platform when
@@ -417,7 +337,7 @@ mod platform {
 mod platform {
     extern crate dlmalloc;
 
-    use core::alloc::{GlobalAlloc, Alloc, AllocErr, Layout, Void};
+    use core::alloc::{GlobalAlloc, Layout, Void};
     use System;
 
     // No need for synchronization here as wasm is currently single-threaded
@@ -445,11 +365,6 @@ mod platform {
             DLMALLOC.realloc(ptr as *mut u8, layout.size(), layout.align(), new_size) as *mut Void
         }
     }
-
-    #[unstable(feature = "allocator_api", issue = "32838")]
-    unsafe impl<'a> Alloc for &'a System {
-        alloc_methods_based_on_global_alloc!();
-    }
 }
 
 fn oom() -> ! {

From eae0d468932660ca383e35bb9d8b0cb4943a82ae Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Wed, 4 Apr 2018 18:57:48 +0200
Subject: [PATCH 22/27] Restore Global.oom() functionality
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

… now that #[global_allocator] does not define a symbol for it
---
 src/Cargo.lock                                   |  1 +
 src/liballoc/alloc.rs                            | 16 ++++++++++++++++
 src/liballoc/lib.rs                              |  1 +
 src/liballoc_jemalloc/Cargo.toml                 |  1 +
 src/liballoc_jemalloc/lib.rs                     | 10 ++++++++++
 src/liballoc_system/lib.rs                       |  4 ++++
 src/libcore/alloc.rs                             |  4 ++++
 src/librustc_allocator/expand.rs                 |  5 +++++
 src/librustc_allocator/lib.rs                    |  6 ++++++
 src/librustc_trans/allocator.rs                  |  2 ++
 src/libstd/alloc.rs                              |  6 ++++++
 .../compile-fail/allocator/not-an-allocator.rs   |  1 +
 12 files changed, 57 insertions(+)
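
As an illustrative sketch of what this wiring enables (not part of the patch; it assumes a nightly toolchain with this series applied and the era's `global_allocator`/`allocator_api` feature gates, and `MyAllocator`/`GLOBAL` are made-up names): a `#[global_allocator]` type can override `GlobalAlloc::oom`, and failure paths such as `Global.oom()` reach it through the generated `__rust_oom` shim instead of a symbol defined by the attribute expansion itself.

#![feature(global_allocator, allocator_api)]

use std::alloc::{GlobalAlloc, Layout, System, Void};

struct MyAllocator;

unsafe impl GlobalAlloc for MyAllocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut Void {
        System.alloc(layout)
    }

    unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
        System.dealloc(ptr, layout)
    }

    fn oom(&self) -> ! {
        // Reached through the __rust_oom shim expanded from
        // #[global_allocator]; the trait's default simply aborts.
        std::process::abort()
    }
}

#[global_allocator]
static GLOBAL: MyAllocator = MyAllocator;

fn main() {
    // Heap allocations, and liballoc's failure paths via Global.oom(),
    // are routed to MyAllocator through the generated shims.
    let v = vec![0u8; 4096];
    assert_eq!(v.len(), 4096);
}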

diff --git a/src/Cargo.lock b/src/Cargo.lock
index 2e969f4ec2bbc..e5297d1482e8b 100644
--- a/src/Cargo.lock
+++ b/src/Cargo.lock
@@ -19,6 +19,7 @@ dependencies = [
 name = "alloc_jemalloc"
 version = "0.0.0"
 dependencies = [
+ "alloc_system 0.0.0",
  "build_helper 0.1.0",
  "cc 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
  "compiler_builtins 0.0.0",
diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index a6fc8d5004c29..beae52726a6ec 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -26,6 +26,9 @@ extern "Rust" {
     #[allocator]
     #[rustc_allocator_nounwind]
     fn __rust_alloc(size: usize, align: usize, err: *mut u8) -> *mut u8;
+    #[cold]
+    #[rustc_allocator_nounwind]
+    fn __rust_oom(err: *const u8) -> !;
     #[rustc_allocator_nounwind]
     fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
     #[rustc_allocator_nounwind]
@@ -44,6 +47,9 @@ extern "Rust" {
     #[allocator]
     #[rustc_allocator_nounwind]
     fn __rust_alloc(size: usize, align: usize) -> *mut u8;
+    #[cold]
+    #[rustc_allocator_nounwind]
+    fn __rust_oom() -> !;
     #[rustc_allocator_nounwind]
     fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
     #[rustc_allocator_nounwind]
@@ -120,6 +126,16 @@ unsafe impl Alloc for Global {
             Err(AllocErr)
         }
     }
+
+    #[inline]
+    fn oom(&mut self) -> ! {
+        unsafe {
+            #[cfg(not(stage0))]
+            __rust_oom();
+            #[cfg(stage0)]
+            __rust_oom(&mut 0);
+        }
+    }
 }
 
 /// The allocator for unique pointers.
diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs
index f6598fe5e8969..a10820ebefd00 100644
--- a/src/liballoc/lib.rs
+++ b/src/liballoc/lib.rs
@@ -97,6 +97,7 @@
 #![feature(from_ref)]
 #![feature(fundamental)]
 #![feature(lang_items)]
+#![feature(libc)]
 #![feature(needs_allocator)]
 #![feature(nonzero)]
 #![feature(optin_builtin_traits)]
diff --git a/src/liballoc_jemalloc/Cargo.toml b/src/liballoc_jemalloc/Cargo.toml
index 7986d5dd2eb54..02435170374c5 100644
--- a/src/liballoc_jemalloc/Cargo.toml
+++ b/src/liballoc_jemalloc/Cargo.toml
@@ -12,6 +12,7 @@ test = false
 doc = false
 
 [dependencies]
+alloc_system = { path = "../liballoc_system" }
 core = { path = "../libcore" }
 libc = { path = "../rustc/libc_shim" }
 compiler_builtins = { path = "../rustc/compiler_builtins_shim" }
diff --git a/src/liballoc_jemalloc/lib.rs b/src/liballoc_jemalloc/lib.rs
index 661d7ab78da01..2b66c293f21a0 100644
--- a/src/liballoc_jemalloc/lib.rs
+++ b/src/liballoc_jemalloc/lib.rs
@@ -14,6 +14,7 @@
             reason = "this library is unlikely to be stabilized in its current \
                       form or name",
             issue = "27783")]
+#![feature(alloc_system)]
 #![feature(libc)]
 #![feature(linkage)]
 #![feature(staged_api)]
@@ -22,12 +23,15 @@
 #![cfg_attr(not(dummy_jemalloc), feature(allocator_api))]
 #![rustc_alloc_kind = "exe"]
 
+extern crate alloc_system;
 extern crate libc;
 
 #[cfg(not(dummy_jemalloc))]
 pub use contents::*;
 #[cfg(not(dummy_jemalloc))]
 mod contents {
+    use core::alloc::GlobalAlloc;
+    use alloc_system::System;
     use libc::{c_int, c_void, size_t};
 
     // Note that the symbols here are prefixed by default on macOS and Windows (we
@@ -96,6 +100,12 @@ mod contents {
         ptr
     }
 
+    #[no_mangle]
+    #[rustc_std_internal_symbol]
+    pub unsafe extern fn __rde_oom() -> ! {
+        System.oom()
+    }
+
     #[no_mangle]
     #[rustc_std_internal_symbol]
     pub unsafe extern fn __rde_dealloc(ptr: *mut u8,
diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs
index 4516664e97c55..c6507282b244d 100644
--- a/src/liballoc_system/lib.rs
+++ b/src/liballoc_system/lib.rs
@@ -367,6 +367,7 @@ mod platform {
     }
 }
 
+#[inline]
 fn oom() -> ! {
     write_to_stderr("fatal runtime error: memory allocation failed");
     unsafe {
@@ -375,6 +376,7 @@ fn oom() -> ! {
 }
 
 #[cfg(any(unix, target_os = "redox"))]
+#[inline]
 fn write_to_stderr(s: &str) {
     extern crate libc;
 
@@ -386,6 +388,7 @@ fn write_to_stderr(s: &str) {
 }
 
 #[cfg(windows)]
+#[inline]
 fn write_to_stderr(s: &str) {
     use core::ptr;
 
@@ -421,4 +424,5 @@ fn write_to_stderr(s: &str) {
 }
 
 #[cfg(not(any(windows, unix, target_os = "redox")))]
+#[inline]
 fn write_to_stderr(_: &str) {}
diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs
index cfa7df06a40e1..7334f986f2baa 100644
--- a/src/libcore/alloc.rs
+++ b/src/libcore/alloc.rs
@@ -438,6 +438,10 @@ pub unsafe trait GlobalAlloc {
         }
         new_ptr
     }
+
+    fn oom(&self) -> ! {
+        unsafe { ::intrinsics::abort() }
+    }
 }
 
 /// An implementation of `Alloc` can allocate, reallocate, and
diff --git a/src/librustc_allocator/expand.rs b/src/librustc_allocator/expand.rs
index ce41fe1f3bc9e..58d4c7f289c39 100644
--- a/src/librustc_allocator/expand.rs
+++ b/src/librustc_allocator/expand.rs
@@ -231,6 +231,7 @@ impl<'a> AllocFnFactory<'a> {
             }
 
             AllocatorTy::ResultPtr |
+            AllocatorTy::Bang |
             AllocatorTy::Unit => {
                 panic!("can't convert AllocatorTy to an argument")
             }
@@ -248,6 +249,10 @@ impl<'a> AllocFnFactory<'a> {
                 (self.ptr_u8(), expr)
             }
 
+            AllocatorTy::Bang => {
+                (self.cx.ty(self.span, TyKind::Never), expr)
+            }
+
             AllocatorTy::Unit => {
                 (self.cx.ty(self.span, TyKind::Tup(Vec::new())), expr)
             }
diff --git a/src/librustc_allocator/lib.rs b/src/librustc_allocator/lib.rs
index 969086815ded4..706eab72d44cc 100644
--- a/src/librustc_allocator/lib.rs
+++ b/src/librustc_allocator/lib.rs
@@ -23,6 +23,11 @@ pub static ALLOCATOR_METHODS: &[AllocatorMethod] = &[
         inputs: &[AllocatorTy::Layout],
         output: AllocatorTy::ResultPtr,
     },
+    AllocatorMethod {
+        name: "oom",
+        inputs: &[],
+        output: AllocatorTy::Bang,
+    },
     AllocatorMethod {
         name: "dealloc",
         inputs: &[AllocatorTy::Ptr, AllocatorTy::Layout],
@@ -47,6 +52,7 @@ pub struct AllocatorMethod {
 }
 
 pub enum AllocatorTy {
+    Bang,
     Layout,
     Ptr,
     ResultPtr,
diff --git a/src/librustc_trans/allocator.rs b/src/librustc_trans/allocator.rs
index ffebb959ebfde..f2dd2ed8460eb 100644
--- a/src/librustc_trans/allocator.rs
+++ b/src/librustc_trans/allocator.rs
@@ -43,11 +43,13 @@ pub(crate) unsafe fn trans(tcx: TyCtxt, mods: &ModuleLlvm, kind: AllocatorKind)
                 AllocatorTy::Ptr => args.push(i8p),
                 AllocatorTy::Usize => args.push(usize),
 
+                AllocatorTy::Bang |
                 AllocatorTy::ResultPtr |
                 AllocatorTy::Unit => panic!("invalid allocator arg"),
             }
         }
         let output = match method.output {
+            AllocatorTy::Bang => None,
             AllocatorTy::ResultPtr => Some(i8p),
             AllocatorTy::Unit => None,
 
diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs
index 335dc7e041232..4e728df010a47 100644
--- a/src/libstd/alloc.rs
+++ b/src/libstd/alloc.rs
@@ -35,6 +35,12 @@ pub mod __default_lib_allocator {
         System.alloc(layout) as *mut u8
     }
 
+    #[no_mangle]
+    #[rustc_std_internal_symbol]
+    pub unsafe extern fn __rdl_oom() -> ! {
+        System.oom()
+    }
+
     #[no_mangle]
     #[rustc_std_internal_symbol]
     pub unsafe extern fn __rdl_dealloc(ptr: *mut u8,
diff --git a/src/test/compile-fail/allocator/not-an-allocator.rs b/src/test/compile-fail/allocator/not-an-allocator.rs
index 140cad22f34e4..1479d0b62642a 100644
--- a/src/test/compile-fail/allocator/not-an-allocator.rs
+++ b/src/test/compile-fail/allocator/not-an-allocator.rs
@@ -16,5 +16,6 @@ static A: usize = 0;
 //~| the trait bound `usize:
 //~| the trait bound `usize:
 //~| the trait bound `usize:
+//~| the trait bound `usize:
 
 fn main() {}

From fd242ee64c5488e64e2bb677d90f2460e017b7cb Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Wed, 4 Apr 2018 19:15:22 +0200
Subject: [PATCH 23/27] impl GlobalAlloc for Global

---
 src/liballoc/alloc.rs | 85 +++++++++++++++++++++++++------------------
 1 file changed, 50 insertions(+), 35 deletions(-)
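
A minimal usage sketch of the new arrangement (again not part of the patch; same nightly/feature-gate assumptions): `Global` can now be driven through the raw `GlobalAlloc` interface, where failure is a null `*mut Void`, while the `Alloc` impl keeps the `Result`-based interface by forwarding to it.

#![feature(allocator_api)]

use std::alloc::{GlobalAlloc, Global, Layout};

fn main() {
    unsafe {
        let layout = Layout::from_size_align(16, 8).unwrap();

        // GlobalAlloc::alloc returns a raw *mut Void; null signals failure.
        let ptr = Global.alloc(layout.clone());
        if ptr.is_null() {
            Global.oom();
        }

        Global.dealloc(ptr, layout);
    }
}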

diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index beae52726a6ec..063f0543ec4a6 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -73,62 +73,42 @@ pub type Heap = Global;
 #[allow(non_upper_case_globals)]
 pub const Heap: Global = Global;
 
-unsafe impl Alloc for Global {
+unsafe impl GlobalAlloc for Global {
     #[inline]
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc(&self, layout: Layout) -> *mut Void {
         #[cfg(not(stage0))]
         let ptr = __rust_alloc(layout.size(), layout.align());
         #[cfg(stage0)]
         let ptr = __rust_alloc(layout.size(), layout.align(), &mut 0);
-
-        if !ptr.is_null() {
-            Ok(ptr)
-        } else {
-            Err(AllocErr)
-        }
+        ptr as *mut Void
     }
 
     #[inline]
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        __rust_dealloc(ptr, layout.size(), layout.align())
+    unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
+        __rust_dealloc(ptr as *mut u8, layout.size(), layout.align())
     }
 
     #[inline]
-    unsafe fn realloc(&mut self,
-                      ptr: *mut u8,
-                      layout: Layout,
-                      new_size: usize)
-                      -> Result<*mut u8, AllocErr>
-    {
+    unsafe fn realloc(&self, ptr: *mut Void, layout: Layout, new_size: usize) -> *mut Void {
         #[cfg(not(stage0))]
-        let ptr = __rust_realloc(ptr, layout.size(), layout.align(), new_size);
+        let ptr = __rust_realloc(ptr as *mut u8, layout.size(), layout.align(), new_size);
         #[cfg(stage0)]
-        let ptr = __rust_realloc(ptr, layout.size(), layout.align(),
+        let ptr = __rust_realloc(ptr as *mut u8, layout.size(), layout.align(),
                                  new_size, layout.align(), &mut 0);
-
-        if !ptr.is_null() {
-            Ok(ptr)
-        } else {
-            Err(AllocErr)
-        }
+        ptr as *mut Void
     }
 
     #[inline]
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Void {
         #[cfg(not(stage0))]
         let ptr = __rust_alloc_zeroed(layout.size(), layout.align());
         #[cfg(stage0)]
         let ptr = __rust_alloc_zeroed(layout.size(), layout.align(), &mut 0);
-
-        if !ptr.is_null() {
-            Ok(ptr)
-        } else {
-            Err(AllocErr)
-        }
+        ptr as *mut Void
     }
 
     #[inline]
-    fn oom(&mut self) -> ! {
+    fn oom(&self) -> ! {
         unsafe {
             #[cfg(not(stage0))]
             __rust_oom();
@@ -138,6 +118,38 @@ unsafe impl Alloc for Global {
     }
 }
 
+unsafe impl Alloc for Global {
+    #[inline]
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        GlobalAlloc::alloc(self, layout).into()
+    }
+
+    #[inline]
+    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+        GlobalAlloc::dealloc(self, ptr as *mut Void, layout)
+    }
+
+    #[inline]
+    unsafe fn realloc(&mut self,
+                      ptr: *mut u8,
+                      layout: Layout,
+                      new_size: usize)
+                      -> Result<*mut u8, AllocErr>
+    {
+        GlobalAlloc::realloc(self, ptr as *mut Void, layout, new_size).into()
+    }
+
+    #[inline]
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+        GlobalAlloc::alloc_zeroed(self, layout).into()
+    }
+
+    #[inline]
+    fn oom(&mut self) -> ! {
+        GlobalAlloc::oom(self)
+    }
+}
+
 /// The allocator for unique pointers.
 // This function must not unwind. If it does, MIR trans will fail.
 #[cfg(not(test))]
@@ -148,9 +160,12 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
         align as *mut u8
     } else {
         let layout = Layout::from_size_align_unchecked(size, align);
-        Global.alloc(layout).unwrap_or_else(|_| {
+        let ptr = Global.alloc(layout);
+        if !ptr.is_null() {
+            ptr as *mut u8
+        } else {
             Global.oom()
-        })
+        }
     }
 }
 
@@ -162,7 +177,7 @@ pub(crate) unsafe fn box_free<T: ?Sized>(ptr: *mut T) {
     // We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
     if size != 0 {
         let layout = Layout::from_size_align_unchecked(size, align);
-        Global.dealloc(ptr as *mut u8, layout);
+        Global.dealloc(ptr as *mut Void, layout);
     }
 }
 

From fddf51ee0b9765484fc316dbf3d4feb8ceea715d Mon Sep 17 00:00:00 2001
From: Mike Hommey <mh@glandium.org>
Date: Tue, 3 Apr 2018 08:51:02 +0900
Subject: [PATCH 24/27] Use NonNull<Void> instead of *mut u8 in the Alloc trait

Fixes #49608
---
 src/doc/nomicon                               |  2 +-
 .../src/language-features/global-allocator.md |  1 +
 src/liballoc/alloc.rs                         | 19 +++---
 src/liballoc/arc.rs                           | 16 ++---
 src/liballoc/btree/node.rs                    | 16 ++---
 src/liballoc/heap.rs                          | 22 +++++--
 src/liballoc/lib.rs                           |  1 +
 src/liballoc/raw_vec.rs                       | 40 ++++++-------
 src/liballoc/rc.rs                            | 18 +++---
 src/liballoc/tests/heap.rs                    |  3 +-
 src/liballoc_system/lib.rs                    | 29 +++++-----
 src/libcore/alloc.rs                          | 58 ++++++++-----------
 src/libcore/ptr.rs                            |  8 +++
 src/libstd/collections/hash/table.rs          |  6 +-
 src/libstd/lib.rs                             |  1 +
 src/test/run-pass/allocator/xcrate-use2.rs    |  2 +-
 src/test/run-pass/realloc-16687.rs            | 18 +++---
 src/test/run-pass/regions-mock-trans.rs       |  5 +-
 18 files changed, 136 insertions(+), 129 deletions(-)
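
A short sketch of the Alloc-facing side after this change (not part of the patch; assumes a nightly with `feature(allocator_api, nonnull_cast)`, and `demo` is just an illustrative name): `alloc` now hands back `NonNull<Void>`, `dealloc` takes it, and `NonNull::cast`/`NonNull::as_void` bridge to and from typed pointers.

#![feature(allocator_api, nonnull_cast)]

use std::alloc::{Alloc, Global, Layout};
use std::ptr::NonNull;

unsafe fn demo() {
    let layout = Layout::array::<u32>(4).unwrap();

    // Alloc::alloc now yields NonNull<Void> instead of *mut u8.
    let mem = Global.alloc(layout.clone()).unwrap_or_else(|_| Global.oom());

    // Bridge to a typed pointer for actual use...
    let elems: NonNull<u32> = mem.cast();
    *elems.as_ptr() = 42;
    assert_eq!(*elems.as_ptr(), 42);

    // ...and back to NonNull<Void> for deallocation.
    Global.dealloc(elems.as_void(), layout);
}

fn main() {
    unsafe { demo() }
}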

diff --git a/src/doc/nomicon b/src/doc/nomicon
index 6a8f0a27e9a58..498ac2997420f 160000
--- a/src/doc/nomicon
+++ b/src/doc/nomicon
@@ -1 +1 @@
-Subproject commit 6a8f0a27e9a58c55c89d07bc43a176fdae5e051c
+Subproject commit 498ac2997420f7b25f7cd0a3f8202950d8ad93ec
diff --git a/src/doc/unstable-book/src/language-features/global-allocator.md b/src/doc/unstable-book/src/language-features/global-allocator.md
index 6ce12ba684ddc..a3f3ee65bf014 100644
--- a/src/doc/unstable-book/src/language-features/global-allocator.md
+++ b/src/doc/unstable-book/src/language-features/global-allocator.md
@@ -30,6 +30,7 @@ looks like:
 #![feature(global_allocator, allocator_api, heap_api)]
 
 use std::alloc::{GlobalAlloc, System, Layout, Void};
+use std::ptr::NonNull;
 
 struct MyAllocator;
 
diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index 063f0543ec4a6..af48aa7961e06 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -16,6 +16,7 @@
             issue = "32838")]
 
 use core::intrinsics::{min_align_of_val, size_of_val};
+use core::ptr::NonNull;
 use core::usize;
 
 #[doc(inline)]
@@ -120,27 +121,27 @@ unsafe impl GlobalAlloc for Global {
 
 unsafe impl Alloc for Global {
     #[inline]
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
         GlobalAlloc::alloc(self, layout).into()
     }
 
     #[inline]
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        GlobalAlloc::dealloc(self, ptr as *mut Void, layout)
+    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
+        GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
     }
 
     #[inline]
     unsafe fn realloc(&mut self,
-                      ptr: *mut u8,
+                      ptr: NonNull<Void>,
                       layout: Layout,
                       new_size: usize)
-                      -> Result<*mut u8, AllocErr>
+                      -> Result<NonNull<Void>, AllocErr>
     {
-        GlobalAlloc::realloc(self, ptr as *mut Void, layout, new_size).into()
+        GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size).into()
     }
 
     #[inline]
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
         GlobalAlloc::alloc_zeroed(self, layout).into()
     }
 
@@ -195,8 +196,8 @@ mod tests {
             let ptr = Global.alloc_zeroed(layout.clone())
                 .unwrap_or_else(|_| Global.oom());
 
-            let end = ptr.offset(layout.size() as isize);
-            let mut i = ptr;
+            let mut i = ptr.cast::<u8>().as_ptr();
+            let end = i.offset(layout.size() as isize);
             while i < end {
                 assert_eq!(*i, 0);
                 i = i.offset(1);
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index f0a325530ba1e..88754ace3ce2c 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -512,15 +512,13 @@ impl<T: ?Sized> Arc<T> {
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
         ptr::drop_in_place(&mut self.ptr.as_mut().data);
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+            Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()))
         }
     }
 
@@ -558,7 +556,7 @@ impl<T: ?Sized> Arc<T> {
             .unwrap_or_else(|_| Global.oom());
 
         // Initialize the real ArcInner
-        let inner = set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>;
+        let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner<T>;
 
         ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
         ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
@@ -625,7 +623,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
         // In the event of a panic, elements that have been written
         // into the new ArcInner will be dropped, then the memory freed.
         struct Guard<T> {
-            mem: *mut u8,
+            mem: NonNull<u8>,
             elems: *mut T,
             layout: Layout,
             n_elems: usize,
@@ -639,7 +637,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
                     let slice = from_raw_parts_mut(self.elems, self.n_elems);
                     ptr::drop_in_place(slice);
 
-                    Global.dealloc(self.mem, self.layout.clone());
+                    Global.dealloc(self.mem.as_void(), self.layout.clone());
                 }
             }
         }
@@ -655,7 +653,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
             let elems = &mut (*ptr).data as *mut [T] as *mut T;
 
             let mut guard = Guard{
-                mem: mem,
+                mem: NonNull::new_unchecked(mem),
                 elems: elems,
                 layout: layout,
                 n_elems: 0,
@@ -1147,8 +1145,6 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// assert!(other_weak_foo.upgrade().is_none());
     /// ```
     fn drop(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
         // the memory orderings
@@ -1160,7 +1156,7 @@ impl<T: ?Sized> Drop for Weak<T> {
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
-                Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+                Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()))
             }
         }
     }
diff --git a/src/liballoc/btree/node.rs b/src/liballoc/btree/node.rs
index 8e23228bd28fe..64aa40ac166e6 100644
--- a/src/liballoc/btree/node.rs
+++ b/src/liballoc/btree/node.rs
@@ -236,7 +236,7 @@ impl<K, V> Root<K, V> {
     pub fn pop_level(&mut self) {
         debug_assert!(self.height > 0);
 
-        let top = self.node.ptr.as_ptr() as *mut u8;
+        let top = self.node.ptr;
 
         self.node = unsafe {
             BoxedNode::from_ptr(self.as_mut()
@@ -249,7 +249,7 @@ impl<K, V> Root<K, V> {
         self.as_mut().as_leaf_mut().parent = ptr::null();
 
         unsafe {
-            Global.dealloc(top, Layout::new::<InternalNode<K, V>>());
+            Global.dealloc(NonNull::from(top).as_void(), Layout::new::<InternalNode<K, V>>());
         }
     }
 }
@@ -433,9 +433,9 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
             marker::Edge
         >
     > {
-        let ptr = self.as_leaf() as *const LeafNode<K, V> as *const u8 as *mut u8;
+        let node = self.node;
         let ret = self.ascend().ok();
-        Global.dealloc(ptr, Layout::new::<LeafNode<K, V>>());
+        Global.dealloc(node.as_void(), Layout::new::<LeafNode<K, V>>());
         ret
     }
 }
@@ -454,9 +454,9 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
             marker::Edge
         >
     > {
-        let ptr = self.as_internal() as *const InternalNode<K, V> as *const u8 as *mut u8;
+        let node = self.node;
         let ret = self.ascend().ok();
-        Global.dealloc(ptr, Layout::new::<InternalNode<K, V>>());
+        Global.dealloc(node.as_void(), Layout::new::<InternalNode<K, V>>());
         ret
     }
 }
@@ -1239,12 +1239,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
                 }
 
                 Global.dealloc(
-                    right_node.node.as_ptr() as *mut u8,
+                    right_node.node.as_void(),
                     Layout::new::<InternalNode<K, V>>(),
                 );
             } else {
                 Global.dealloc(
-                    right_node.node.as_ptr() as *mut u8,
+                    right_node.node.as_void(),
                     Layout::new::<LeafNode<K, V>>(),
                 );
             }
diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs
index e79383331e180..cfb6504e743a1 100644
--- a/src/liballoc/heap.rs
+++ b/src/liballoc/heap.rs
@@ -8,14 +8,20 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-pub use alloc::{Excess, Layout, AllocErr, CannotReallocInPlace};
+#![allow(deprecated)]
+
+pub use alloc::{Layout, AllocErr, CannotReallocInPlace, Void};
 use core::alloc::Alloc as CoreAlloc;
+use core::ptr::NonNull;
 
 #[doc(hidden)]
 pub mod __core {
     pub use core::*;
 }
 
+#[derive(Debug)]
+pub struct Excess(pub *mut u8, pub usize);
+
 /// Compatibility with older versions of #[global_allocator] during bootstrap
 pub unsafe trait Alloc {
     unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr>;
@@ -42,13 +48,13 @@ pub unsafe trait Alloc {
                               new_layout: Layout) -> Result<(), CannotReallocInPlace>;
 }
 
-#[allow(deprecated)]
 unsafe impl<T> Alloc for T where T: CoreAlloc {
     unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        CoreAlloc::alloc(self, layout)
+        CoreAlloc::alloc(self, layout).map(|ptr| ptr.cast().as_ptr())
     }
 
     unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+        let ptr = NonNull::new_unchecked(ptr as *mut Void);
         CoreAlloc::dealloc(self, ptr, layout)
     }
 
@@ -64,28 +70,33 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
                       ptr: *mut u8,
                       layout: Layout,
                       new_layout: Layout) -> Result<*mut u8, AllocErr> {
-        CoreAlloc::realloc(self, ptr, layout, new_layout.size())
+        let ptr = NonNull::new_unchecked(ptr as *mut Void);
+        CoreAlloc::realloc(self, ptr, layout, new_layout.size()).map(|ptr| ptr.cast().as_ptr())
     }
 
     unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        CoreAlloc::alloc_zeroed(self, layout)
+        CoreAlloc::alloc_zeroed(self, layout).map(|ptr| ptr.cast().as_ptr())
     }
 
     unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
         CoreAlloc::alloc_excess(self, layout)
+            .map(|e| Excess(e.0 .cast().as_ptr(), e.1))
     }
 
     unsafe fn realloc_excess(&mut self,
                              ptr: *mut u8,
                              layout: Layout,
                              new_layout: Layout) -> Result<Excess, AllocErr> {
+        let ptr = NonNull::new_unchecked(ptr as *mut Void);
         CoreAlloc::realloc_excess(self, ptr, layout, new_layout.size())
+            .map(|e| Excess(e.0 .cast().as_ptr(), e.1))
     }
 
     unsafe fn grow_in_place(&mut self,
                             ptr: *mut u8,
                             layout: Layout,
                             new_layout: Layout) -> Result<(), CannotReallocInPlace> {
+        let ptr = NonNull::new_unchecked(ptr as *mut Void);
         CoreAlloc::grow_in_place(self, ptr, layout, new_layout.size())
     }
 
@@ -93,6 +104,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
                               ptr: *mut u8,
                               layout: Layout,
                               new_layout: Layout) -> Result<(), CannotReallocInPlace> {
+        let ptr = NonNull::new_unchecked(ptr as *mut Void);
         CoreAlloc::shrink_in_place(self, ptr, layout, new_layout.size())
     }
 }
diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs
index a10820ebefd00..3a106a2ff5c3d 100644
--- a/src/liballoc/lib.rs
+++ b/src/liballoc/lib.rs
@@ -99,6 +99,7 @@
 #![feature(lang_items)]
 #![feature(libc)]
 #![feature(needs_allocator)]
+#![feature(nonnull_cast)]
 #![feature(nonzero)]
 #![feature(optin_builtin_traits)]
 #![feature(pattern)]
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 80b816878fb37..d72301f5ad640 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -12,7 +12,7 @@ use alloc::{Alloc, Layout, Global};
 use core::cmp;
 use core::mem;
 use core::ops::Drop;
-use core::ptr::{self, Unique};
+use core::ptr::{self, NonNull, Unique};
 use core::slice;
 use super::boxed::Box;
 use super::allocator::CollectionAllocErr;
@@ -90,7 +90,7 @@ impl<T, A: Alloc> RawVec<T, A> {
 
             // handles ZSTs and `cap = 0` alike
             let ptr = if alloc_size == 0 {
-                mem::align_of::<T>() as *mut u8
+                NonNull::<T>::dangling().as_void()
             } else {
                 let align = mem::align_of::<T>();
                 let result = if zeroed {
@@ -105,7 +105,7 @@ impl<T, A: Alloc> RawVec<T, A> {
             };
 
             RawVec {
-                ptr: Unique::new_unchecked(ptr as *mut _),
+                ptr: ptr.cast().into(),
                 cap,
                 a,
             }
@@ -310,11 +310,11 @@ impl<T, A: Alloc> RawVec<T, A> {
                     let new_cap = 2 * self.cap;
                     let new_size = new_cap * elem_size;
                     alloc_guard(new_size).expect("capacity overflow");
-                    let ptr_res = self.a.realloc(self.ptr.as_ptr() as *mut u8,
+                    let ptr_res = self.a.realloc(NonNull::from(self.ptr).as_void(),
                                                  cur,
                                                  new_size);
                     match ptr_res {
-                        Ok(ptr) => (new_cap, Unique::new_unchecked(ptr as *mut T)),
+                        Ok(ptr) => (new_cap, ptr.cast().into()),
                         Err(_) => self.a.oom(),
                     }
                 }
@@ -369,8 +369,7 @@ impl<T, A: Alloc> RawVec<T, A> {
             let new_cap = 2 * self.cap;
             let new_size = new_cap * elem_size;
             alloc_guard(new_size).expect("capacity overflow");
-            let ptr = self.ptr() as *mut _;
-            match self.a.grow_in_place(ptr, old_layout, new_size) {
+            match self.a.grow_in_place(NonNull::from(self.ptr).as_void(), old_layout, new_size) {
                 Ok(_) => {
                     // We can't directly divide `size`.
                     self.cap = new_cap;
@@ -427,13 +426,12 @@ impl<T, A: Alloc> RawVec<T, A> {
             let res = match self.current_layout() {
                 Some(layout) => {
                     debug_assert!(new_layout.align() == layout.align());
-                    let old_ptr = self.ptr.as_ptr() as *mut u8;
-                    self.a.realloc(old_ptr, layout, new_layout.size())
+                    self.a.realloc(NonNull::from(self.ptr).as_void(), layout, new_layout.size())
                 }
                 None => self.a.alloc(new_layout),
             };
 
-            self.ptr = Unique::new_unchecked(res? as *mut T);
+            self.ptr = res?.cast().into();
             self.cap = new_cap;
 
             Ok(())
@@ -537,13 +535,12 @@ impl<T, A: Alloc> RawVec<T, A> {
             let res = match self.current_layout() {
                 Some(layout) => {
                     debug_assert!(new_layout.align() == layout.align());
-                    let old_ptr = self.ptr.as_ptr() as *mut u8;
-                    self.a.realloc(old_ptr, layout, new_layout.size())
+                    self.a.realloc(NonNull::from(self.ptr).as_void(), layout, new_layout.size())
                 }
                 None => self.a.alloc(new_layout),
             };
 
-            self.ptr = Unique::new_unchecked(res? as *mut T);
+            self.ptr = res?.cast().into();
             self.cap = new_cap;
 
             Ok(())
@@ -600,11 +597,12 @@ impl<T, A: Alloc> RawVec<T, A> {
             // (regardless of whether `self.cap - used_cap` wrapped).
             // Therefore we can safely call grow_in_place.
 
-            let ptr = self.ptr() as *mut _;
             let new_layout = Layout::new::<T>().repeat(new_cap).unwrap().0;
             // FIXME: may crash and burn on over-reserve
             alloc_guard(new_layout.size()).expect("capacity overflow");
-            match self.a.grow_in_place(ptr, old_layout, new_layout.size()) {
+            match self.a.grow_in_place(
+                NonNull::from(self.ptr).as_void(), old_layout, new_layout.size(),
+            ) {
                 Ok(_) => {
                     self.cap = new_cap;
                     true
@@ -664,10 +662,10 @@ impl<T, A: Alloc> RawVec<T, A> {
                 let new_size = elem_size * amount;
                 let align = mem::align_of::<T>();
                 let old_layout = Layout::from_size_align_unchecked(old_size, align);
-                match self.a.realloc(self.ptr.as_ptr() as *mut u8,
+                match self.a.realloc(NonNull::from(self.ptr).as_void(),
                                      old_layout,
                                      new_size) {
-                    Ok(p) => self.ptr = Unique::new_unchecked(p as *mut T),
+                    Ok(p) => self.ptr = p.cast().into(),
                     Err(_) => self.a.oom(),
                 }
             }
@@ -700,8 +698,7 @@ impl<T, A: Alloc> RawVec<T, A> {
         let elem_size = mem::size_of::<T>();
         if elem_size != 0 {
             if let Some(layout) = self.current_layout() {
-                let ptr = self.ptr() as *mut u8;
-                self.a.dealloc(ptr, layout);
+                self.a.dealloc(NonNull::from(self.ptr).as_void(), layout);
             }
         }
     }
@@ -737,6 +734,7 @@ fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use alloc::Void;
 
     #[test]
     fn allocator_param() {
@@ -756,7 +754,7 @@ mod tests {
         // before allocation attempts start failing.
         struct BoundedAlloc { fuel: usize }
         unsafe impl Alloc for BoundedAlloc {
-            unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+            unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
                 let size = layout.size();
                 if size > self.fuel {
                     return Err(AllocErr);
@@ -766,7 +764,7 @@ mod tests {
                     err @ Err(_) => err,
                 }
             }
-            unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+            unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
                 Global.dealloc(ptr, layout)
             }
         }
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 3c0b11bfe747f..1c835fe50decb 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -259,7 +259,7 @@ use core::ops::CoerceUnsized;
 use core::ptr::{self, NonNull};
 use core::convert::From;
 
-use alloc::{Global, Alloc, Layout, box_free};
+use alloc::{Global, Alloc, Layout, Void, box_free};
 use string::String;
 use vec::Vec;
 
@@ -671,7 +671,7 @@ impl<T: ?Sized> Rc<T> {
             .unwrap_or_else(|_| Global.oom());
 
         // Initialize the real RcBox
-        let inner = set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>;
+        let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut RcBox<T>;
 
         ptr::write(&mut (*inner).strong, Cell::new(1));
         ptr::write(&mut (*inner).weak, Cell::new(1));
@@ -737,7 +737,7 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
         // In the event of a panic, elements that have been written
         // into the new RcBox will be dropped, then the memory freed.
         struct Guard<T> {
-            mem: *mut u8,
+            mem: NonNull<Void>,
             elems: *mut T,
             layout: Layout,
             n_elems: usize,
@@ -760,14 +760,14 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
             let v_ptr = v as *const [T];
             let ptr = Self::allocate_for_ptr(v_ptr);
 
-            let mem = ptr as *mut _ as *mut u8;
+            let mem = ptr as *mut _ as *mut Void;
             let layout = Layout::for_value(&*ptr);
 
             // Pointer to first element
             let elems = &mut (*ptr).value as *mut [T] as *mut T;
 
             let mut guard = Guard{
-                mem: mem,
+                mem: NonNull::new_unchecked(mem),
                 elems: elems,
                 layout: layout,
                 n_elems: 0,
@@ -834,8 +834,6 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = self.ptr.as_ptr();
-
             self.dec_strong();
             if self.strong() == 0 {
                 // destroy the contained object
@@ -846,7 +844,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
                 self.dec_weak();
 
                 if self.weak() == 0 {
-                    Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
+                    Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()));
                 }
             }
         }
@@ -1266,13 +1264,11 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = self.ptr.as_ptr();
-
             self.dec_weak();
             // the weak count starts at 1, and will only go to zero if all
             // the strong pointers have disappeared.
             if self.weak() == 0 {
-                Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
+                Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()));
             }
         }
     }
diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs
index 328131e2fef7a..6fa88ce969a0e 100644
--- a/src/liballoc/tests/heap.rs
+++ b/src/liballoc/tests/heap.rs
@@ -34,7 +34,8 @@ fn check_overalign_requests<T: Alloc>(mut allocator: T) {
             allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap()
         }).collect();
         for &ptr in &pointers {
-            assert_eq!((ptr as usize) % align, 0, "Got a pointer less aligned than requested")
+            assert_eq!((ptr.as_ptr() as usize) % align, 0,
+                       "Got a pointer less aligned than requested")
         }
 
         // Clean up
diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs
index c6507282b244d..bf27e972177ce 100644
--- a/src/liballoc_system/lib.rs
+++ b/src/liballoc_system/lib.rs
@@ -42,6 +42,7 @@ const MIN_ALIGN: usize = 8;
 const MIN_ALIGN: usize = 16;
 
 use core::alloc::{Alloc, GlobalAlloc, AllocErr, Layout, Void};
+use core::ptr::NonNull;
 
 #[unstable(feature = "allocator_api", issue = "32838")]
 pub struct System;
@@ -49,26 +50,26 @@ pub struct System;
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl Alloc for System {
     #[inline]
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
         GlobalAlloc::alloc(self, layout).into()
     }
 
     #[inline]
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
         GlobalAlloc::alloc_zeroed(self, layout).into()
     }
 
     #[inline]
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        GlobalAlloc::dealloc(self, ptr as *mut Void, layout)
+    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
+        GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
     }
 
     #[inline]
     unsafe fn realloc(&mut self,
-                      ptr: *mut u8,
+                      ptr: NonNull<Void>,
                       old_layout: Layout,
-                      new_size: usize) -> Result<*mut u8, AllocErr> {
-        GlobalAlloc::realloc(self, ptr as *mut Void, old_layout, new_size).into()
+                      new_size: usize) -> Result<NonNull<Void>, AllocErr> {
+        GlobalAlloc::realloc(self, ptr.as_ptr(), old_layout, new_size).into()
     }
 
     #[inline]
@@ -81,26 +82,26 @@ unsafe impl Alloc for System {
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl<'a> Alloc for &'a System {
     #[inline]
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
         GlobalAlloc::alloc(*self, layout).into()
     }
 
     #[inline]
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
         GlobalAlloc::alloc_zeroed(*self, layout).into()
     }
 
     #[inline]
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        GlobalAlloc::dealloc(*self, ptr as *mut Void, layout)
+    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
+        GlobalAlloc::dealloc(*self, ptr.as_ptr(), layout)
     }
 
     #[inline]
     unsafe fn realloc(&mut self,
-                      ptr: *mut u8,
+                      ptr: NonNull<Void>,
                       old_layout: Layout,
-                      new_size: usize) -> Result<*mut u8, AllocErr> {
-        GlobalAlloc::realloc(*self, ptr as *mut Void, old_layout, new_size).into()
+                      new_size: usize) -> Result<NonNull<Void>, AllocErr> {
+        GlobalAlloc::realloc(*self, ptr.as_ptr(), old_layout, new_size).into()
     }
 
     #[inline]
diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs
index 7334f986f2baa..632eed960492c 100644
--- a/src/libcore/alloc.rs
+++ b/src/libcore/alloc.rs
@@ -42,21 +42,17 @@ impl Void {
 }
 
 /// Convert from a return value of GlobalAlloc::alloc to that of Alloc::alloc
-impl From<*mut Void> for Result<*mut u8, AllocErr> {
+impl From<*mut Void> for Result<NonNull<Void>, AllocErr> {
     fn from(ptr: *mut Void) -> Self {
-        if !ptr.is_null() {
-            Ok(ptr as *mut u8)
-        } else {
-            Err(AllocErr)
-        }
+        NonNull::new(ptr).ok_or(AllocErr)
     }
 }
 
 /// Convert from a return value of Alloc::alloc to that of GlobalAlloc::alloc
-impl From<Result<*mut u8, AllocErr>> for *mut Void {
-    fn from(result: Result<*mut u8, AllocErr>) -> Self {
+impl From<Result<NonNull<Void>, AllocErr>> for *mut Void {
+    fn from(result: Result<NonNull<Void>, AllocErr>) -> Self {
         match result {
-            Ok(ptr) => ptr as *mut Void,
+            Ok(ptr) => ptr.as_ptr(),
             Err(_) => Void::null_mut(),
         }
     }
@@ -65,7 +61,7 @@ impl From<Result<*mut u8, AllocErr>> for *mut Void {
 /// Represents the combination of a starting address and
 /// a total capacity of the returned block.
 #[derive(Debug)]
-pub struct Excess(pub *mut u8, pub usize);
+pub struct Excess(pub NonNull<Void>, pub usize);
 
 fn size_align<T>() -> (usize, usize) {
     (mem::size_of::<T>(), mem::align_of::<T>())
@@ -575,7 +571,7 @@ pub unsafe trait Alloc {
     /// Clients wishing to abort computation in response to an
     /// allocation error are encouraged to call the allocator's `oom`
     /// method, rather than directly invoking `panic!` or similar.
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr>;
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr>;
 
     /// Deallocate the memory referenced by `ptr`.
     ///
@@ -592,7 +588,7 @@ pub unsafe trait Alloc {
     /// * In addition to fitting the block of memory `layout`, the
     ///   alignment of the `layout` must match the alignment used
     ///   to allocate that block of memory.
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout);
+    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout);
 
     /// Allocator-specific method for signaling an out-of-memory
     /// condition.
@@ -710,9 +706,9 @@ pub unsafe trait Alloc {
     /// reallocation error are encouraged to call the allocator's `oom`
     /// method, rather than directly invoking `panic!` or similar.
     unsafe fn realloc(&mut self,
-                      ptr: *mut u8,
+                      ptr: NonNull<Void>,
                       layout: Layout,
-                      new_size: usize) -> Result<*mut u8, AllocErr> {
+                      new_size: usize) -> Result<NonNull<Void>, AllocErr> {
         let old_size = layout.size();
 
         if new_size >= old_size {
@@ -729,7 +725,9 @@ pub unsafe trait Alloc {
         let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
         let result = self.alloc(new_layout);
         if let Ok(new_ptr) = result {
-            ptr::copy_nonoverlapping(ptr as *const u8, new_ptr, cmp::min(old_size, new_size));
+            ptr::copy_nonoverlapping(ptr.as_ptr() as *const u8,
+                                     new_ptr.as_ptr() as *mut u8,
+                                     cmp::min(old_size, new_size));
             self.dealloc(ptr, layout);
         }
         result
@@ -751,11 +749,11 @@ pub unsafe trait Alloc {
     /// Clients wishing to abort computation in response to an
     /// allocation error are encouraged to call the allocator's `oom`
     /// method, rather than directly invoking `panic!` or similar.
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
         let size = layout.size();
         let p = self.alloc(layout);
         if let Ok(p) = p {
-            ptr::write_bytes(p, 0, size);
+            ptr::write_bytes(p.as_ptr() as *mut u8, 0, size);
         }
         p
     }
@@ -800,7 +798,7 @@ pub unsafe trait Alloc {
     /// reallocation error are encouraged to call the allocator's `oom`
     /// method, rather than directly invoking `panic!` or similar.
     unsafe fn realloc_excess(&mut self,
-                             ptr: *mut u8,
+                             ptr: NonNull<Void>,
                              layout: Layout,
                              new_size: usize) -> Result<Excess, AllocErr> {
         let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
@@ -845,7 +843,7 @@ pub unsafe trait Alloc {
     /// `grow_in_place` failures without aborting, or to fall back on
     /// another reallocation method before resorting to an abort.
     unsafe fn grow_in_place(&mut self,
-                            ptr: *mut u8,
+                            ptr: NonNull<Void>,
                             layout: Layout,
                             new_size: usize) -> Result<(), CannotReallocInPlace> {
         let _ = ptr; // this default implementation doesn't care about the actual address.
@@ -900,7 +898,7 @@ pub unsafe trait Alloc {
     /// `shrink_in_place` failures without aborting, or to fall back
     /// on another reallocation method before resorting to an abort.
     unsafe fn shrink_in_place(&mut self,
-                              ptr: *mut u8,
+                              ptr: NonNull<Void>,
                               layout: Layout,
                               new_size: usize) -> Result<(), CannotReallocInPlace> {
         let _ = ptr; // this default implementation doesn't care about the actual address.
@@ -951,7 +949,7 @@ pub unsafe trait Alloc {
     {
         let k = Layout::new::<T>();
         if k.size() > 0 {
-            unsafe { self.alloc(k).map(|p| NonNull::new_unchecked(p as *mut T)) }
+            unsafe { self.alloc(k).map(|p| p.cast()) }
         } else {
             Err(AllocErr)
         }
@@ -977,10 +975,9 @@ pub unsafe trait Alloc {
     unsafe fn dealloc_one<T>(&mut self, ptr: NonNull<T>)
         where Self: Sized
     {
-        let raw_ptr = ptr.as_ptr() as *mut u8;
         let k = Layout::new::<T>();
         if k.size() > 0 {
-            self.dealloc(raw_ptr, k);
+            self.dealloc(ptr.as_void(), k);
         }
     }
 
@@ -1020,10 +1017,7 @@ pub unsafe trait Alloc {
         match Layout::array::<T>(n) {
             Ok(ref layout) if layout.size() > 0 => {
                 unsafe {
-                    self.alloc(layout.clone())
-                        .map(|p| {
-                            NonNull::new_unchecked(p as *mut T)
-                        })
+                    self.alloc(layout.clone()).map(|p| p.cast())
                 }
             }
             _ => Err(AllocErr),
@@ -1068,11 +1062,10 @@ pub unsafe trait Alloc {
                                n_new: usize) -> Result<NonNull<T>, AllocErr>
         where Self: Sized
     {
-        match (Layout::array::<T>(n_old), Layout::array::<T>(n_new), ptr.as_ptr()) {
-            (Ok(ref k_old), Ok(ref k_new), ptr) if k_old.size() > 0 && k_new.size() > 0 => {
+        match (Layout::array::<T>(n_old), Layout::array::<T>(n_new)) {
+            (Ok(ref k_old), Ok(ref k_new)) if k_old.size() > 0 && k_new.size() > 0 => {
                 debug_assert!(k_old.align() == k_new.align());
-                self.realloc(ptr as *mut u8, k_old.clone(), k_new.size())
-                    .map(|p| NonNull::new_unchecked(p as *mut T))
+                self.realloc(ptr.as_void(), k_old.clone(), k_new.size()).map(NonNull::cast)
             }
             _ => {
                 Err(AllocErr)
@@ -1103,10 +1096,9 @@ pub unsafe trait Alloc {
     unsafe fn dealloc_array<T>(&mut self, ptr: NonNull<T>, n: usize) -> Result<(), AllocErr>
         where Self: Sized
     {
-        let raw_ptr = ptr.as_ptr() as *mut u8;
         match Layout::array::<T>(n) {
             Ok(ref k) if k.size() > 0 => {
-                Ok(self.dealloc(raw_ptr, k.clone()))
+                Ok(self.dealloc(ptr.as_void(), k.clone()))
             }
             _ => {
                 Err(AllocErr)
diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs
index c1e150e9fb909..f4e668328ce97 100644
--- a/src/libcore/ptr.rs
+++ b/src/libcore/ptr.rs
@@ -2750,6 +2750,14 @@ impl<T: ?Sized> NonNull<T> {
             NonNull::new_unchecked(self.as_ptr() as *mut U)
         }
     }
+
+    /// Cast to a `Void` pointer
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn as_void(self) -> NonNull<::alloc::Void> {
+        unsafe {
+            NonNull::new_unchecked(self.as_ptr() as _)
+        }
+    }
 }
 
 #[stable(feature = "nonnull", since = "1.25.0")]
diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs
index 502637051434e..38c993737880e 100644
--- a/src/libstd/collections/hash/table.rs
+++ b/src/libstd/collections/hash/table.rs
@@ -757,12 +757,10 @@ impl<K, V> RawTable<K, V> {
         let buffer = Global.alloc(Layout::from_size_align(size, alignment)
             .map_err(|_| CollectionAllocErr::CapacityOverflow)?)?;
 
-        let hashes = buffer as *mut HashUint;
-
         Ok(RawTable {
             capacity_mask: capacity.wrapping_sub(1),
             size: 0,
-            hashes: TaggedHashUintPtr::new(hashes),
+            hashes: TaggedHashUintPtr::new(buffer.cast().as_ptr()),
             marker: marker::PhantomData,
         })
     }
@@ -1185,7 +1183,7 @@ unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for RawTable<K, V> {
         debug_assert!(!oflo, "should be impossible");
 
         unsafe {
-            Global.dealloc(self.hashes.ptr() as *mut u8,
+            Global.dealloc(NonNull::new_unchecked(self.hashes.ptr()).as_void(),
                            Layout::from_size_align(size, align).unwrap());
             // Remember how everything was allocated out of one buffer
             // during initialization? We only need one call to free here.
diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs
index 25ba75fd35eb8..a34fcb5a7f98b 100644
--- a/src/libstd/lib.rs
+++ b/src/libstd/lib.rs
@@ -275,6 +275,7 @@
 #![feature(macro_reexport)]
 #![feature(macro_vis_matcher)]
 #![feature(needs_panic_runtime)]
+#![feature(nonnull_cast)]
 #![feature(exhaustive_patterns)]
 #![feature(nonzero)]
 #![feature(num_bits_bytes)]
diff --git a/src/test/run-pass/allocator/xcrate-use2.rs b/src/test/run-pass/allocator/xcrate-use2.rs
index 52eb963efdb74..b8e844522dc8b 100644
--- a/src/test/run-pass/allocator/xcrate-use2.rs
+++ b/src/test/run-pass/allocator/xcrate-use2.rs
@@ -30,7 +30,7 @@ fn main() {
         let layout = Layout::from_size_align(4, 2).unwrap();
 
         // Global allocator routes to the `custom_as_global` global
-        let ptr = Global.alloc(layout.clone()).unwrap();
+        let ptr = Global.alloc(layout.clone());
         helper::work_with(&ptr);
         assert_eq!(custom_as_global::get(), n + 1);
         Global.dealloc(ptr, layout.clone());
diff --git a/src/test/run-pass/realloc-16687.rs b/src/test/run-pass/realloc-16687.rs
index a562165d21b7d..49ab0ee33109d 100644
--- a/src/test/run-pass/realloc-16687.rs
+++ b/src/test/run-pass/realloc-16687.rs
@@ -13,10 +13,10 @@
 // Ideally this would be revised to use no_std, but for now it serves
 // well enough to reproduce (and illustrate) the bug from #16687.
 
-#![feature(heap_api, allocator_api)]
+#![feature(heap_api, allocator_api, nonnull_cast)]
 
-use std::heap::{Heap, Alloc, Layout};
-use std::ptr;
+use std::alloc::{Global, Alloc, Layout};
+use std::ptr::{self, NonNull};
 
 fn main() {
     unsafe {
@@ -50,13 +50,13 @@ unsafe fn test_triangle() -> bool {
             println!("allocate({:?})", layout);
         }
 
-        let ret = Heap.alloc(layout.clone()).unwrap_or_else(|_| Heap.oom());
+        let ret = Global.alloc(layout.clone()).unwrap_or_else(|_| Global.oom());
 
         if PRINT {
             println!("allocate({:?}) = {:?}", layout, ret);
         }
 
-        ret
+        ret.cast().as_ptr()
     }
 
     unsafe fn deallocate(ptr: *mut u8, layout: Layout) {
@@ -64,7 +64,7 @@ unsafe fn test_triangle() -> bool {
             println!("deallocate({:?}, {:?}", ptr, layout);
         }
 
-        Heap.dealloc(ptr, layout);
+        Global.dealloc(NonNull::new_unchecked(ptr).as_void(), layout);
     }
 
     unsafe fn reallocate(ptr: *mut u8, old: Layout, new: Layout) -> *mut u8 {
@@ -72,14 +72,14 @@ unsafe fn test_triangle() -> bool {
             println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new);
         }
 
-        let ret = Heap.realloc(ptr, old.clone(), new.clone())
-            .unwrap_or_else(|_| Heap.oom());
+        let ret = Global.realloc(NonNull::new_unchecked(ptr).as_void(), old.clone(), new.size())
+            .unwrap_or_else(|_| Global.oom());
 
         if PRINT {
             println!("reallocate({:?}, old={:?}, new={:?}) = {:?}",
                      ptr, old, new, ret);
         }
-        ret
+        ret.cast().as_ptr()
     }
 
     fn idx_to_size(i: usize) -> usize { (i+1) * 10 }
diff --git a/src/test/run-pass/regions-mock-trans.rs b/src/test/run-pass/regions-mock-trans.rs
index 7d34b8fd00fc8..3c37243c8b938 100644
--- a/src/test/run-pass/regions-mock-trans.rs
+++ b/src/test/run-pass/regions-mock-trans.rs
@@ -13,6 +13,7 @@
 #![feature(allocator_api)]
 
 use std::heap::{Alloc, Heap, Layout};
+use std::ptr::NonNull;
 
 struct arena(());
 
@@ -33,7 +34,7 @@ fn alloc<'a>(_bcx : &'a arena) -> &'a Bcx<'a> {
     unsafe {
         let ptr = Heap.alloc(Layout::new::<Bcx>())
             .unwrap_or_else(|_| Heap.oom());
-        &*(ptr as *const _)
+        &*(ptr.as_ptr() as *const _)
     }
 }
 
@@ -45,7 +46,7 @@ fn g(fcx : &Fcx) {
     let bcx = Bcx { fcx: fcx };
     let bcx2 = h(&bcx);
     unsafe {
-        Heap.dealloc(bcx2 as *const _ as *mut _, Layout::new::<Bcx>());
+        Heap.dealloc(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::<Bcx>());
     }
 }
 

From ed297777599081d11c4a337cf19c9b1a1112136b Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Wed, 11 Apr 2018 16:28:37 +0200
Subject: [PATCH 25/27] Remove conversions for allocated pointers
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

One was now unused, and `NonNull::new(…).ok_or(AllocErr)` feels short enough
for the few cases that need the other conversion.
---
 src/liballoc/alloc.rs      |  6 +++---
 src/liballoc_system/lib.rs | 16 ++++++++--------
 src/libcore/alloc.rs       | 17 -----------------
 3 files changed, 11 insertions(+), 28 deletions(-)
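
For reference, a sketch of the pattern the forwarding impls settle on instead of the removed `From` conversions (not part of the patch; same nightly/feature-gate assumptions, and `Wrapper` is a made-up type): a `GlobalAlloc`-backed `Alloc` impl maps a null return to `Err(AllocErr)` by hand.

#![feature(allocator_api)]

use std::alloc::{Alloc, AllocErr, GlobalAlloc, Layout, System, Void};
use std::ptr::NonNull;

struct Wrapper;

unsafe impl Alloc for Wrapper {
    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
        // A null return from GlobalAlloc::alloc becomes Err(AllocErr).
        NonNull::new(GlobalAlloc::alloc(&System, layout)).ok_or(AllocErr)
    }

    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
        GlobalAlloc::dealloc(&System, ptr.as_ptr(), layout)
    }
}

fn main() {
    unsafe {
        let layout = Layout::from_size_align(32, 8).unwrap();
        let mut a = Wrapper;
        let p = a.alloc(layout.clone()).expect("allocation failed");
        a.dealloc(p, layout);
    }
}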

diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index af48aa7961e06..031babe5f6d1a 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -122,7 +122,7 @@ unsafe impl GlobalAlloc for Global {
 unsafe impl Alloc for Global {
     #[inline]
     unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
-        GlobalAlloc::alloc(self, layout).into()
+        NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr)
     }
 
     #[inline]
@@ -137,12 +137,12 @@ unsafe impl Alloc for Global {
                       new_size: usize)
                       -> Result<NonNull<Void>, AllocErr>
     {
-        GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size).into()
+        NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
     }
 
     #[inline]
     unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
-        GlobalAlloc::alloc_zeroed(self, layout).into()
+        NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)).ok_or(AllocErr)
     }
 
     #[inline]
diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs
index bf27e972177ce..7fea6061169c6 100644
--- a/src/liballoc_system/lib.rs
+++ b/src/liballoc_system/lib.rs
@@ -51,12 +51,12 @@ pub struct System;
 unsafe impl Alloc for System {
     #[inline]
     unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
-        GlobalAlloc::alloc(self, layout).into()
+        NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr)
     }
 
     #[inline]
     unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
-        GlobalAlloc::alloc_zeroed(self, layout).into()
+        NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)).ok_or(AllocErr)
     }
 
     #[inline]
@@ -67,9 +67,9 @@ unsafe impl Alloc for System {
     #[inline]
     unsafe fn realloc(&mut self,
                       ptr: NonNull<Void>,
-                      old_layout: Layout,
+                      layout: Layout,
                       new_size: usize) -> Result<NonNull<Void>, AllocErr> {
-        GlobalAlloc::realloc(self, ptr.as_ptr(), old_layout, new_size).into()
+        NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
     }
 
     #[inline]
@@ -83,12 +83,12 @@ unsafe impl Alloc for System {
 unsafe impl<'a> Alloc for &'a System {
     #[inline]
     unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
-        GlobalAlloc::alloc(*self, layout).into()
+        NonNull::new(GlobalAlloc::alloc(*self, layout)).ok_or(AllocErr)
     }
 
     #[inline]
     unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
-        GlobalAlloc::alloc_zeroed(*self, layout).into()
+        NonNull::new(GlobalAlloc::alloc_zeroed(*self, layout)).ok_or(AllocErr)
     }
 
     #[inline]
@@ -99,9 +99,9 @@ unsafe impl<'a> Alloc for &'a System {
     #[inline]
     unsafe fn realloc(&mut self,
                       ptr: NonNull<Void>,
-                      old_layout: Layout,
+                      layout: Layout,
                       new_size: usize) -> Result<NonNull<Void>, AllocErr> {
-        GlobalAlloc::realloc(*self, ptr.as_ptr(), old_layout, new_size).into()
+        NonNull::new(GlobalAlloc::realloc(*self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
     }
 
     #[inline]
diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs
index 632eed960492c..97a49703bafd9 100644
--- a/src/libcore/alloc.rs
+++ b/src/libcore/alloc.rs
@@ -41,23 +41,6 @@ impl Void {
     }
 }
 
-/// Convert from a return value of GlobalAlloc::alloc to that of Alloc::alloc
-impl From<*mut Void> for Result<NonNull<Void>, AllocErr> {
-    fn from(ptr: *mut Void) -> Self {
-        NonNull::new(ptr).ok_or(AllocErr)
-    }
-}
-
-/// Convert from a return value of Alloc::alloc to that of GlobalAlloc::alloc
-impl From<Result<NonNull<Void>, AllocErr>> for *mut Void {
-    fn from(result: Result<NonNull<Void>, AllocErr>) -> Self {
-        match result {
-            Ok(ptr) => ptr.as_ptr(),
-            Err(_) => Void::null_mut(),
-        }
-    }
-}
-
 /// Represents the combination of a starting address and
 /// a total capacity of the returned block.
 #[derive(Debug)]

From f607a3872addf380846cae28661a777ec3e3c9a2 Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Wed, 11 Apr 2018 17:19:48 +0200
Subject: [PATCH 26/27] Rename alloc::Void to alloc::Opaque

---
 src/doc/nomicon                               |   2 +-
 .../src/language-features/global-allocator.md |   6 +-
 src/liballoc/alloc.rs                         |  26 ++---
 src/liballoc/arc.rs                           |   6 +-
 src/liballoc/btree/node.rs                    |  10 +-
 src/liballoc/heap.rs                          |  12 +-
 src/liballoc/raw_vec.rs                       |  22 ++--
 src/liballoc/rc.rs                            |  10 +-
 src/liballoc_system/lib.rs                    | 103 +++++++++---------
 src/libcore/alloc.rs                          |  38 +++----
 src/libcore/ptr.rs                            |   4 +-
 src/librustc_allocator/expand.rs              |  12 +-
 src/libstd/alloc.rs                           |   6 +-
 src/libstd/collections/hash/table.rs          |   2 +-
 .../run-make-fulldeps/std-core-cycle/bar.rs   |   4 +-
 .../run-pass/allocator/auxiliary/custom.rs    |   6 +-
 src/test/run-pass/allocator/custom.rs         |   6 +-
 src/test/run-pass/realloc-16687.rs            |   4 +-
 18 files changed, 139 insertions(+), 140 deletions(-)

diff --git a/src/doc/nomicon b/src/doc/nomicon
index 498ac2997420f..3c56329d1bd90 160000
--- a/src/doc/nomicon
+++ b/src/doc/nomicon
@@ -1 +1 @@
-Subproject commit 498ac2997420f7b25f7cd0a3f8202950d8ad93ec
+Subproject commit 3c56329d1bd9038e5341f1962bcd8d043312a712
diff --git a/src/doc/unstable-book/src/language-features/global-allocator.md b/src/doc/unstable-book/src/language-features/global-allocator.md
index a3f3ee65bf014..031b6347445e2 100644
--- a/src/doc/unstable-book/src/language-features/global-allocator.md
+++ b/src/doc/unstable-book/src/language-features/global-allocator.md
@@ -29,17 +29,17 @@ looks like:
 ```rust
 #![feature(global_allocator, allocator_api, heap_api)]
 
-use std::alloc::{GlobalAlloc, System, Layout, Void};
+use std::alloc::{GlobalAlloc, System, Layout, Opaque};
 use std::ptr::NonNull;
 
 struct MyAllocator;
 
 unsafe impl GlobalAlloc for MyAllocator {
-    unsafe fn alloc(&self, layout: Layout) -> *mut Void {
+    unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
         System.alloc(layout)
     }
 
-    unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
+    unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
         System.dealloc(ptr, layout)
     }
 }
diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index 031babe5f6d1a..68a617e0ffed4 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -76,36 +76,36 @@ pub const Heap: Global = Global;
 
 unsafe impl GlobalAlloc for Global {
     #[inline]
-    unsafe fn alloc(&self, layout: Layout) -> *mut Void {
+    unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
         #[cfg(not(stage0))]
         let ptr = __rust_alloc(layout.size(), layout.align());
         #[cfg(stage0)]
         let ptr = __rust_alloc(layout.size(), layout.align(), &mut 0);
-        ptr as *mut Void
+        ptr as *mut Opaque
     }
 
     #[inline]
-    unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
+    unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
         __rust_dealloc(ptr as *mut u8, layout.size(), layout.align())
     }
 
     #[inline]
-    unsafe fn realloc(&self, ptr: *mut Void, layout: Layout, new_size: usize) -> *mut Void {
+    unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque {
         #[cfg(not(stage0))]
         let ptr = __rust_realloc(ptr as *mut u8, layout.size(), layout.align(), new_size);
         #[cfg(stage0)]
         let ptr = __rust_realloc(ptr as *mut u8, layout.size(), layout.align(),
                                  new_size, layout.align(), &mut 0);
-        ptr as *mut Void
+        ptr as *mut Opaque
     }
 
     #[inline]
-    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Void {
+    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque {
         #[cfg(not(stage0))]
         let ptr = __rust_alloc_zeroed(layout.size(), layout.align());
         #[cfg(stage0)]
         let ptr = __rust_alloc_zeroed(layout.size(), layout.align(), &mut 0);
-        ptr as *mut Void
+        ptr as *mut Opaque
     }
 
     #[inline]
@@ -121,27 +121,27 @@ unsafe impl GlobalAlloc for Global {
 
 unsafe impl Alloc for Global {
     #[inline]
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
         NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr)
     }
 
     #[inline]
-    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
+    unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
         GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
     }
 
     #[inline]
     unsafe fn realloc(&mut self,
-                      ptr: NonNull<Void>,
+                      ptr: NonNull<Opaque>,
                       layout: Layout,
                       new_size: usize)
-                      -> Result<NonNull<Void>, AllocErr>
+                      -> Result<NonNull<Opaque>, AllocErr>
     {
         NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
     }
 
     #[inline]
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
         NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)).ok_or(AllocErr)
     }
 
@@ -178,7 +178,7 @@ pub(crate) unsafe fn box_free<T: ?Sized>(ptr: *mut T) {
     // We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
     if size != 0 {
         let layout = Layout::from_size_align_unchecked(size, align);
-        Global.dealloc(ptr as *mut Void, layout);
+        Global.dealloc(ptr as *mut Opaque, layout);
     }
 }
 
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 88754ace3ce2c..225b055d8ee82 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -518,7 +518,7 @@ impl<T: ?Sized> Arc<T> {
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()))
+            Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()))
         }
     }
 
@@ -637,7 +637,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
                     let slice = from_raw_parts_mut(self.elems, self.n_elems);
                     ptr::drop_in_place(slice);
 
-                    Global.dealloc(self.mem.as_void(), self.layout.clone());
+                    Global.dealloc(self.mem.as_opaque(), self.layout.clone());
                 }
             }
         }
@@ -1156,7 +1156,7 @@ impl<T: ?Sized> Drop for Weak<T> {
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
-                Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()))
+                Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()))
             }
         }
     }
diff --git a/src/liballoc/btree/node.rs b/src/liballoc/btree/node.rs
index 64aa40ac166e6..d6346662314e6 100644
--- a/src/liballoc/btree/node.rs
+++ b/src/liballoc/btree/node.rs
@@ -249,7 +249,7 @@ impl<K, V> Root<K, V> {
         self.as_mut().as_leaf_mut().parent = ptr::null();
 
         unsafe {
-            Global.dealloc(NonNull::from(top).as_void(), Layout::new::<InternalNode<K, V>>());
+            Global.dealloc(NonNull::from(top).as_opaque(), Layout::new::<InternalNode<K, V>>());
         }
     }
 }
@@ -435,7 +435,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
     > {
         let node = self.node;
         let ret = self.ascend().ok();
-        Global.dealloc(node.as_void(), Layout::new::<LeafNode<K, V>>());
+        Global.dealloc(node.as_opaque(), Layout::new::<LeafNode<K, V>>());
         ret
     }
 }
@@ -456,7 +456,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
     > {
         let node = self.node;
         let ret = self.ascend().ok();
-        Global.dealloc(node.as_void(), Layout::new::<InternalNode<K, V>>());
+        Global.dealloc(node.as_opaque(), Layout::new::<InternalNode<K, V>>());
         ret
     }
 }
@@ -1239,12 +1239,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
                 }
 
                 Global.dealloc(
-                    right_node.node.as_void(),
+                    right_node.node.as_opaque(),
                     Layout::new::<InternalNode<K, V>>(),
                 );
             } else {
                 Global.dealloc(
-                    right_node.node.as_void(),
+                    right_node.node.as_opaque(),
                     Layout::new::<LeafNode<K, V>>(),
                 );
             }
diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs
index cfb6504e743a1..faac38ca7ce15 100644
--- a/src/liballoc/heap.rs
+++ b/src/liballoc/heap.rs
@@ -10,7 +10,7 @@
 
 #![allow(deprecated)]
 
-pub use alloc::{Layout, AllocErr, CannotReallocInPlace, Void};
+pub use alloc::{Layout, AllocErr, CannotReallocInPlace, Opaque};
 use core::alloc::Alloc as CoreAlloc;
 use core::ptr::NonNull;
 
@@ -54,7 +54,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
     }
 
     unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        let ptr = NonNull::new_unchecked(ptr as *mut Void);
+        let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
         CoreAlloc::dealloc(self, ptr, layout)
     }
 
@@ -70,7 +70,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
                       ptr: *mut u8,
                       layout: Layout,
                       new_layout: Layout) -> Result<*mut u8, AllocErr> {
-        let ptr = NonNull::new_unchecked(ptr as *mut Void);
+        let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
         CoreAlloc::realloc(self, ptr, layout, new_layout.size()).map(|ptr| ptr.cast().as_ptr())
     }
 
@@ -87,7 +87,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
                              ptr: *mut u8,
                              layout: Layout,
                              new_layout: Layout) -> Result<Excess, AllocErr> {
-        let ptr = NonNull::new_unchecked(ptr as *mut Void);
+        let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
         CoreAlloc::realloc_excess(self, ptr, layout, new_layout.size())
             .map(|e| Excess(e.0 .cast().as_ptr(), e.1))
     }
@@ -96,7 +96,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
                             ptr: *mut u8,
                             layout: Layout,
                             new_layout: Layout) -> Result<(), CannotReallocInPlace> {
-        let ptr = NonNull::new_unchecked(ptr as *mut Void);
+        let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
         CoreAlloc::grow_in_place(self, ptr, layout, new_layout.size())
     }
 
@@ -104,7 +104,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
                               ptr: *mut u8,
                               layout: Layout,
                               new_layout: Layout) -> Result<(), CannotReallocInPlace> {
-        let ptr = NonNull::new_unchecked(ptr as *mut Void);
+        let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
         CoreAlloc::shrink_in_place(self, ptr, layout, new_layout.size())
     }
 }
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index d72301f5ad640..214cc7d7d0cd9 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -90,7 +90,7 @@ impl<T, A: Alloc> RawVec<T, A> {
 
             // handles ZSTs and `cap = 0` alike
             let ptr = if alloc_size == 0 {
-                NonNull::<T>::dangling().as_void()
+                NonNull::<T>::dangling().as_opaque()
             } else {
                 let align = mem::align_of::<T>();
                 let result = if zeroed {
@@ -310,7 +310,7 @@ impl<T, A: Alloc> RawVec<T, A> {
                     let new_cap = 2 * self.cap;
                     let new_size = new_cap * elem_size;
                     alloc_guard(new_size).expect("capacity overflow");
-                    let ptr_res = self.a.realloc(NonNull::from(self.ptr).as_void(),
+                    let ptr_res = self.a.realloc(NonNull::from(self.ptr).as_opaque(),
                                                  cur,
                                                  new_size);
                     match ptr_res {
@@ -369,7 +369,7 @@ impl<T, A: Alloc> RawVec<T, A> {
             let new_cap = 2 * self.cap;
             let new_size = new_cap * elem_size;
             alloc_guard(new_size).expect("capacity overflow");
-            match self.a.grow_in_place(NonNull::from(self.ptr).as_void(), old_layout, new_size) {
+            match self.a.grow_in_place(NonNull::from(self.ptr).as_opaque(), old_layout, new_size) {
                 Ok(_) => {
                     // We can't directly divide `size`.
                     self.cap = new_cap;
@@ -426,7 +426,7 @@ impl<T, A: Alloc> RawVec<T, A> {
             let res = match self.current_layout() {
                 Some(layout) => {
                     debug_assert!(new_layout.align() == layout.align());
-                    self.a.realloc(NonNull::from(self.ptr).as_void(), layout, new_layout.size())
+                    self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size())
                 }
                 None => self.a.alloc(new_layout),
             };
@@ -535,7 +535,7 @@ impl<T, A: Alloc> RawVec<T, A> {
             let res = match self.current_layout() {
                 Some(layout) => {
                     debug_assert!(new_layout.align() == layout.align());
-                    self.a.realloc(NonNull::from(self.ptr).as_void(), layout, new_layout.size())
+                    self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size())
                 }
                 None => self.a.alloc(new_layout),
             };
@@ -601,7 +601,7 @@ impl<T, A: Alloc> RawVec<T, A> {
             // FIXME: may crash and burn on over-reserve
             alloc_guard(new_layout.size()).expect("capacity overflow");
             match self.a.grow_in_place(
-                NonNull::from(self.ptr).as_void(), old_layout, new_layout.size(),
+                NonNull::from(self.ptr).as_opaque(), old_layout, new_layout.size(),
             ) {
                 Ok(_) => {
                     self.cap = new_cap;
@@ -662,7 +662,7 @@ impl<T, A: Alloc> RawVec<T, A> {
                 let new_size = elem_size * amount;
                 let align = mem::align_of::<T>();
                 let old_layout = Layout::from_size_align_unchecked(old_size, align);
-                match self.a.realloc(NonNull::from(self.ptr).as_void(),
+                match self.a.realloc(NonNull::from(self.ptr).as_opaque(),
                                      old_layout,
                                      new_size) {
                     Ok(p) => self.ptr = p.cast().into(),
@@ -698,7 +698,7 @@ impl<T, A: Alloc> RawVec<T, A> {
         let elem_size = mem::size_of::<T>();
         if elem_size != 0 {
             if let Some(layout) = self.current_layout() {
-                self.a.dealloc(NonNull::from(self.ptr).as_void(), layout);
+                self.a.dealloc(NonNull::from(self.ptr).as_opaque(), layout);
             }
         }
     }
@@ -734,7 +734,7 @@ fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use alloc::Void;
+    use alloc::Opaque;
 
     #[test]
     fn allocator_param() {
@@ -754,7 +754,7 @@ mod tests {
         // before allocation attempts start failing.
         struct BoundedAlloc { fuel: usize }
         unsafe impl Alloc for BoundedAlloc {
-            unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
+            unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
                 let size = layout.size();
                 if size > self.fuel {
                     return Err(AllocErr);
@@ -764,7 +764,7 @@ mod tests {
                     err @ Err(_) => err,
                 }
             }
-            unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
+            unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
                 Global.dealloc(ptr, layout)
             }
         }
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 1c835fe50decb..de0422d82bb76 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -259,7 +259,7 @@ use core::ops::CoerceUnsized;
 use core::ptr::{self, NonNull};
 use core::convert::From;
 
-use alloc::{Global, Alloc, Layout, Void, box_free};
+use alloc::{Global, Alloc, Layout, Opaque, box_free};
 use string::String;
 use vec::Vec;
 
@@ -737,7 +737,7 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
         // In the event of a panic, elements that have been written
         // into the new RcBox will be dropped, then the memory freed.
         struct Guard<T> {
-            mem: NonNull<Void>,
+            mem: NonNull<Opaque>,
             elems: *mut T,
             layout: Layout,
             n_elems: usize,
@@ -760,7 +760,7 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
             let v_ptr = v as *const [T];
             let ptr = Self::allocate_for_ptr(v_ptr);
 
-            let mem = ptr as *mut _ as *mut Void;
+            let mem = ptr as *mut _ as *mut Opaque;
             let layout = Layout::for_value(&*ptr);
 
             // Pointer to first element
@@ -844,7 +844,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
                 self.dec_weak();
 
                 if self.weak() == 0 {
-                    Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()));
+                    Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()));
                 }
             }
         }
@@ -1268,7 +1268,7 @@ impl<T: ?Sized> Drop for Weak<T> {
             // the weak count starts at 1, and will only go to zero if all
             // the strong pointers have disappeared.
             if self.weak() == 0 {
-                Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()));
+                Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()));
             }
         }
     }
diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs
index 7fea6061169c6..fd8109e2a4aab 100644
--- a/src/liballoc_system/lib.rs
+++ b/src/liballoc_system/lib.rs
@@ -41,7 +41,7 @@ const MIN_ALIGN: usize = 8;
 #[allow(dead_code)]
 const MIN_ALIGN: usize = 16;
 
-use core::alloc::{Alloc, GlobalAlloc, AllocErr, Layout, Void};
+use core::alloc::{Alloc, GlobalAlloc, AllocErr, Layout, Opaque};
 use core::ptr::NonNull;
 
 #[unstable(feature = "allocator_api", issue = "32838")]
@@ -50,25 +50,25 @@ pub struct System;
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl Alloc for System {
     #[inline]
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
         NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr)
     }
 
     #[inline]
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
         NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)).ok_or(AllocErr)
     }
 
     #[inline]
-    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
+    unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
         GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
     }
 
     #[inline]
     unsafe fn realloc(&mut self,
-                      ptr: NonNull<Void>,
+                      ptr: NonNull<Opaque>,
                       layout: Layout,
-                      new_size: usize) -> Result<NonNull<Void>, AllocErr> {
+                      new_size: usize) -> Result<NonNull<Opaque>, AllocErr> {
         NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
     }
 
@@ -82,25 +82,25 @@ unsafe impl Alloc for System {
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl<'a> Alloc for &'a System {
     #[inline]
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
         NonNull::new(GlobalAlloc::alloc(*self, layout)).ok_or(AllocErr)
     }
 
     #[inline]
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
         NonNull::new(GlobalAlloc::alloc_zeroed(*self, layout)).ok_or(AllocErr)
     }
 
     #[inline]
-    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
+    unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
         GlobalAlloc::dealloc(*self, ptr.as_ptr(), layout)
     }
 
     #[inline]
     unsafe fn realloc(&mut self,
-                      ptr: NonNull<Void>,
+                      ptr: NonNull<Opaque>,
                       layout: Layout,
-                      new_size: usize) -> Result<NonNull<Void>, AllocErr> {
+                      new_size: usize) -> Result<NonNull<Opaque>, AllocErr> {
         NonNull::new(GlobalAlloc::realloc(*self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
     }
 
@@ -112,13 +112,13 @@ unsafe impl<'a> Alloc for &'a System {
 
 #[cfg(any(windows, unix, target_os = "cloudabi", target_os = "redox"))]
 mod realloc_fallback {
-    use core::alloc::{GlobalAlloc, Void, Layout};
+    use core::alloc::{GlobalAlloc, Opaque, Layout};
     use core::cmp;
     use core::ptr;
 
     impl super::System {
-        pub(crate) unsafe fn realloc_fallback(&self, ptr: *mut Void, old_layout: Layout,
-                                              new_size: usize) -> *mut Void {
+        pub(crate) unsafe fn realloc_fallback(&self, ptr: *mut Opaque, old_layout: Layout,
+                                              new_size: usize) -> *mut Opaque {
             // Docs for GlobalAlloc::realloc require this to be valid:
             let new_layout = Layout::from_size_align_unchecked(new_size, old_layout.align());
 
@@ -141,20 +141,21 @@ mod platform {
 
     use MIN_ALIGN;
     use System;
-    use core::alloc::{GlobalAlloc, Layout, Void};
+    use core::alloc::{GlobalAlloc, Layout, Opaque};
 
     #[unstable(feature = "allocator_api", issue = "32838")]
     unsafe impl GlobalAlloc for System {
         #[inline]
-        unsafe fn alloc(&self, layout: Layout) -> *mut Void {
+        unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
             if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() {
-                libc::malloc(layout.size()) as *mut Void
+                libc::malloc(layout.size()) as *mut Opaque
             } else {
                 #[cfg(target_os = "macos")]
                 {
                     if layout.align() > (1 << 31) {
-                        // FIXME: use Void::null_mut https://github.com/rust-lang/rust/issues/49659
-                        return 0 as *mut Void
+                        // FIXME: use Opaque::null_mut
+                        // https://github.com/rust-lang/rust/issues/49659
+                        return 0 as *mut Opaque
                     }
                 }
                 aligned_malloc(&layout)
@@ -162,9 +163,9 @@ mod platform {
         }
 
         #[inline]
-        unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Void {
+        unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque {
             if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() {
-                libc::calloc(layout.size(), 1) as *mut Void
+                libc::calloc(layout.size(), 1) as *mut Opaque
             } else {
                 let ptr = self.alloc(layout.clone());
                 if !ptr.is_null() {
@@ -175,24 +176,23 @@ mod platform {
         }
 
         #[inline]
-        unsafe fn dealloc(&self, ptr: *mut Void, _layout: Layout) {
+        unsafe fn dealloc(&self, ptr: *mut Opaque, _layout: Layout) {
             libc::free(ptr as *mut libc::c_void)
         }
 
         #[inline]
-        unsafe fn realloc(&self, ptr: *mut Void, old_layout: Layout, new_size: usize) -> *mut Void {
-            let align = old_layout.align();
-            if align <= MIN_ALIGN && align <= new_size {
-                libc::realloc(ptr as *mut libc::c_void, new_size) as *mut Void
+        unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque {
+            if layout.align() <= MIN_ALIGN && layout.align() <= new_size {
+                libc::realloc(ptr as *mut libc::c_void, new_size) as *mut Opaque
             } else {
-                self.realloc_fallback(ptr, old_layout, new_size)
+                self.realloc_fallback(ptr, layout, new_size)
             }
         }
     }
 
     #[cfg(any(target_os = "android", target_os = "redox", target_os = "solaris"))]
     #[inline]
-    unsafe fn aligned_malloc(layout: &Layout) -> *mut Void {
+    unsafe fn aligned_malloc(layout: &Layout) -> *mut Opaque {
         // On android we currently target API level 9 which unfortunately
         // doesn't have the `posix_memalign` API used below. Instead we use
         // `memalign`, but this unfortunately has the property on some systems
@@ -210,19 +210,19 @@ mod platform {
         // [3]: https://bugs.chromium.org/p/chromium/issues/detail?id=138579
         // [4]: https://chromium.googlesource.com/chromium/src/base/+/master/
         //                                       /memory/aligned_memory.cc
-        libc::memalign(layout.align(), layout.size()) as *mut Void
+        libc::memalign(layout.align(), layout.size()) as *mut Opaque
     }
 
     #[cfg(not(any(target_os = "android", target_os = "redox", target_os = "solaris")))]
     #[inline]
-    unsafe fn aligned_malloc(layout: &Layout) -> *mut Void {
+    unsafe fn aligned_malloc(layout: &Layout) -> *mut Opaque {
         let mut out = ptr::null_mut();
         let ret = libc::posix_memalign(&mut out, layout.align(), layout.size());
         if ret != 0 {
-            // FIXME: use Void::null_mut https://github.com/rust-lang/rust/issues/49659
-            0 as *mut Void
+            // FIXME: use Opaque::null_mut https://github.com/rust-lang/rust/issues/49659
+            0 as *mut Opaque
         } else {
-            out as *mut Void
+            out as *mut Opaque
         }
     }
 }
@@ -232,7 +232,7 @@ mod platform {
 mod platform {
     use MIN_ALIGN;
     use System;
-    use core::alloc::{GlobalAlloc, Void, Layout};
+    use core::alloc::{GlobalAlloc, Opaque, Layout};
 
     type LPVOID = *mut u8;
     type HANDLE = LPVOID;
@@ -264,7 +264,7 @@ mod platform {
     }
 
     #[inline]
-    unsafe fn allocate_with_flags(layout: Layout, flags: DWORD) -> *mut Void {
+    unsafe fn allocate_with_flags(layout: Layout, flags: DWORD) -> *mut Opaque {
         let ptr = if layout.align() <= MIN_ALIGN {
             HeapAlloc(GetProcessHeap(), flags, layout.size())
         } else {
@@ -276,23 +276,23 @@ mod platform {
                 align_ptr(ptr, layout.align())
             }
         };
-        ptr as *mut Void
+        ptr as *mut Opaque
     }
 
     #[unstable(feature = "allocator_api", issue = "32838")]
     unsafe impl GlobalAlloc for System {
         #[inline]
-        unsafe fn alloc(&self, layout: Layout) -> *mut Void {
+        unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
             allocate_with_flags(layout, 0)
         }
 
         #[inline]
-        unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Void {
+        unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque {
             allocate_with_flags(layout, HEAP_ZERO_MEMORY)
         }
 
         #[inline]
-        unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
+        unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
             if layout.align() <= MIN_ALIGN {
                 let err = HeapFree(GetProcessHeap(), 0, ptr as LPVOID);
                 debug_assert!(err != 0, "Failed to free heap memory: {}",
@@ -306,12 +306,11 @@ mod platform {
         }
 
         #[inline]
-        unsafe fn realloc(&self, ptr: *mut Void, old_layout: Layout, new_size: usize) -> *mut Void {
-            let align = old_layout.align();
-            if align <= MIN_ALIGN {
-                HeapReAlloc(GetProcessHeap(), 0, ptr as LPVOID, new_size) as *mut Void
+        unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque {
+            if layout.align() <= MIN_ALIGN {
+                HeapReAlloc(GetProcessHeap(), 0, ptr as LPVOID, new_size) as *mut Opaque
             } else {
-                self.realloc_fallback(ptr, old_layout, new_size)
+                self.realloc_fallback(ptr, layout, new_size)
             }
         }
     }
@@ -338,7 +337,7 @@ mod platform {
 mod platform {
     extern crate dlmalloc;
 
-    use core::alloc::{GlobalAlloc, Layout, Void};
+    use core::alloc::{GlobalAlloc, Layout, Opaque};
     use System;
 
     // No need for synchronization here as wasm is currently single-threaded
@@ -347,23 +346,23 @@ mod platform {
     #[unstable(feature = "allocator_api", issue = "32838")]
     unsafe impl GlobalAlloc for System {
         #[inline]
-        unsafe fn alloc(&self, layout: Layout) -> *mut Void {
-            DLMALLOC.malloc(layout.size(), layout.align()) as *mut Void
+        unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
+            DLMALLOC.malloc(layout.size(), layout.align()) as *mut Opaque
         }
 
         #[inline]
-        unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Void {
-            DLMALLOC.calloc(layout.size(), layout.align()) as *mut Void
+        unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque {
+            DLMALLOC.calloc(layout.size(), layout.align()) as *mut Opaque
         }
 
         #[inline]
-        unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
+        unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
             DLMALLOC.free(ptr as *mut u8, layout.size(), layout.align())
         }
 
         #[inline]
-        unsafe fn realloc(&self, ptr: *mut Void, layout: Layout, new_size: usize) -> *mut Void {
-            DLMALLOC.realloc(ptr as *mut u8, layout.size(), layout.align(), new_size) as *mut Void
+        unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque {
+            DLMALLOC.realloc(ptr as *mut u8, layout.size(), layout.align(), new_size) as *mut Opaque
         }
     }
 }
diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs
index 97a49703bafd9..fdba91bec8013 100644
--- a/src/libcore/alloc.rs
+++ b/src/libcore/alloc.rs
@@ -24,12 +24,12 @@ use ptr::{self, NonNull};
 extern {
     /// An opaque, unsized type. Used for pointers to allocated memory.
     ///
-    /// This type can only be used behind a pointer like `*mut Void` or `ptr::NonNull<Void>`.
+    /// This type can only be used behind a pointer like `*mut Opaque` or `ptr::NonNull<Opaque>`.
     /// Such pointers are similar to C’s `void*` type.
-    pub type Void;
+    pub type Opaque;
 }
 
-impl Void {
+impl Opaque {
     /// Similar to `std::ptr::null`, which requires `T: Sized`.
     pub fn null() -> *const Self {
         0 as _
@@ -44,7 +44,7 @@ impl Void {
 /// Represents the combination of a starting address and
 /// a total capacity of the returned block.
 #[derive(Debug)]
-pub struct Excess(pub NonNull<Void>, pub usize);
+pub struct Excess(pub NonNull<Opaque>, pub usize);
 
 fn size_align<T>() -> (usize, usize) {
     (mem::size_of::<T>(), mem::align_of::<T>())
@@ -387,11 +387,11 @@ impl From<AllocErr> for CollectionAllocErr {
 
 // FIXME: docs
 pub unsafe trait GlobalAlloc {
-    unsafe fn alloc(&self, layout: Layout) -> *mut Void;
+    unsafe fn alloc(&self, layout: Layout) -> *mut Opaque;
 
-    unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout);
+    unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout);
 
-    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Void {
+    unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque {
         let size = layout.size();
         let ptr = self.alloc(layout);
         if !ptr.is_null() {
@@ -404,7 +404,7 @@ pub unsafe trait GlobalAlloc {
     ///
     /// `new_size`, when rounded up to the nearest multiple of `old_layout.align()`,
     /// must not overflow (i.e. the rounded value must be less than `usize::MAX`).
-    unsafe fn realloc(&self, ptr: *mut Void, old_layout: Layout, new_size: usize) -> *mut Void {
+    unsafe fn realloc(&self, ptr: *mut Opaque, old_layout: Layout, new_size: usize) -> *mut Opaque {
         let new_layout = Layout::from_size_align_unchecked(new_size, old_layout.align());
         let new_ptr = self.alloc(new_layout);
         if !new_ptr.is_null() {
@@ -554,7 +554,7 @@ pub unsafe trait Alloc {
     /// Clients wishing to abort computation in response to an
     /// allocation error are encouraged to call the allocator's `oom`
     /// method, rather than directly invoking `panic!` or similar.
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr>;
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr>;
 
     /// Deallocate the memory referenced by `ptr`.
     ///
@@ -571,7 +571,7 @@ pub unsafe trait Alloc {
     /// * In addition to fitting the block of memory `layout`, the
     ///   alignment of the `layout` must match the alignment used
     ///   to allocate that block of memory.
-    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout);
+    unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout);
 
     /// Allocator-specific method for signaling an out-of-memory
     /// condition.
@@ -689,9 +689,9 @@ pub unsafe trait Alloc {
     /// reallocation error are encouraged to call the allocator's `oom`
     /// method, rather than directly invoking `panic!` or similar.
     unsafe fn realloc(&mut self,
-                      ptr: NonNull<Void>,
+                      ptr: NonNull<Opaque>,
                       layout: Layout,
-                      new_size: usize) -> Result<NonNull<Void>, AllocErr> {
+                      new_size: usize) -> Result<NonNull<Opaque>, AllocErr> {
         let old_size = layout.size();
 
         if new_size >= old_size {
@@ -732,7 +732,7 @@ pub unsafe trait Alloc {
     /// Clients wishing to abort computation in response to an
     /// allocation error are encouraged to call the allocator's `oom`
     /// method, rather than directly invoking `panic!` or similar.
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
         let size = layout.size();
         let p = self.alloc(layout);
         if let Ok(p) = p {
@@ -781,7 +781,7 @@ pub unsafe trait Alloc {
     /// reallocation error are encouraged to call the allocator's `oom`
     /// method, rather than directly invoking `panic!` or similar.
     unsafe fn realloc_excess(&mut self,
-                             ptr: NonNull<Void>,
+                             ptr: NonNull<Opaque>,
                              layout: Layout,
                              new_size: usize) -> Result<Excess, AllocErr> {
         let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
@@ -826,7 +826,7 @@ pub unsafe trait Alloc {
     /// `grow_in_place` failures without aborting, or to fall back on
     /// another reallocation method before resorting to an abort.
     unsafe fn grow_in_place(&mut self,
-                            ptr: NonNull<Void>,
+                            ptr: NonNull<Opaque>,
                             layout: Layout,
                             new_size: usize) -> Result<(), CannotReallocInPlace> {
         let _ = ptr; // this default implementation doesn't care about the actual address.
@@ -881,7 +881,7 @@ pub unsafe trait Alloc {
     /// `shrink_in_place` failures without aborting, or to fall back
     /// on another reallocation method before resorting to an abort.
     unsafe fn shrink_in_place(&mut self,
-                              ptr: NonNull<Void>,
+                              ptr: NonNull<Opaque>,
                               layout: Layout,
                               new_size: usize) -> Result<(), CannotReallocInPlace> {
         let _ = ptr; // this default implementation doesn't care about the actual address.
@@ -960,7 +960,7 @@ pub unsafe trait Alloc {
     {
         let k = Layout::new::<T>();
         if k.size() > 0 {
-            self.dealloc(ptr.as_void(), k);
+            self.dealloc(ptr.as_opaque(), k);
         }
     }
 
@@ -1048,7 +1048,7 @@ pub unsafe trait Alloc {
         match (Layout::array::<T>(n_old), Layout::array::<T>(n_new)) {
             (Ok(ref k_old), Ok(ref k_new)) if k_old.size() > 0 && k_new.size() > 0 => {
                 debug_assert!(k_old.align() == k_new.align());
-                self.realloc(ptr.as_void(), k_old.clone(), k_new.size()).map(NonNull::cast)
+                self.realloc(ptr.as_opaque(), k_old.clone(), k_new.size()).map(NonNull::cast)
             }
             _ => {
                 Err(AllocErr)
@@ -1081,7 +1081,7 @@ pub unsafe trait Alloc {
     {
         match Layout::array::<T>(n) {
             Ok(ref k) if k.size() > 0 => {
-                Ok(self.dealloc(ptr.as_void(), k.clone()))
+                Ok(self.dealloc(ptr.as_opaque(), k.clone()))
             }
             _ => {
                 Err(AllocErr)
diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs
index f4e668328ce97..4a7d7c410eb16 100644
--- a/src/libcore/ptr.rs
+++ b/src/libcore/ptr.rs
@@ -2751,9 +2751,9 @@ impl<T: ?Sized> NonNull<T> {
         }
     }
 
-    /// Cast to a `Void` pointer
+    /// Cast to an `Opaque` pointer
     #[unstable(feature = "allocator_api", issue = "32838")]
-    pub fn as_void(self) -> NonNull<::alloc::Void> {
+    pub fn as_opaque(self) -> NonNull<::alloc::Opaque> {
         unsafe {
             NonNull::new_unchecked(self.as_ptr() as _)
         }
diff --git a/src/librustc_allocator/expand.rs b/src/librustc_allocator/expand.rs
index 58d4c7f289c39..305502e7f063b 100644
--- a/src/librustc_allocator/expand.rs
+++ b/src/librustc_allocator/expand.rs
@@ -221,7 +221,7 @@ impl<'a> AllocFnFactory<'a> {
                 let ident = ident();
                 args.push(self.cx.arg(self.span, ident, self.ptr_u8()));
                 let arg = self.cx.expr_ident(self.span, ident);
-                self.cx.expr_cast(self.span, arg, self.ptr_void())
+                self.cx.expr_cast(self.span, arg, self.ptr_opaque())
             }
 
             AllocatorTy::Usize => {
@@ -276,13 +276,13 @@ impl<'a> AllocFnFactory<'a> {
         self.cx.ty_ptr(self.span, ty_u8, Mutability::Mutable)
     }
 
-    fn ptr_void(&self) -> P<Ty> {
-        let void = self.cx.path(self.span, vec![
+    fn ptr_opaque(&self) -> P<Ty> {
+        let opaque = self.cx.path(self.span, vec![
             self.core,
             Ident::from_str("alloc"),
-            Ident::from_str("Void"),
+            Ident::from_str("Opaque"),
         ]);
-        let ty_void = self.cx.ty_path(void);
-        self.cx.ty_ptr(self.span, ty_void, Mutability::Mutable)
+        let ty_opaque = self.cx.ty_path(opaque);
+        self.cx.ty_ptr(self.span, ty_opaque, Mutability::Mutable)
     }
 }
diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs
index 4e728df010a47..ff578ec42d230 100644
--- a/src/libstd/alloc.rs
+++ b/src/libstd/alloc.rs
@@ -21,7 +21,7 @@
 #[doc(hidden)]
 #[allow(unused_attributes)]
 pub mod __default_lib_allocator {
-    use super::{System, Layout, GlobalAlloc, Void};
+    use super::{System, Layout, GlobalAlloc, Opaque};
     // for symbol names src/librustc/middle/allocator.rs
     // for signatures src/librustc_allocator/lib.rs
 
@@ -46,7 +46,7 @@ pub mod __default_lib_allocator {
     pub unsafe extern fn __rdl_dealloc(ptr: *mut u8,
                                        size: usize,
                                        align: usize) {
-        System.dealloc(ptr as *mut Void, Layout::from_size_align_unchecked(size, align))
+        System.dealloc(ptr as *mut Opaque, Layout::from_size_align_unchecked(size, align))
     }
 
     #[no_mangle]
@@ -56,7 +56,7 @@ pub mod __default_lib_allocator {
                                        align: usize,
                                        new_size: usize) -> *mut u8 {
         let old_layout = Layout::from_size_align_unchecked(old_size, align);
-        System.realloc(ptr as *mut Void, old_layout, new_size) as *mut u8
+        System.realloc(ptr as *mut Opaque, old_layout, new_size) as *mut u8
     }
 
     #[no_mangle]
diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs
index 38c993737880e..93f059076d794 100644
--- a/src/libstd/collections/hash/table.rs
+++ b/src/libstd/collections/hash/table.rs
@@ -1183,7 +1183,7 @@ unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for RawTable<K, V> {
         debug_assert!(!oflo, "should be impossible");
 
         unsafe {
-            Global.dealloc(NonNull::new_unchecked(self.hashes.ptr()).as_void(),
+            Global.dealloc(NonNull::new_unchecked(self.hashes.ptr()).as_opaque(),
                            Layout::from_size_align(size, align).unwrap());
             // Remember how everything was allocated out of one buffer
             // during initialization? We only need one call to free here.
diff --git a/src/test/run-make-fulldeps/std-core-cycle/bar.rs b/src/test/run-make-fulldeps/std-core-cycle/bar.rs
index 20b87028fd1b3..62fd2ade1ca5c 100644
--- a/src/test/run-make-fulldeps/std-core-cycle/bar.rs
+++ b/src/test/run-make-fulldeps/std-core-cycle/bar.rs
@@ -16,11 +16,11 @@ use std::alloc::*;
 pub struct A;
 
 unsafe impl GlobalAlloc for A {
-    unsafe fn alloc(&self, _: Layout) -> *mut Void {
+    unsafe fn alloc(&self, _: Layout) -> *mut Opaque {
         loop {}
     }
 
-    unsafe fn dealloc(&self, _ptr: *mut Void, _: Layout) {
+    unsafe fn dealloc(&self, _ptr: *mut Opaque, _: Layout) {
         loop {}
     }
 }
diff --git a/src/test/run-pass/allocator/auxiliary/custom.rs b/src/test/run-pass/allocator/auxiliary/custom.rs
index 95096efc7ef68..e6a2e22983b25 100644
--- a/src/test/run-pass/allocator/auxiliary/custom.rs
+++ b/src/test/run-pass/allocator/auxiliary/custom.rs
@@ -13,18 +13,18 @@
 #![feature(heap_api, allocator_api)]
 #![crate_type = "rlib"]
 
-use std::heap::{GlobalAlloc, System, Layout, Void};
+use std::heap::{GlobalAlloc, System, Layout, Opaque};
 use std::sync::atomic::{AtomicUsize, Ordering};
 
 pub struct A(pub AtomicUsize);
 
 unsafe impl GlobalAlloc for A {
-    unsafe fn alloc(&self, layout: Layout) -> *mut Void {
+    unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
         self.0.fetch_add(1, Ordering::SeqCst);
         System.alloc(layout)
     }
 
-    unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
+    unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
         self.0.fetch_add(1, Ordering::SeqCst);
         System.dealloc(ptr, layout)
     }
diff --git a/src/test/run-pass/allocator/custom.rs b/src/test/run-pass/allocator/custom.rs
index f7b2fd73c87b9..415d39a593e16 100644
--- a/src/test/run-pass/allocator/custom.rs
+++ b/src/test/run-pass/allocator/custom.rs
@@ -15,7 +15,7 @@
 
 extern crate helper;
 
-use std::alloc::{self, Global, Alloc, System, Layout, Void};
+use std::alloc::{self, Global, Alloc, System, Layout, Opaque};
 use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
 
 static HITS: AtomicUsize = ATOMIC_USIZE_INIT;
@@ -23,12 +23,12 @@ static HITS: AtomicUsize = ATOMIC_USIZE_INIT;
 struct A;
 
 unsafe impl alloc::GlobalAlloc for A {
-    unsafe fn alloc(&self, layout: Layout) -> *mut Void {
+    unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
         HITS.fetch_add(1, Ordering::SeqCst);
         System.alloc(layout)
     }
 
-    unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
+    unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
         HITS.fetch_add(1, Ordering::SeqCst);
         System.dealloc(ptr, layout)
     }
diff --git a/src/test/run-pass/realloc-16687.rs b/src/test/run-pass/realloc-16687.rs
index 49ab0ee33109d..38cc23c16a976 100644
--- a/src/test/run-pass/realloc-16687.rs
+++ b/src/test/run-pass/realloc-16687.rs
@@ -64,7 +64,7 @@ unsafe fn test_triangle() -> bool {
             println!("deallocate({:?}, {:?}", ptr, layout);
         }
 
-        Global.dealloc(NonNull::new_unchecked(ptr).as_void(), layout);
+        Global.dealloc(NonNull::new_unchecked(ptr).as_opaque(), layout);
     }
 
     unsafe fn reallocate(ptr: *mut u8, old: Layout, new: Layout) -> *mut u8 {
@@ -72,7 +72,7 @@ unsafe fn test_triangle() -> bool {
             println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new);
         }
 
-        let ret = Global.realloc(NonNull::new_unchecked(ptr).as_void(), old.clone(), new.size())
+        let ret = Global.realloc(NonNull::new_unchecked(ptr).as_opaque(), old.clone(), new.size())
             .unwrap_or_else(|_| Global.oom());
 
         if PRINT {

From c5ffdd787d134c06735a1dc4457515a63bbce5f5 Mon Sep 17 00:00:00 2001
From: Simon Sapin <simon.sapin@exyr.org>
Date: Wed, 11 Apr 2018 20:16:45 +0200
Subject: [PATCH 27/27] Initial docs for the GlobalAlloc trait
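
For context, a minimal implementation of the trait being documented here looks
roughly like the following (a sketch based on the unstable-book example updated
earlier in this series; `MyAllocator` and the `main` body are illustrative, and
`alloc_zeroed`, `realloc`, and `oom` keep their default implementations):

    #![feature(global_allocator, allocator_api, heap_api)]

    use std::alloc::{GlobalAlloc, Layout, Opaque, System};

    struct MyAllocator;

    unsafe impl GlobalAlloc for MyAllocator {
        // Forward to the system allocator; a null return signals failure.
        unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
            System.alloc(layout)
        }

        unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
            System.dealloc(ptr, layout)
        }
    }

    #[global_allocator]
    static GLOBAL: MyAllocator = MyAllocator;

    fn main() {
        // Heap allocations in this program now go through MyAllocator.
        let v = vec![1, 2, 3];
        assert_eq!(v.len(), 3);
    }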

---
 src/libcore/alloc.rs | 44 +++++++++++++++++++++++++++++++++++++++-----
 1 file changed, 39 insertions(+), 5 deletions(-)

diff --git a/src/libcore/alloc.rs b/src/libcore/alloc.rs
index fdba91bec8013..8f8849e32e6cb 100644
--- a/src/libcore/alloc.rs
+++ b/src/libcore/alloc.rs
@@ -385,10 +385,25 @@ impl From<AllocErr> for CollectionAllocErr {
     }
 }
 
-// FIXME: docs
+/// A memory allocator that can be registered as the one backing `std::alloc::Global`
+/// through the `#[global_allocator]` attribute.
 pub unsafe trait GlobalAlloc {
+    /// Allocate memory as described by the given `layout`.
+    ///
+    /// Returns a pointer to newly-allocated memory,
+    /// or NULL to indicate allocation failure.
+    ///
+    /// # Safety
+    ///
+    /// **FIXME:** what are the exact requirements?
     unsafe fn alloc(&self, layout: Layout) -> *mut Opaque;
 
+    /// Deallocate the block of memory at the given `ptr` with the given `layout`.
+    ///
+    /// # Safety
+    ///
+    /// **FIXME:** what are the exact requirements?
+    /// In particular around layout *fit*. (See docs for the `Alloc` trait.)
     unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout);
 
     unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque {
@@ -400,24 +415,43 @@ pub unsafe trait GlobalAlloc {
         ptr
     }
 
+    /// Shrink or grow a block of memory to the given `new_size`.
+    /// The block is described by the given `ptr` and `layout`.
+    ///
+    /// Returns a new pointer (which may or may not be the same as `ptr`),
+    /// or NULL to indicate reallocation failure.
+    ///
+    /// If reallocation is successful, the old `ptr` is considered
+    /// to have been deallocated.
+    ///
     /// # Safety
     ///
-    /// `new_size`, when rounded up to the nearest multiple of `old_layout.align()`,
-    /// must not overflow (i.e. the rounded value must be less than `usize::MAX`).
+    /// `new_size`, when rounded up to the nearest multiple of `layout.align()`,
+    /// must not overflow (i.e. the rounded value must be less than `usize::MAX`).
-    unsafe fn realloc(&self, ptr: *mut Opaque, old_layout: Layout, new_size: usize) -> *mut Opaque {
-        let new_layout = Layout::from_size_align_unchecked(new_size, old_layout.align());
+    ///
+    /// **FIXME:** what are the exact requirements?
+    /// In particular around layout *fit*. (See docs for the `Alloc` trait.)
+    unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque {
+        let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
         let new_ptr = self.alloc(new_layout);
         if !new_ptr.is_null() {
             ptr::copy_nonoverlapping(
                 ptr as *const u8,
                 new_ptr as *mut u8,
-                cmp::min(old_layout.size(), new_size),
+                cmp::min(layout.size(), new_size),
             );
-            self.dealloc(ptr, old_layout);
+            self.dealloc(ptr, layout);
         }
         new_ptr
     }
 
+    /// Aborts the thread or process, optionally performing
+    /// cleanup or logging diagnostic information before
+    /// aborting.
+    ///
+    /// `oom` is meant to be used by clients who are unable to cope
+    /// with an unsatisfied allocation request and wish to abandon
+    /// computation rather than attempt to recover locally.
     fn oom(&self) -> ! {
         unsafe { ::intrinsics::abort() }
     }