From 81cd1e64f38299276ce131db37994086ec94ad35 Mon Sep 17 00:00:00 2001 From: Scott McMurray Date: Tue, 15 Jan 2019 20:48:52 -0800 Subject: [PATCH 01/34] Deprecate the unstable Vec::resize_default --- src/liballoc/vec.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/liballoc/vec.rs b/src/liballoc/vec.rs index ba3b3dfbfc2e1..198d1ee4bb4fc 100644 --- a/src/liballoc/vec.rs +++ b/src/liballoc/vec.rs @@ -1368,6 +1368,7 @@ impl Vec { /// # Examples /// /// ``` + /// # #![allow(deprecated)] /// #![feature(vec_resize_default)] /// /// let mut vec = vec![1, 2, 3]; @@ -1384,6 +1385,9 @@ impl Vec { /// [`Default`]: ../../std/default/trait.Default.html /// [`Clone`]: ../../std/clone/trait.Clone.html #[unstable(feature = "vec_resize_default", issue = "41758")] + #[rustc_deprecated(reason = "This is moving towards being removed in favor \ + of `.resize_with(Default::default)`. If you disagree, please comment \ + in the tracking issue.", since = "1.33.0")] pub fn resize_default(&mut self, new_len: usize) { let len = self.len(); From c6549681008a09b9a267f1470fe959b428d69736 Mon Sep 17 00:00:00 2001 From: Oliver Middleton Date: Thu, 17 Jan 2019 09:22:52 +0000 Subject: [PATCH 02/34] Deny the `overflowing_literals` lint for all editions --- .../src/lints/listing/deny-by-default.md | 20 +++++++++++++++++++ .../src/lints/listing/warn-by-default.md | 20 ------------------- src/librustc_lint/types.rs | 6 ++---- .../deny-overflowing-literals.rs} | 2 -- .../deny-overflowing-literals.stderr} | 2 +- src/test/ui/lint/lint-type-limits2.rs | 1 + src/test/ui/lint/lint-type-limits2.stderr | 10 +++++++--- src/test/ui/lint/lint-type-limits3.rs | 1 + src/test/ui/lint/lint-type-limits3.stderr | 10 +++++++--- src/test/ui/lint/type-overflow.rs | 1 + src/test/ui/lint/type-overflow.stderr | 20 +++++++++++-------- 11 files changed, 52 insertions(+), 41 deletions(-) rename src/test/ui/{editions/edition-deny-overflowing-literals-2018.rs => lint/deny-overflowing-literals.rs} (82%) rename src/test/ui/{editions/edition-deny-overflowing-literals-2018.stderr => lint/deny-overflowing-literals.stderr} (76%) diff --git a/src/doc/rustc/src/lints/listing/deny-by-default.md b/src/doc/rustc/src/lints/listing/deny-by-default.md index ff9e0235a0435..fa62d1a03f53b 100644 --- a/src/doc/rustc/src/lints/listing/deny-by-default.md +++ b/src/doc/rustc/src/lints/listing/deny-by-default.md @@ -149,6 +149,26 @@ error: const items should never be #[no_mangle] | ``` +## overflowing-literals + +This lint detects literal out of range for its type. Some +example code that triggers this lint: + +```rust,compile_fail +let x: u8 = 1000; +``` + +This will produce: + +```text +error: literal out of range for u8 + --> src/main.rs:2:17 + | +2 | let x: u8 = 1000; + | ^^^^ + | +``` + ## parenthesized-params-in-types-and-modules This lint detects incorrect parentheses. Some example code that triggers this diff --git a/src/doc/rustc/src/lints/listing/warn-by-default.md b/src/doc/rustc/src/lints/listing/warn-by-default.md index 7fbbe686b5bd0..ba927b1ef3b57 100644 --- a/src/doc/rustc/src/lints/listing/warn-by-default.md +++ b/src/doc/rustc/src/lints/listing/warn-by-default.md @@ -285,26 +285,6 @@ warning: functions generic over types must be mangled | ``` -## overflowing-literals - -This lint detects literal out of range for its type. 
Some -example code that triggers this lint: - -```rust -let x: u8 = 1000; -``` - -This will produce: - -```text -warning: literal out of range for u8 - --> src/main.rs:2:17 - | -2 | let x: u8 = 1000; - | ^^^^ - | -``` - ## path-statements This lint detects path statements with no effect. Some example code that diff --git a/src/librustc_lint/types.rs b/src/librustc_lint/types.rs index 642681a73a8a0..6d391814414fd 100644 --- a/src/librustc_lint/types.rs +++ b/src/librustc_lint/types.rs @@ -15,7 +15,6 @@ use std::{i8, i16, i32, i64, u8, u16, u32, u64, f32, f64}; use syntax::{ast, attr}; use syntax::errors::Applicability; use rustc_target::spec::abi::Abi; -use syntax::edition::Edition; use syntax_pos::Span; use syntax::source_map; @@ -29,9 +28,8 @@ declare_lint! { declare_lint! { OVERFLOWING_LITERALS, - Warn, - "literal out of range for its type", - Edition::Edition2018 => Deny + Deny, + "literal out of range for its type" } declare_lint! { diff --git a/src/test/ui/editions/edition-deny-overflowing-literals-2018.rs b/src/test/ui/lint/deny-overflowing-literals.rs similarity index 82% rename from src/test/ui/editions/edition-deny-overflowing-literals-2018.rs rename to src/test/ui/lint/deny-overflowing-literals.rs index 0527d75214a57..ebd6654d39b1f 100644 --- a/src/test/ui/editions/edition-deny-overflowing-literals-2018.rs +++ b/src/test/ui/lint/deny-overflowing-literals.rs @@ -1,5 +1,3 @@ -// edition:2018 - fn main() { let x: u8 = 256; //~^ error: literal out of range for u8 diff --git a/src/test/ui/editions/edition-deny-overflowing-literals-2018.stderr b/src/test/ui/lint/deny-overflowing-literals.stderr similarity index 76% rename from src/test/ui/editions/edition-deny-overflowing-literals-2018.stderr rename to src/test/ui/lint/deny-overflowing-literals.stderr index 6d1d8568bcf8f..7313dd0bfb5a7 100644 --- a/src/test/ui/editions/edition-deny-overflowing-literals-2018.stderr +++ b/src/test/ui/lint/deny-overflowing-literals.stderr @@ -1,5 +1,5 @@ error: literal out of range for u8 - --> $DIR/edition-deny-overflowing-literals-2018.rs:4:17 + --> $DIR/deny-overflowing-literals.rs:2:17 | LL | let x: u8 = 256; | ^^^ diff --git a/src/test/ui/lint/lint-type-limits2.rs b/src/test/ui/lint/lint-type-limits2.rs index 9c69ba12cf79e..c4486e0676887 100644 --- a/src/test/ui/lint/lint-type-limits2.rs +++ b/src/test/ui/lint/lint-type-limits2.rs @@ -1,4 +1,5 @@ #![allow(dead_code)] +#![warn(overflowing_literals)] // compile-flags: -D unused-comparisons fn main() { } diff --git a/src/test/ui/lint/lint-type-limits2.stderr b/src/test/ui/lint/lint-type-limits2.stderr index 3118bcec05e85..f88fff62e21fc 100644 --- a/src/test/ui/lint/lint-type-limits2.stderr +++ b/src/test/ui/lint/lint-type-limits2.stderr @@ -1,5 +1,5 @@ error: comparison is useless due to type limits - --> $DIR/lint-type-limits2.rs:12:5 + --> $DIR/lint-type-limits2.rs:13:5 | LL | 128 > bar() //~ ERROR comparison is useless due to type limits | ^^^^^^^^^^^ @@ -7,12 +7,16 @@ LL | 128 > bar() //~ ERROR comparison is useless due to type limits = note: requested on the command line with `-D unused-comparisons` warning: literal out of range for i8 - --> $DIR/lint-type-limits2.rs:12:5 + --> $DIR/lint-type-limits2.rs:13:5 | LL | 128 > bar() //~ ERROR comparison is useless due to type limits | ^^^ | - = note: #[warn(overflowing_literals)] on by default +note: lint level defined here + --> $DIR/lint-type-limits2.rs:2:9 + | +LL | #![warn(overflowing_literals)] + | ^^^^^^^^^^^^^^^^^^^^ error: aborting due to previous error diff --git 
a/src/test/ui/lint/lint-type-limits3.rs b/src/test/ui/lint/lint-type-limits3.rs index 3e95ad212eb16..a715c69f7849e 100644 --- a/src/test/ui/lint/lint-type-limits3.rs +++ b/src/test/ui/lint/lint-type-limits3.rs @@ -1,4 +1,5 @@ #![allow(dead_code)] +#![warn(overflowing_literals)] // compile-flags: -D unused-comparisons fn main() { } diff --git a/src/test/ui/lint/lint-type-limits3.stderr b/src/test/ui/lint/lint-type-limits3.stderr index 5e30b1646a755..4f47a7ce31665 100644 --- a/src/test/ui/lint/lint-type-limits3.stderr +++ b/src/test/ui/lint/lint-type-limits3.stderr @@ -1,5 +1,5 @@ error: comparison is useless due to type limits - --> $DIR/lint-type-limits3.rs:8:11 + --> $DIR/lint-type-limits3.rs:9:11 | LL | while 200 != i { //~ ERROR comparison is useless due to type limits | ^^^^^^^^ @@ -7,12 +7,16 @@ LL | while 200 != i { //~ ERROR comparison is useless due to type limits = note: requested on the command line with `-D unused-comparisons` warning: literal out of range for i8 - --> $DIR/lint-type-limits3.rs:8:11 + --> $DIR/lint-type-limits3.rs:9:11 | LL | while 200 != i { //~ ERROR comparison is useless due to type limits | ^^^ | - = note: #[warn(overflowing_literals)] on by default +note: lint level defined here + --> $DIR/lint-type-limits3.rs:2:9 + | +LL | #![warn(overflowing_literals)] + | ^^^^^^^^^^^^^^^^^^^^ error: aborting due to previous error diff --git a/src/test/ui/lint/type-overflow.rs b/src/test/ui/lint/type-overflow.rs index 2ccc52a0413e0..64e5951207379 100644 --- a/src/test/ui/lint/type-overflow.rs +++ b/src/test/ui/lint/type-overflow.rs @@ -1,4 +1,5 @@ // compile-pass +#![warn(overflowing_literals)] fn main() { let error = 255i8; //~WARNING literal out of range for i8 diff --git a/src/test/ui/lint/type-overflow.stderr b/src/test/ui/lint/type-overflow.stderr index 3c4a37c4adf56..349d0be016497 100644 --- a/src/test/ui/lint/type-overflow.stderr +++ b/src/test/ui/lint/type-overflow.stderr @@ -1,13 +1,17 @@ warning: literal out of range for i8 - --> $DIR/type-overflow.rs:4:17 + --> $DIR/type-overflow.rs:5:17 | LL | let error = 255i8; //~WARNING literal out of range for i8 | ^^^^^ | - = note: #[warn(overflowing_literals)] on by default +note: lint level defined here + --> $DIR/type-overflow.rs:2:9 + | +LL | #![warn(overflowing_literals)] + | ^^^^^^^^^^^^^^^^^^^^ warning: literal out of range for i8 - --> $DIR/type-overflow.rs:9:16 + --> $DIR/type-overflow.rs:10:16 | LL | let fail = 0b1000_0001i8; //~WARNING literal out of range for i8 | ^^^^^^^^^^^^^ help: consider using `u8` instead: `0b1000_0001u8` @@ -15,7 +19,7 @@ LL | let fail = 0b1000_0001i8; //~WARNING literal out of range for i8 = note: the literal `0b1000_0001i8` (decimal `129`) does not fit into an `i8` and will become `-127i8` warning: literal out of range for i64 - --> $DIR/type-overflow.rs:11:16 + --> $DIR/type-overflow.rs:12:16 | LL | let fail = 0x8000_0000_0000_0000i64; //~WARNING literal out of range for i64 | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `u64` instead: `0x8000_0000_0000_0000u64` @@ -23,7 +27,7 @@ LL | let fail = 0x8000_0000_0000_0000i64; //~WARNING literal out of range fo = note: the literal `0x8000_0000_0000_0000i64` (decimal `9223372036854775808`) does not fit into an `i64` and will become `-9223372036854775808i64` warning: literal out of range for u32 - --> $DIR/type-overflow.rs:13:16 + --> $DIR/type-overflow.rs:14:16 | LL | let fail = 0x1_FFFF_FFFFu32; //~WARNING literal out of range for u32 | ^^^^^^^^^^^^^^^^ help: consider using `u64` instead: `0x1_FFFF_FFFFu64` @@ -31,7 +35,7 @@ LL | let fail 
= 0x1_FFFF_FFFFu32; //~WARNING literal out of range for u32 = note: the literal `0x1_FFFF_FFFFu32` (decimal `8589934591`) does not fit into an `u32` and will become `4294967295u32` warning: literal out of range for i128 - --> $DIR/type-overflow.rs:15:22 + --> $DIR/type-overflow.rs:16:22 | LL | let fail: i128 = 0x8000_0000_0000_0000_0000_0000_0000_0000; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -40,7 +44,7 @@ LL | let fail: i128 = 0x8000_0000_0000_0000_0000_0000_0000_0000; = help: consider using `u128` instead warning: literal out of range for i32 - --> $DIR/type-overflow.rs:18:16 + --> $DIR/type-overflow.rs:19:16 | LL | let fail = 0x8FFF_FFFF_FFFF_FFFE; //~WARNING literal out of range for i32 | ^^^^^^^^^^^^^^^^^^^^^ @@ -49,7 +53,7 @@ LL | let fail = 0x8FFF_FFFF_FFFF_FFFE; //~WARNING literal out of range for i = help: consider using `i128` instead warning: literal out of range for i8 - --> $DIR/type-overflow.rs:20:17 + --> $DIR/type-overflow.rs:21:17 | LL | let fail = -0b1111_1111i8; //~WARNING literal out of range for i8 | ^^^^^^^^^^^^^ help: consider using `i16` instead: `0b1111_1111i16` From 6bfb280189ef6960525f18364c1b4644a913f4ce Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 1 Feb 2019 19:53:32 +0100 Subject: [PATCH 03/34] deprecate before_exec in favor of unsafe pre_exec --- src/libstd/sys/unix/ext/process.rs | 26 ++++++++++++++++--- src/libstd/sys/unix/process/process_common.rs | 2 +- 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/src/libstd/sys/unix/ext/process.rs b/src/libstd/sys/unix/ext/process.rs index 0282aaae90923..f5fc26dc9cca6 100644 --- a/src/libstd/sys/unix/ext/process.rs +++ b/src/libstd/sys/unix/ext/process.rs @@ -45,12 +45,32 @@ pub trait CommandExt { /// like `malloc` or acquiring a mutex are not guaranteed to work (due to /// other threads perhaps still running when the `fork` was run). /// + /// This also means that all resources such as file descriptors and + /// memory-mapped regions got duplicated. It is your responsibility to make + /// sure that the closure does not violate library invariants by making + /// invalid use of these duplicates. + /// /// When this closure is run, aspects such as the stdio file descriptors and /// working directory have successfully been changed, so output to these /// locations may not appear where intended. + #[stable(feature = "process_pre_exec", since = "1.34.0")] + unsafe fn pre_exec(&mut self, f: F) -> &mut process::Command + where F: FnMut() -> io::Result<()> + Send + Sync + 'static; + + /// Schedules a closure to be run just before the `exec` function is + /// invoked. + /// + /// This method should be unsafe, so it got deprecated in favor of the + /// unsafe [`pre_exec`]. + /// + /// [`pre_exec`]: #tymethod.pre_exec #[stable(feature = "process_exec", since = "1.15.0")] + #[rustc_deprecated(since = "1.34.0", reason = "should be unsafe, use `pre_exec` instead")] fn before_exec(&mut self, f: F) -> &mut process::Command - where F: FnMut() -> io::Result<()> + Send + Sync + 'static; + where F: FnMut() -> io::Result<()> + Send + Sync + 'static + { + unsafe { self.pre_exec(f) } + } /// Performs all the required setup by this `Command`, followed by calling /// the `execvp` syscall. 
@@ -97,10 +117,10 @@ impl CommandExt for process::Command { self } - fn before_exec(&mut self, f: F) -> &mut process::Command + unsafe fn pre_exec(&mut self, f: F) -> &mut process::Command where F: FnMut() -> io::Result<()> + Send + Sync + 'static { - self.as_inner_mut().before_exec(Box::new(f)); + self.as_inner_mut().pre_exec(Box::new(f)); self } diff --git a/src/libstd/sys/unix/process/process_common.rs b/src/libstd/sys/unix/process/process_common.rs index 2c55813c5cd39..9975064ca655f 100644 --- a/src/libstd/sys/unix/process/process_common.rs +++ b/src/libstd/sys/unix/process/process_common.rs @@ -149,7 +149,7 @@ impl Command { &mut self.closures } - pub fn before_exec(&mut self, + pub unsafe fn pre_exec(&mut self, f: Box io::Result<()> + Send + Sync>) { self.closures.push(f); } From d48433d920ad27ab57a27f087bcdec79ab36bfdc Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 1 Feb 2019 19:57:06 +0100 Subject: [PATCH 04/34] also replace before_exec by pre_exec on redox --- src/libstd/sys/redox/ext/process.rs | 28 ++++++++++++++++++++++++---- src/libstd/sys/redox/process.rs | 2 +- src/libstd/sys/unix/ext/process.rs | 2 +- 3 files changed, 26 insertions(+), 6 deletions(-) diff --git a/src/libstd/sys/redox/ext/process.rs b/src/libstd/sys/redox/ext/process.rs index 941fba8755b4a..78917ea91886b 100644 --- a/src/libstd/sys/redox/ext/process.rs +++ b/src/libstd/sys/redox/ext/process.rs @@ -36,7 +36,7 @@ pub trait CommandExt { /// will be called and the spawn operation will immediately return with a /// failure. /// - /// # Notes + /// # Notes and Safety /// /// This closure will be run in the context of the child process after a /// `fork`. This primarily means that any modifications made to memory on @@ -45,12 +45,32 @@ pub trait CommandExt { /// like `malloc` or acquiring a mutex are not guaranteed to work (due to /// other threads perhaps still running when the `fork` was run). /// + /// This also means that all resources such as file descriptors and + /// memory-mapped regions got duplicated. It is your responsibility to make + /// sure that the closure does not violate library invariants by making + /// invalid use of these duplicates. + /// /// When this closure is run, aspects such as the stdio file descriptors and /// working directory have successfully been changed, so output to these /// locations may not appear where intended. + #[stable(feature = "process_pre_exec", since = "1.34.0")] + unsafe fn pre_exec(&mut self, f: F) -> &mut process::Command + where F: FnMut() -> io::Result<()> + Send + Sync + 'static; + + /// Schedules a closure to be run just before the `exec` function is + /// invoked. + /// + /// This method should be unsafe, so it got deprecated in favor of the + /// unsafe [`pre_exec`]. + /// + /// [`pre_exec`]: #tymethod.pre_exec #[stable(feature = "process_exec", since = "1.15.0")] + #[rustc_deprecated(since = "1.34.0", reason = "should be unsafe, use `pre_exec` instead")] fn before_exec(&mut self, f: F) -> &mut process::Command - where F: FnMut() -> io::Result<()> + Send + Sync + 'static; + where F: FnMut() -> io::Result<()> + Send + Sync + 'static + { + unsafe { self.pre_exec(f) } + } /// Performs all the required setup by this `Command`, followed by calling /// the `execvp` syscall. 
@@ -87,10 +107,10 @@ impl CommandExt for process::Command { self } - fn before_exec(&mut self, f: F) -> &mut process::Command + unsafe fn pre_exec(&mut self, f: F) -> &mut process::Command where F: FnMut() -> io::Result<()> + Send + Sync + 'static { - self.as_inner_mut().before_exec(Box::new(f)); + self.as_inner_mut().pre_exec(Box::new(f)); self } diff --git a/src/libstd/sys/redox/process.rs b/src/libstd/sys/redox/process.rs index 4199ab98cf17e..9b85fa41a0a4f 100644 --- a/src/libstd/sys/redox/process.rs +++ b/src/libstd/sys/redox/process.rs @@ -116,7 +116,7 @@ impl Command { self.gid = Some(id); } - pub fn before_exec(&mut self, + pub unsafe fn pre_exec(&mut self, f: Box io::Result<()> + Send + Sync>) { self.closures.push(f); } diff --git a/src/libstd/sys/unix/ext/process.rs b/src/libstd/sys/unix/ext/process.rs index f5fc26dc9cca6..7cc5e9945938d 100644 --- a/src/libstd/sys/unix/ext/process.rs +++ b/src/libstd/sys/unix/ext/process.rs @@ -36,7 +36,7 @@ pub trait CommandExt { /// will be called and the spawn operation will immediately return with a /// failure. /// - /// # Notes + /// # Notes and Safety /// /// This closure will be run in the context of the child process after a /// `fork`. This primarily means that any modifications made to memory on From b1709d25e12fbffca53c30d05c16854256185900 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 1 Feb 2019 20:00:08 +0100 Subject: [PATCH 05/34] update test --- ...and-before-exec.rs => command-pre-exec.rs} | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) rename src/test/run-pass/{command-before-exec.rs => command-pre-exec.rs} (75%) diff --git a/src/test/run-pass/command-before-exec.rs b/src/test/run-pass/command-pre-exec.rs similarity index 75% rename from src/test/run-pass/command-before-exec.rs rename to src/test/run-pass/command-pre-exec.rs index 91d2636b2ae63..bca2b8410fa84 100644 --- a/src/test/run-pass/command-before-exec.rs +++ b/src/test/run-pass/command-pre-exec.rs @@ -29,53 +29,53 @@ fn main() { let me = env::current_exe().unwrap(); - let output = Command::new(&me).arg("test1").before_exec(|| { + let output = unsafe { Command::new(&me).arg("test1").pre_exec(|| { println!("hello"); Ok(()) - }).output().unwrap(); + }).output().unwrap() }; assert!(output.status.success()); assert!(output.stderr.is_empty()); assert_eq!(output.stdout, b"hello\nhello2\n"); - let output = Command::new(&me).arg("test2").before_exec(|| { + let output = unsafe { Command::new(&me).arg("test2").pre_exec(|| { env::set_var("FOO", "BAR"); Ok(()) - }).output().unwrap(); + }).output().unwrap() }; assert!(output.status.success()); assert!(output.stderr.is_empty()); assert!(output.stdout.is_empty()); - let output = Command::new(&me).arg("test3").before_exec(|| { + let output = unsafe { Command::new(&me).arg("test3").pre_exec(|| { env::set_current_dir("/").unwrap(); Ok(()) - }).output().unwrap(); + }).output().unwrap() }; assert!(output.status.success()); assert!(output.stderr.is_empty()); assert!(output.stdout.is_empty()); - let output = Command::new(&me).arg("bad").before_exec(|| { + let output = unsafe { Command::new(&me).arg("bad").pre_exec(|| { Err(Error::from_raw_os_error(102)) - }).output().unwrap_err(); + }).output().unwrap_err() }; assert_eq!(output.raw_os_error(), Some(102)); let pid = unsafe { libc::getpid() }; assert!(pid >= 0); - let output = Command::new(&me).arg("empty").before_exec(move || { - let child = unsafe { libc::getpid() }; + let output = unsafe { Command::new(&me).arg("empty").pre_exec(move || { + let child = 
libc::getpid(); assert!(child >= 0); assert!(pid != child); Ok(()) - }).output().unwrap(); + }).output().unwrap() }; assert!(output.status.success()); assert!(output.stderr.is_empty()); assert!(output.stdout.is_empty()); let mem = Arc::new(AtomicUsize::new(0)); let mem2 = mem.clone(); - let output = Command::new(&me).arg("empty").before_exec(move || { + let output = unsafe { Command::new(&me).arg("empty").pre_exec(move || { assert_eq!(mem2.fetch_add(1, Ordering::SeqCst), 0); Ok(()) - }).output().unwrap(); + }).output().unwrap() }; assert!(output.status.success()); assert!(output.stderr.is_empty()); assert!(output.stdout.is_empty()); From cbbf8a7ff932b599227b27d34e9b015374f5b37a Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sat, 2 Feb 2019 11:00:55 +0100 Subject: [PATCH 06/34] deprecate things a bit slower --- src/libstd/sys/redox/ext/process.rs | 2 +- src/libstd/sys/unix/ext/process.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libstd/sys/redox/ext/process.rs b/src/libstd/sys/redox/ext/process.rs index 78917ea91886b..4e669bbb2d7dc 100644 --- a/src/libstd/sys/redox/ext/process.rs +++ b/src/libstd/sys/redox/ext/process.rs @@ -65,7 +65,7 @@ pub trait CommandExt { /// /// [`pre_exec`]: #tymethod.pre_exec #[stable(feature = "process_exec", since = "1.15.0")] - #[rustc_deprecated(since = "1.34.0", reason = "should be unsafe, use `pre_exec` instead")] + #[rustc_deprecated(since = "1.37.0", reason = "should be unsafe, use `pre_exec` instead")] fn before_exec(&mut self, f: F) -> &mut process::Command where F: FnMut() -> io::Result<()> + Send + Sync + 'static { diff --git a/src/libstd/sys/unix/ext/process.rs b/src/libstd/sys/unix/ext/process.rs index 7cc5e9945938d..da0507c65423d 100644 --- a/src/libstd/sys/unix/ext/process.rs +++ b/src/libstd/sys/unix/ext/process.rs @@ -65,7 +65,7 @@ pub trait CommandExt { /// /// [`pre_exec`]: #tymethod.pre_exec #[stable(feature = "process_exec", since = "1.15.0")] - #[rustc_deprecated(since = "1.34.0", reason = "should be unsafe, use `pre_exec` instead")] + #[rustc_deprecated(since = "1.37.0", reason = "should be unsafe, use `pre_exec` instead")] fn before_exec(&mut self, f: F) -> &mut process::Command where F: FnMut() -> io::Result<()> + Send + Sync + 'static { From 6c67a7625fbd38b4b986981c553dc7eb5a7a4765 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sat, 2 Feb 2019 11:05:43 +0100 Subject: [PATCH 07/34] pre_exec: expand docs --- src/libstd/sys/redox/ext/process.rs | 7 ++++--- src/libstd/sys/unix/ext/process.rs | 7 ++++--- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/src/libstd/sys/redox/ext/process.rs b/src/libstd/sys/redox/ext/process.rs index 4e669bbb2d7dc..55824dc4c059f 100644 --- a/src/libstd/sys/redox/ext/process.rs +++ b/src/libstd/sys/redox/ext/process.rs @@ -48,7 +48,8 @@ pub trait CommandExt { /// This also means that all resources such as file descriptors and /// memory-mapped regions got duplicated. It is your responsibility to make /// sure that the closure does not violate library invariants by making - /// invalid use of these duplicates. + /// invalid use of these duplicates. Moreover, POSIX demands that you only + /// perform operations that are explicitly documented as async-signal-safe. /// /// When this closure is run, aspects such as the stdio file descriptors and /// working directory have successfully been changed, so output to these @@ -60,8 +61,8 @@ pub trait CommandExt { /// Schedules a closure to be run just before the `exec` function is /// invoked. 
/// - /// This method should be unsafe, so it got deprecated in favor of the - /// unsafe [`pre_exec`]. + /// This method is stable and usable, but it should be unsafe. To fix + /// that, it got deprecated in favor of the unsafe [`pre_exec`]. /// /// [`pre_exec`]: #tymethod.pre_exec #[stable(feature = "process_exec", since = "1.15.0")] diff --git a/src/libstd/sys/unix/ext/process.rs b/src/libstd/sys/unix/ext/process.rs index da0507c65423d..ac0abc761ffb5 100644 --- a/src/libstd/sys/unix/ext/process.rs +++ b/src/libstd/sys/unix/ext/process.rs @@ -48,7 +48,8 @@ pub trait CommandExt { /// This also means that all resources such as file descriptors and /// memory-mapped regions got duplicated. It is your responsibility to make /// sure that the closure does not violate library invariants by making - /// invalid use of these duplicates. + /// invalid use of these duplicates. Moreover, POSIX demands that you only + /// perform operations that are explicitly documented as async-signal-safe. /// /// When this closure is run, aspects such as the stdio file descriptors and /// working directory have successfully been changed, so output to these @@ -60,8 +61,8 @@ pub trait CommandExt { /// Schedules a closure to be run just before the `exec` function is /// invoked. /// - /// This method should be unsafe, so it got deprecated in favor of the - /// unsafe [`pre_exec`]. + /// This method is stable and usable, but it should be unsafe. To fix + /// that, it got deprecated in favor of the unsafe [`pre_exec`]. /// /// [`pre_exec`]: #tymethod.pre_exec #[stable(feature = "process_exec", since = "1.15.0")] From 59da97d2b2d0e3d4baf70cd6fdf49c31c7def380 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sat, 2 Feb 2019 11:08:21 +0100 Subject: [PATCH 08/34] rustfmt the test --- src/test/run-pass/command-pre-exec.rs | 92 ++++++++++++++++++--------- 1 file changed, 62 insertions(+), 30 deletions(-) diff --git a/src/test/run-pass/command-pre-exec.rs b/src/test/run-pass/command-pre-exec.rs index bca2b8410fa84..21783fedd39c9 100644 --- a/src/test/run-pass/command-pre-exec.rs +++ b/src/test/run-pass/command-pre-exec.rs @@ -2,7 +2,6 @@ // ignore-windows - this is a unix-specific test // ignore-cloudabi no processes // ignore-emscripten no processes - #![feature(process_exec, rustc_private)] extern crate libc; @@ -11,71 +10,104 @@ use std::env; use std::io::Error; use std::os::unix::process::CommandExt; use std::process::Command; -use std::sync::Arc; use std::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::Arc; fn main() { if let Some(arg) = env::args().nth(1) { match &arg[..] 
{ "test1" => println!("hello2"), "test2" => assert_eq!(env::var("FOO").unwrap(), "BAR"), - "test3" => assert_eq!(env::current_dir().unwrap() - .to_str().unwrap(), "/"), + "test3" => assert_eq!(env::current_dir().unwrap().to_str().unwrap(), "/"), "empty" => {} _ => panic!("unknown argument: {}", arg), } - return + return; } let me = env::current_exe().unwrap(); - let output = unsafe { Command::new(&me).arg("test1").pre_exec(|| { - println!("hello"); - Ok(()) - }).output().unwrap() }; + let output = unsafe { + Command::new(&me) + .arg("test1") + .pre_exec(|| { + println!("hello"); + Ok(()) + }) + .output() + .unwrap() + }; assert!(output.status.success()); assert!(output.stderr.is_empty()); assert_eq!(output.stdout, b"hello\nhello2\n"); - let output = unsafe { Command::new(&me).arg("test2").pre_exec(|| { - env::set_var("FOO", "BAR"); - Ok(()) - }).output().unwrap() }; + let output = unsafe { + Command::new(&me) + .arg("test2") + .pre_exec(|| { + env::set_var("FOO", "BAR"); + Ok(()) + }) + .output() + .unwrap() + }; assert!(output.status.success()); assert!(output.stderr.is_empty()); assert!(output.stdout.is_empty()); - let output = unsafe { Command::new(&me).arg("test3").pre_exec(|| { - env::set_current_dir("/").unwrap(); - Ok(()) - }).output().unwrap() }; + let output = unsafe { + Command::new(&me) + .arg("test3") + .pre_exec(|| { + env::set_current_dir("/").unwrap(); + Ok(()) + }) + .output() + .unwrap() + }; assert!(output.status.success()); assert!(output.stderr.is_empty()); assert!(output.stdout.is_empty()); - let output = unsafe { Command::new(&me).arg("bad").pre_exec(|| { - Err(Error::from_raw_os_error(102)) - }).output().unwrap_err() }; + let output = unsafe { + Command::new(&me) + .arg("bad") + .pre_exec(|| Err(Error::from_raw_os_error(102))) + .output() + .unwrap_err() + }; assert_eq!(output.raw_os_error(), Some(102)); let pid = unsafe { libc::getpid() }; assert!(pid >= 0); - let output = unsafe { Command::new(&me).arg("empty").pre_exec(move || { - let child = libc::getpid(); - assert!(child >= 0); - assert!(pid != child); - Ok(()) - }).output().unwrap() }; + let output = unsafe { + Command::new(&me) + .arg("empty") + .pre_exec(move || { + let child = libc::getpid(); + assert!(child >= 0); + assert!(pid != child); + Ok(()) + }) + .output() + .unwrap() + }; assert!(output.status.success()); assert!(output.stderr.is_empty()); assert!(output.stdout.is_empty()); let mem = Arc::new(AtomicUsize::new(0)); let mem2 = mem.clone(); - let output = unsafe { Command::new(&me).arg("empty").pre_exec(move || { - assert_eq!(mem2.fetch_add(1, Ordering::SeqCst), 0); - Ok(()) - }).output().unwrap() }; + let output = unsafe { + Command::new(&me) + .arg("empty") + .pre_exec(move || { + assert_eq!(mem2.fetch_add(1, Ordering::SeqCst), 0); + Ok(()) + }) + .output() + .unwrap() + }; assert!(output.status.success()); assert!(output.stderr.is_empty()); assert!(output.stdout.is_empty()); From 33ee99b26a499027546f88a7f4eeddd8d4985c39 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sun, 3 Feb 2019 11:16:37 +0100 Subject: [PATCH 09/34] more formatting --- src/libstd/sys/redox/process.rs | 6 ++++-- src/libstd/sys/unix/process/process_common.rs | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/libstd/sys/redox/process.rs b/src/libstd/sys/redox/process.rs index 9b85fa41a0a4f..62eb8cb23ab56 100644 --- a/src/libstd/sys/redox/process.rs +++ b/src/libstd/sys/redox/process.rs @@ -116,8 +116,10 @@ impl Command { self.gid = Some(id); } - pub unsafe fn pre_exec(&mut self, - f: Box io::Result<()> + 
Send + Sync>) { + pub unsafe fn pre_exec( + &mut self, + f: Box io::Result<()> + Send + Sync>, + ) { self.closures.push(f); } diff --git a/src/libstd/sys/unix/process/process_common.rs b/src/libstd/sys/unix/process/process_common.rs index 9975064ca655f..7fa256e59b2db 100644 --- a/src/libstd/sys/unix/process/process_common.rs +++ b/src/libstd/sys/unix/process/process_common.rs @@ -149,8 +149,10 @@ impl Command { &mut self.closures } - pub unsafe fn pre_exec(&mut self, - f: Box io::Result<()> + Send + Sync>) { + pub unsafe fn pre_exec( + &mut self, + f: Box io::Result<()> + Send + Sync>, + ) { self.closures.push(f); } From e023403da2da17ba7320c53c415b960c93348247 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sun, 3 Feb 2019 11:17:59 +0100 Subject: [PATCH 10/34] POSIX requires async signal safety for fork in signal handlers, not in general --- src/libstd/sys/redox/ext/process.rs | 3 +-- src/libstd/sys/unix/ext/process.rs | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/libstd/sys/redox/ext/process.rs b/src/libstd/sys/redox/ext/process.rs index 55824dc4c059f..18aef768b5db1 100644 --- a/src/libstd/sys/redox/ext/process.rs +++ b/src/libstd/sys/redox/ext/process.rs @@ -48,8 +48,7 @@ pub trait CommandExt { /// This also means that all resources such as file descriptors and /// memory-mapped regions got duplicated. It is your responsibility to make /// sure that the closure does not violate library invariants by making - /// invalid use of these duplicates. Moreover, POSIX demands that you only - /// perform operations that are explicitly documented as async-signal-safe. + /// invalid use of these duplicates. /// /// When this closure is run, aspects such as the stdio file descriptors and /// working directory have successfully been changed, so output to these diff --git a/src/libstd/sys/unix/ext/process.rs b/src/libstd/sys/unix/ext/process.rs index ac0abc761ffb5..d869a2013dc27 100644 --- a/src/libstd/sys/unix/ext/process.rs +++ b/src/libstd/sys/unix/ext/process.rs @@ -48,8 +48,7 @@ pub trait CommandExt { /// This also means that all resources such as file descriptors and /// memory-mapped regions got duplicated. It is your responsibility to make /// sure that the closure does not violate library invariants by making - /// invalid use of these duplicates. Moreover, POSIX demands that you only - /// perform operations that are explicitly documented as async-signal-safe. + /// invalid use of these duplicates. /// /// When this closure is run, aspects such as the stdio file descriptors and /// working directory have successfully been changed, so output to these From eb5b09688696562d08ebb35e34ded723e6e44277 Mon Sep 17 00:00:00 2001 From: Matthieu M Date: Sun, 3 Feb 2019 16:58:29 +0100 Subject: [PATCH 11/34] RangeInclusive internal iteration performance improvement. Specialize Iterator::try_fold and DoubleEndedIterator::try_rfold to improve code generation in all internal iteration scenarios. This changes brings the performance of internal iteration with RangeInclusive on par with the performance of iteration with Range: - Single conditional jump in hot loop, - Unrolling and vectorization, - And even Closed Form substitution. Unfortunately, it only applies to internal iteration. 
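For a concrete sense of the distinction, here is a minimal, illustrative-only sketch (the helper names are made up for the example and are not part of this patch):

```rust
// Internal iteration: `sum` drives the range through `fold`, whose default
// implementation is written in terms of `try_fold`, so it benefits from the
// specialization added in this patch.
fn sum_internal(n: u64) -> u64 {
    (1..=n).sum()
}

// External iteration: a hand-rolled loop calls `next()` repeatedly; this path
// is the one discussed below and is not affected by this change.
fn sum_external(n: u64) -> u64 {
    let mut it = 1..=n;
    let mut total = 0;
    while let Some(x) = it.next() {
        total += x;
    }
    total
}

fn main() {
    assert_eq!(sum_internal(10), 55);
    assert_eq!(sum_external(10), 55);
}
```

Both functions compute the same result; only the first one goes through the specialized `try_fold` path.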
Despite various attempts at stream-lining the implementation of next and next_back, LLVM has stubbornly refused to optimize external iteration appropriately, leaving me with a choice between: - The current implementation, for which Closed Form substitution is performed, but which uses 2 conditional jumps in the hot loop when optimization fail. - An implementation using a "is_done" boolean, which uses 1 conditional jump in the hot loop when optimization fail, allowing unrolling and vectorization, but for which Closed Form substitution fails. In the absence of any conclusive evidence as to which usecase matters most, and with no assurance that the lack of Closed Form substitution is not indicative of other optimizations being foiled, there is no way to pick one implementation over the other, and thus I defer to the statu quo as far as next and next_back are concerned. --- src/libcore/iter/range.rs | 51 ++++++++++++++++++++++++++++++++++++--- src/libcore/ops/range.rs | 2 ++ 2 files changed, 50 insertions(+), 3 deletions(-) diff --git a/src/libcore/iter/range.rs b/src/libcore/iter/range.rs index 66c09a0ddd0fb..52b0ccd48d476 100644 --- a/src/libcore/iter/range.rs +++ b/src/libcore/iter/range.rs @@ -1,6 +1,6 @@ use convert::TryFrom; use mem; -use ops::{self, Add, Sub}; +use ops::{self, Add, Sub, Try}; use usize; use super::{FusedIterator, TrustedLen}; @@ -368,11 +368,11 @@ impl Iterator for ops::RangeInclusive { Some(Less) => { self.is_empty = Some(false); self.start = plus_n.add_one(); - return Some(plus_n) + return Some(plus_n); } Some(Equal) => { self.is_empty = Some(true); - return Some(plus_n) + return Some(plus_n); } _ => {} } @@ -382,6 +382,29 @@ impl Iterator for ops::RangeInclusive { None } + #[inline] + fn try_fold(&mut self, init: B, mut f: F) -> R + where + Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try + { + self.compute_is_empty(); + + let mut accum = init; + + while self.start < self.end { + let n = self.start.add_one(); + let n = mem::replace(&mut self.start, n); + accum = f(accum, n)?; + } + + if self.start == self.end { + accum = f(accum, self.start.clone())?; + } + + self.is_empty = Some(true); + Try::from_ok(accum) + } + #[inline] fn last(mut self) -> Option { self.next_back() @@ -415,6 +438,28 @@ impl DoubleEndedIterator for ops::RangeInclusive { self.end.clone() }) } + + #[inline] + fn try_rfold(&mut self, init: B, mut f: F) -> R where + Self: Sized, F: FnMut(B, Self::Item) -> R, R: Try + { + self.compute_is_empty(); + + let mut accum = init; + + while self.start < self.end { + let n = self.end.sub_one(); + let n = mem::replace(&mut self.end, n); + accum = f(accum, n)?; + } + + if self.start == self.end { + accum = f(accum, self.start.clone())?; + } + + self.is_empty = Some(true); + Try::from_ok(accum) + } } #[stable(feature = "fused", since = "1.26.0")] diff --git a/src/libcore/ops/range.rs b/src/libcore/ops/range.rs index 815a4cfeed88e..6776ebdc66edc 100644 --- a/src/libcore/ops/range.rs +++ b/src/libcore/ops/range.rs @@ -334,12 +334,14 @@ pub struct RangeInclusive { trait RangeInclusiveEquality: Sized { fn canonicalized_is_empty(range: &RangeInclusive) -> bool; } + impl RangeInclusiveEquality for T { #[inline] default fn canonicalized_is_empty(range: &RangeInclusive) -> bool { range.is_empty.unwrap_or_default() } } + impl RangeInclusiveEquality for T { #[inline] fn canonicalized_is_empty(range: &RangeInclusive) -> bool { From f753d304c6c5d7f2c10147d783d58db7db4ffc4c Mon Sep 17 00:00:00 2001 From: Igor Sadikov Date: Tue, 5 Feb 2019 15:21:03 -0500 Subject: [PATCH 12/34] 
Suggest removing parentheses surrounding lifetimes --- src/libsyntax/parse/parser.rs | 17 +++++++++++++++-- .../parser/trait-object-lifetime-parens.stderr | 8 ++++---- 2 files changed, 19 insertions(+), 6 deletions(-) diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index cacdab980facd..1dbe3fb7ea620 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -5371,6 +5371,7 @@ impl<'a> Parser<'a> { if is_bound_start { let lo = self.span; let has_parens = self.eat(&token::OpenDelim(token::Paren)); + let inner_lo = self.span; let question = if self.eat(&token::Question) { Some(self.prev_span) } else { None }; if self.token.is_lifetime() { if let Some(question_span) = question { @@ -5379,9 +5380,21 @@ impl<'a> Parser<'a> { } bounds.push(GenericBound::Outlives(self.expect_lifetime())); if has_parens { + let inner_span = inner_lo.to(self.prev_span); self.expect(&token::CloseDelim(token::Paren))?; - self.span_err(self.prev_span, - "parenthesized lifetime bounds are not supported"); + let mut err = self.struct_span_err( + lo.to(self.prev_span), + "parenthesized lifetime bounds are not supported" + ); + if let Ok(snippet) = self.sess.source_map().span_to_snippet(inner_span) { + err.span_suggestion_short( + lo.to(self.prev_span), + "remove the parentheses", + snippet.to_owned(), + Applicability::MachineApplicable + ); + } + err.emit(); } } else { let lifetime_defs = self.parse_late_bound_lifetime_defs()?; diff --git a/src/test/ui/parser/trait-object-lifetime-parens.stderr b/src/test/ui/parser/trait-object-lifetime-parens.stderr index a9b542ddcc4d3..94ca66aef729b 100644 --- a/src/test/ui/parser/trait-object-lifetime-parens.stderr +++ b/src/test/ui/parser/trait-object-lifetime-parens.stderr @@ -1,14 +1,14 @@ error: parenthesized lifetime bounds are not supported - --> $DIR/trait-object-lifetime-parens.rs:5:24 + --> $DIR/trait-object-lifetime-parens.rs:5:21 | LL | fn f<'a, T: Trait + ('a)>() {} //~ ERROR parenthesized lifetime bounds are not supported - | ^ + | ^^^^ help: remove the parentheses error: parenthesized lifetime bounds are not supported - --> $DIR/trait-object-lifetime-parens.rs:8:27 + --> $DIR/trait-object-lifetime-parens.rs:8:24 | LL | let _: Box; //~ ERROR parenthesized lifetime bounds are not supported - | ^ + | ^^^^ help: remove the parentheses error: expected type, found `'a` --> $DIR/trait-object-lifetime-parens.rs:9:17 From 4fed67f94220351ffa60de1dca078c02a7c15734 Mon Sep 17 00:00:00 2001 From: Matthieu M Date: Sat, 9 Feb 2019 18:42:34 +0100 Subject: [PATCH 13/34] Fix exhaustion of inclusive range try_fold and try_rfold --- src/libcore/iter/range.rs | 14 ++++++++++++-- src/libcore/tests/iter.rs | 24 +++++++++++++++++++++--- 2 files changed, 33 insertions(+), 5 deletions(-) diff --git a/src/libcore/iter/range.rs b/src/libcore/iter/range.rs index 52b0ccd48d476..f0ed88c3dfd85 100644 --- a/src/libcore/iter/range.rs +++ b/src/libcore/iter/range.rs @@ -389,6 +389,10 @@ impl Iterator for ops::RangeInclusive { { self.compute_is_empty(); + if self.is_empty() { + return Try::from_ok(init); + } + let mut accum = init; while self.start < self.end { @@ -397,11 +401,12 @@ impl Iterator for ops::RangeInclusive { accum = f(accum, n)?; } + self.is_empty = Some(true); + if self.start == self.end { accum = f(accum, self.start.clone())?; } - self.is_empty = Some(true); Try::from_ok(accum) } @@ -445,6 +450,10 @@ impl DoubleEndedIterator for ops::RangeInclusive { { self.compute_is_empty(); + if self.is_empty() { + return Try::from_ok(init); + } + let mut 
accum = init; while self.start < self.end { @@ -453,11 +462,12 @@ impl DoubleEndedIterator for ops::RangeInclusive { accum = f(accum, n)?; } + self.is_empty = Some(true); + if self.start == self.end { accum = f(accum, self.start.clone())?; } - self.is_empty = Some(true); Try::from_ok(accum) } } diff --git a/src/libcore/tests/iter.rs b/src/libcore/tests/iter.rs index cf19851c17b35..89e190e074f1a 100644 --- a/src/libcore/tests/iter.rs +++ b/src/libcore/tests/iter.rs @@ -1738,19 +1738,37 @@ fn test_range_inclusive_folds() { assert_eq!((1..=10).sum::(), 55); assert_eq!((1..=10).rev().sum::(), 55); - let mut it = 40..=50; + let mut it = 44..=50; assert_eq!(it.try_fold(0, i8::checked_add), None); - assert_eq!(it, 44..=50); + assert_eq!(it, 47..=50); + assert_eq!(it.try_fold(0, i8::checked_add), None); + assert_eq!(it, 50..=50); + assert_eq!(it.try_fold(0, i8::checked_add), Some(50)); + assert!(it.is_empty()); + assert_eq!(it.try_fold(0, i8::checked_add), Some(0)); + assert!(it.is_empty()); + + let mut it = 40..=47; + assert_eq!(it.try_rfold(0, i8::checked_add), None); + assert_eq!(it, 40..=44); assert_eq!(it.try_rfold(0, i8::checked_add), None); - assert_eq!(it, 44..=47); + assert_eq!(it, 40..=41); + assert_eq!(it.try_rfold(0, i8::checked_add), Some(81)); + assert!(it.is_empty()); + assert_eq!(it.try_rfold(0, i8::checked_add), Some(0)); + assert!(it.is_empty()); let mut it = 10..=20; assert_eq!(it.try_fold(0, |a,b| Some(a+b)), Some(165)); assert!(it.is_empty()); + assert_eq!(it.try_fold(0, |a,b| Some(a+b)), Some(0)); + assert!(it.is_empty()); let mut it = 10..=20; assert_eq!(it.try_rfold(0, |a,b| Some(a+b)), Some(165)); assert!(it.is_empty()); + assert_eq!(it.try_rfold(0, |a,b| Some(a+b)), Some(0)); + assert!(it.is_empty()); } #[test] From 26bd43355f839f6005910056e89b2364b7fec06a Mon Sep 17 00:00:00 2001 From: Valentin Tolmer Date: Fri, 8 Feb 2019 14:14:15 +0100 Subject: [PATCH 14/34] Move the intrinsics into a submodule --- src/libcore/intrinsics.rs | 464 ++++++++++++++++++++------------------ 1 file changed, 249 insertions(+), 215 deletions(-) diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs index f6de7566be914..e6098b1b24cc0 100644 --- a/src/libcore/intrinsics.rs +++ b/src/libcore/intrinsics.rs @@ -962,221 +962,6 @@ extern "rust-intrinsic" { /// value is not necessarily valid to be used to actually access memory. pub fn arith_offset(dst: *const T, offset: isize) -> *const T; - /// Copies `count * size_of::()` bytes from `src` to `dst`. The source - /// and destination must *not* overlap. - /// - /// For regions of memory which might overlap, use [`copy`] instead. - /// - /// `copy_nonoverlapping` is semantically equivalent to C's [`memcpy`], but - /// with the argument order swapped. - /// - /// [`copy`]: ./fn.copy.html - /// [`memcpy`]: https://en.cppreference.com/w/c/string/byte/memcpy - /// - /// # Safety - /// - /// Behavior is undefined if any of the following conditions are violated: - /// - /// * `src` must be [valid] for reads of `count * size_of::()` bytes. - /// - /// * `dst` must be [valid] for writes of `count * size_of::()` bytes. - /// - /// * Both `src` and `dst` must be properly aligned. - /// - /// * The region of memory beginning at `src` with a size of `count * - /// size_of::()` bytes must *not* overlap with the region of memory - /// beginning at `dst` with the same size. - /// - /// Like [`read`], `copy_nonoverlapping` creates a bitwise copy of `T`, regardless of - /// whether `T` is [`Copy`]. 
If `T` is not [`Copy`], using *both* the values - /// in the region beginning at `*src` and the region beginning at `*dst` can - /// [violate memory safety][read-ownership]. - /// - /// Note that even if the effectively copied size (`count * size_of::()`) is - /// `0`, the pointers must be non-NULL and properly aligned. - /// - /// [`Copy`]: ../marker/trait.Copy.html - /// [`read`]: ../ptr/fn.read.html - /// [read-ownership]: ../ptr/fn.read.html#ownership-of-the-returned-value - /// [valid]: ../ptr/index.html#safety - /// - /// # Examples - /// - /// Manually implement [`Vec::append`]: - /// - /// ``` - /// use std::ptr; - /// - /// /// Moves all the elements of `src` into `dst`, leaving `src` empty. - /// fn append(dst: &mut Vec, src: &mut Vec) { - /// let src_len = src.len(); - /// let dst_len = dst.len(); - /// - /// // Ensure that `dst` has enough capacity to hold all of `src`. - /// dst.reserve(src_len); - /// - /// unsafe { - /// // The call to offset is always safe because `Vec` will never - /// // allocate more than `isize::MAX` bytes. - /// let dst_ptr = dst.as_mut_ptr().offset(dst_len as isize); - /// let src_ptr = src.as_ptr(); - /// - /// // Truncate `src` without dropping its contents. We do this first, - /// // to avoid problems in case something further down panics. - /// src.set_len(0); - /// - /// // The two regions cannot overlap because mutable references do - /// // not alias, and two different vectors cannot own the same - /// // memory. - /// ptr::copy_nonoverlapping(src_ptr, dst_ptr, src_len); - /// - /// // Notify `dst` that it now holds the contents of `src`. - /// dst.set_len(dst_len + src_len); - /// } - /// } - /// - /// let mut a = vec!['r']; - /// let mut b = vec!['u', 's', 't']; - /// - /// append(&mut a, &mut b); - /// - /// assert_eq!(a, &['r', 'u', 's', 't']); - /// assert!(b.is_empty()); - /// ``` - /// - /// [`Vec::append`]: ../../std/vec/struct.Vec.html#method.append - #[stable(feature = "rust1", since = "1.0.0")] - pub fn copy_nonoverlapping(src: *const T, dst: *mut T, count: usize); - - /// Copies `count * size_of::()` bytes from `src` to `dst`. The source - /// and destination may overlap. - /// - /// If the source and destination will *never* overlap, - /// [`copy_nonoverlapping`] can be used instead. - /// - /// `copy` is semantically equivalent to C's [`memmove`], but with the argument - /// order swapped. Copying takes place as if the bytes were copied from `src` - /// to a temporary array and then copied from the array to `dst`. - /// - /// [`copy_nonoverlapping`]: ./fn.copy_nonoverlapping.html - /// [`memmove`]: https://en.cppreference.com/w/c/string/byte/memmove - /// - /// # Safety - /// - /// Behavior is undefined if any of the following conditions are violated: - /// - /// * `src` must be [valid] for reads of `count * size_of::()` bytes. - /// - /// * `dst` must be [valid] for writes of `count * size_of::()` bytes. - /// - /// * Both `src` and `dst` must be properly aligned. - /// - /// Like [`read`], `copy` creates a bitwise copy of `T`, regardless of - /// whether `T` is [`Copy`]. If `T` is not [`Copy`], using both the values - /// in the region beginning at `*src` and the region beginning at `*dst` can - /// [violate memory safety][read-ownership]. - /// - /// Note that even if the effectively copied size (`count * size_of::()`) is - /// `0`, the pointers must be non-NULL and properly aligned. 
- /// - /// [`Copy`]: ../marker/trait.Copy.html - /// [`read`]: ../ptr/fn.read.html - /// [read-ownership]: ../ptr/fn.read.html#ownership-of-the-returned-value - /// [valid]: ../ptr/index.html#safety - /// - /// # Examples - /// - /// Efficiently create a Rust vector from an unsafe buffer: - /// - /// ``` - /// use std::ptr; - /// - /// # #[allow(dead_code)] - /// unsafe fn from_buf_raw(ptr: *const T, elts: usize) -> Vec { - /// let mut dst = Vec::with_capacity(elts); - /// dst.set_len(elts); - /// ptr::copy(ptr, dst.as_mut_ptr(), elts); - /// dst - /// } - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - pub fn copy(src: *const T, dst: *mut T, count: usize); - - /// Sets `count * size_of::()` bytes of memory starting at `dst` to - /// `val`. - /// - /// `write_bytes` is similar to C's [`memset`], but sets `count * - /// size_of::()` bytes to `val`. - /// - /// [`memset`]: https://en.cppreference.com/w/c/string/byte/memset - /// - /// # Safety - /// - /// Behavior is undefined if any of the following conditions are violated: - /// - /// * `dst` must be [valid] for writes of `count * size_of::()` bytes. - /// - /// * `dst` must be properly aligned. - /// - /// Additionally, the caller must ensure that writing `count * - /// size_of::()` bytes to the given region of memory results in a valid - /// value of `T`. Using a region of memory typed as a `T` that contains an - /// invalid value of `T` is undefined behavior. - /// - /// Note that even if the effectively copied size (`count * size_of::()`) is - /// `0`, the pointer must be non-NULL and properly aligned. - /// - /// [valid]: ../ptr/index.html#safety - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use std::ptr; - /// - /// let mut vec = vec![0u32; 4]; - /// unsafe { - /// let vec_ptr = vec.as_mut_ptr(); - /// ptr::write_bytes(vec_ptr, 0xfe, 2); - /// } - /// assert_eq!(vec, [0xfefefefe, 0xfefefefe, 0, 0]); - /// ``` - /// - /// Creating an invalid value: - /// - /// ``` - /// use std::ptr; - /// - /// let mut v = Box::new(0i32); - /// - /// unsafe { - /// // Leaks the previously held value by overwriting the `Box` with - /// // a null pointer. - /// ptr::write_bytes(&mut v as *mut Box, 0, 1); - /// } - /// - /// // At this point, using or dropping `v` results in undefined behavior. - /// // drop(v); // ERROR - /// - /// // Even leaking `v` "uses" it, and hence is undefined behavior. - /// // mem::forget(v); // ERROR - /// - /// // In fact, `v` is invalid according to basic type layout invariants, so *any* - /// // operation touching it is undefined behavior. - /// // let v2 = v; // ERROR - /// - /// unsafe { - /// // Let us instead put in a valid value - /// ptr::write(&mut v as *mut Box, Box::new(42i32)); - /// } - /// - /// // Now the box is fine - /// assert_eq!(*v, 42); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - pub fn write_bytes(dst: *mut T, val: u8, count: usize); - /// Equivalent to the appropriate `llvm.memcpy.p0i8.0i8.*` intrinsic, with /// a size of `count` * `size_of::()` and an alignment of /// `min_align_of::()` @@ -1524,3 +1309,252 @@ extern "rust-intrinsic" { /// Probably will never become stable. pub fn nontemporal_store(ptr: *mut T, val: T); } + +mod real_intrinsics { + extern "rust-intrinsic" { + /// Copies `count * size_of::()` bytes from `src` to `dst`. The source + /// and destination must *not* overlap. + /// For the full docs, see the stabilized wrapper [`copy_nonoverlapping`]. 
+ /// + /// [`copy_nonoverlapping`]: ../../std/ptr/fn.copy_nonoverlapping.html + pub fn copy_nonoverlapping(src: *const T, dst: *mut T, count: usize); + + /// Copies `count * size_of::()` bytes from `src` to `dst`. The source + /// and destination may overlap. + /// For the full docs, see the stabilized wrapper [`copy`]. + /// + /// [`copy`]: ../../std/ptr/fn.copy.html + pub fn copy(src: *const T, dst: *mut T, count: usize); + + /// Sets `count * size_of::()` bytes of memory starting at `dst` to + /// `val`. + /// For the full docs, see the stabilized wrapper [`write_bytes`]. + /// + /// [`write_bytes`]: ../../std/ptr/fn.write_bytes.html + pub fn write_bytes(dst: *mut T, val: u8, count: usize); + } +} + +/// Copies `count * size_of::()` bytes from `src` to `dst`. The source +/// and destination must *not* overlap. +/// +/// For regions of memory which might overlap, use [`copy`] instead. +/// +/// `copy_nonoverlapping` is semantically equivalent to C's [`memcpy`], but +/// with the argument order swapped. +/// +/// [`copy`]: ./fn.copy.html +/// [`memcpy`]: https://en.cppreference.com/w/c/string/byte/memcpy +/// +/// # Safety +/// +/// Behavior is undefined if any of the following conditions are violated: +/// +/// * `src` must be [valid] for reads of `count * size_of::()` bytes. +/// +/// * `dst` must be [valid] for writes of `count * size_of::()` bytes. +/// +/// * Both `src` and `dst` must be properly aligned. +/// +/// * The region of memory beginning at `src` with a size of `count * +/// size_of::()` bytes must *not* overlap with the region of memory +/// beginning at `dst` with the same size. +/// +/// Like [`read`], `copy_nonoverlapping` creates a bitwise copy of `T`, regardless of +/// whether `T` is [`Copy`]. If `T` is not [`Copy`], using *both* the values +/// in the region beginning at `*src` and the region beginning at `*dst` can +/// [violate memory safety][read-ownership]. +/// +/// Note that even if the effectively copied size (`count * size_of::()`) is +/// `0`, the pointers must be non-NULL and properly aligned. +/// +/// [`Copy`]: ../marker/trait.Copy.html +/// [`read`]: ../ptr/fn.read.html +/// [read-ownership]: ../ptr/fn.read.html#ownership-of-the-returned-value +/// [valid]: ../ptr/index.html#safety +/// +/// # Examples +/// +/// Manually implement [`Vec::append`]: +/// +/// ``` +/// use std::ptr; +/// +/// /// Moves all the elements of `src` into `dst`, leaving `src` empty. +/// fn append(dst: &mut Vec, src: &mut Vec) { +/// let src_len = src.len(); +/// let dst_len = dst.len(); +/// +/// // Ensure that `dst` has enough capacity to hold all of `src`. +/// dst.reserve(src_len); +/// +/// unsafe { +/// // The call to offset is always safe because `Vec` will never +/// // allocate more than `isize::MAX` bytes. +/// let dst_ptr = dst.as_mut_ptr().offset(dst_len as isize); +/// let src_ptr = src.as_ptr(); +/// +/// // Truncate `src` without dropping its contents. We do this first, +/// // to avoid problems in case something further down panics. +/// src.set_len(0); +/// +/// // The two regions cannot overlap because mutable references do +/// // not alias, and two different vectors cannot own the same +/// // memory. +/// ptr::copy_nonoverlapping(src_ptr, dst_ptr, src_len); +/// +/// // Notify `dst` that it now holds the contents of `src`. 
+/// dst.set_len(dst_len + src_len); +/// } +/// } +/// +/// let mut a = vec!['r']; +/// let mut b = vec!['u', 's', 't']; +/// +/// append(&mut a, &mut b); +/// +/// assert_eq!(a, &['r', 'u', 's', 't']); +/// assert!(b.is_empty()); +/// ``` +/// +/// [`Vec::append`]: ../../std/vec/struct.Vec.html#method.append +#[stable(feature = "rust1", since = "1.0.0")] +#[inline] +pub unsafe fn copy_nonoverlapping(src: *const T, dst: *mut T, count: usize) { + real_intrinsics::copy_nonoverlapping(src, dst, count); +} + +/// Copies `count * size_of::()` bytes from `src` to `dst`. The source +/// and destination may overlap. +/// +/// If the source and destination will *never* overlap, +/// [`copy_nonoverlapping`] can be used instead. +/// +/// `copy` is semantically equivalent to C's [`memmove`], but with the argument +/// order swapped. Copying takes place as if the bytes were copied from `src` +/// to a temporary array and then copied from the array to `dst`. +/// +/// [`copy_nonoverlapping`]: ./fn.copy_nonoverlapping.html +/// [`memmove`]: https://en.cppreference.com/w/c/string/byte/memmove +/// +/// # Safety +/// +/// Behavior is undefined if any of the following conditions are violated: +/// +/// * `src` must be [valid] for reads of `count * size_of::()` bytes. +/// +/// * `dst` must be [valid] for writes of `count * size_of::()` bytes. +/// +/// * Both `src` and `dst` must be properly aligned. +/// +/// Like [`read`], `copy` creates a bitwise copy of `T`, regardless of +/// whether `T` is [`Copy`]. If `T` is not [`Copy`], using both the values +/// in the region beginning at `*src` and the region beginning at `*dst` can +/// [violate memory safety][read-ownership]. +/// +/// Note that even if the effectively copied size (`count * size_of::()`) is +/// `0`, the pointers must be non-NULL and properly aligned. +/// +/// [`Copy`]: ../marker/trait.Copy.html +/// [`read`]: ../ptr/fn.read.html +/// [read-ownership]: ../ptr/fn.read.html#ownership-of-the-returned-value +/// [valid]: ../ptr/index.html#safety +/// +/// # Examples +/// +/// Efficiently create a Rust vector from an unsafe buffer: +/// +/// ``` +/// use std::ptr; +/// +/// # #[allow(dead_code)] +/// unsafe fn from_buf_raw(ptr: *const T, elts: usize) -> Vec { +/// let mut dst = Vec::with_capacity(elts); +/// dst.set_len(elts); +/// ptr::copy(ptr, dst.as_mut_ptr(), elts); +/// dst +/// } +/// ``` +#[stable(feature = "rust1", since = "1.0.0")] +#[inline] +pub unsafe fn copy(src: *const T, dst: *mut T, count: usize) { + real_intrinsics::copy(src, dst, count) +} + +/// Sets `count * size_of::()` bytes of memory starting at `dst` to +/// `val`. +/// +/// `write_bytes` is similar to C's [`memset`], but sets `count * +/// size_of::()` bytes to `val`. +/// +/// [`memset`]: https://en.cppreference.com/w/c/string/byte/memset +/// +/// # Safety +/// +/// Behavior is undefined if any of the following conditions are violated: +/// +/// * `dst` must be [valid] for writes of `count * size_of::()` bytes. +/// +/// * `dst` must be properly aligned. +/// +/// Additionally, the caller must ensure that writing `count * +/// size_of::()` bytes to the given region of memory results in a valid +/// value of `T`. Using a region of memory typed as a `T` that contains an +/// invalid value of `T` is undefined behavior. +/// +/// Note that even if the effectively copied size (`count * size_of::()`) is +/// `0`, the pointer must be non-NULL and properly aligned. 
+/// +/// [valid]: ../ptr/index.html#safety +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// use std::ptr; +/// +/// let mut vec = vec![0u32; 4]; +/// unsafe { +/// let vec_ptr = vec.as_mut_ptr(); +/// ptr::write_bytes(vec_ptr, 0xfe, 2); +/// } +/// assert_eq!(vec, [0xfefefefe, 0xfefefefe, 0, 0]); +/// ``` +/// +/// Creating an invalid value: +/// +/// ``` +/// use std::ptr; +/// +/// let mut v = Box::new(0i32); +/// +/// unsafe { +/// // Leaks the previously held value by overwriting the `Box` with +/// // a null pointer. +/// ptr::write_bytes(&mut v as *mut Box, 0, 1); +/// } +/// +/// // At this point, using or dropping `v` results in undefined behavior. +/// // drop(v); // ERROR +/// +/// // Even leaking `v` "uses" it, and hence is undefined behavior. +/// // mem::forget(v); // ERROR +/// +/// // In fact, `v` is invalid according to basic type layout invariants, so *any* +/// // operation touching it is undefined behavior. +/// // let v2 = v; // ERROR +/// +/// unsafe { +/// // Let us instead put in a valid value +/// ptr::write(&mut v as *mut Box, Box::new(42i32)); +/// } +/// +/// // Now the box is fine +/// assert_eq!(*v, 42); +/// ``` +#[stable(feature = "rust1", since = "1.0.0")] +#[inline] +pub unsafe fn write_bytes(dst: *mut T, val: u8, count: usize) { + real_intrinsics::write_bytes(dst, val, count) +} From 283ffcfce71df51f3d1da4d63441de150938cacd Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Mon, 11 Feb 2019 21:09:47 +0000 Subject: [PATCH 15/34] Check the self-type of inherent associated constants --- src/librustc_typeck/check/mod.rs | 26 ++++++++++++------- .../user-annotations/dump-adt-brace-struct.rs | 4 ++- .../dump-adt-brace-struct.stderr | 8 +++--- .../ui/nll/user-annotations/dump-fn-method.rs | 19 ++++++++++---- .../user-annotations/dump-fn-method.stderr | 20 +++++++------- .../inherent-associated-constants.rs | 17 ++++++++++++ .../inherent-associated-constants.stderr | 10 +++++++ 7 files changed, 74 insertions(+), 30 deletions(-) create mode 100644 src/test/ui/nll/user-annotations/inherent-associated-constants.rs create mode 100644 src/test/ui/nll/user-annotations/inherent-associated-constants.stderr diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index fb8f608812197..8d027b07188cc 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -2236,7 +2236,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { hir_id, def_id, substs, user_self_ty, self.tag(), ); - if !substs.is_noop() { + if Self::can_contain_user_lifetime_bounds((substs, user_self_ty)) { let canonicalized = self.infcx.canonicalize_user_type_annotation( &UserType::TypeOf(def_id, UserSubsts { substs, @@ -2431,15 +2431,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let ty = self.to_ty(ast_ty); debug!("to_ty_saving_user_provided_ty: ty={:?}", ty); - // If the type given by the user has free regions, save it for - // later, since NLL would like to enforce those. Also pass in - // types that involve projections, since those can resolve to - // `'static` bounds (modulo #54940, which hopefully will be - // fixed by the time you see this comment, dear reader, - // although I have my doubts). Also pass in types with inference - // types, because they may be repeated. Other sorts of things - // are already sufficiently enforced with erased regions. 
=) - if ty.has_free_regions() || ty.has_projections() || ty.has_infer_types() { + if Self::can_contain_user_lifetime_bounds(ty) { let c_ty = self.infcx.canonicalize_response(&UserType::Ty(ty)); debug!("to_ty_saving_user_provided_ty: c_ty={:?}", c_ty); self.tables.borrow_mut().user_provided_types_mut().insert(ast_ty.hir_id, c_ty); @@ -2448,6 +2440,20 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { ty } + // If the type given by the user has free regions, save it for later, since + // NLL would like to enforce those. Also pass in types that involve + // projections, since those can resolve to `'static` bounds (modulo #54940, + // which hopefully will be fixed by the time you see this comment, dear + // reader, although I have my doubts). Also pass in types with inference + // types, because they may be repeated. Other sorts of things are already + // sufficiently enforced with erased regions. =) + fn can_contain_user_lifetime_bounds(t: T) -> bool + where + T: TypeFoldable<'tcx> + { + t.has_free_regions() || t.has_projections() || t.has_infer_types() + } + pub fn node_ty(&self, id: hir::HirId) -> Ty<'tcx> { match self.tables.borrow().node_types().get(id) { Some(&t) => t, diff --git a/src/test/ui/nll/user-annotations/dump-adt-brace-struct.rs b/src/test/ui/nll/user-annotations/dump-adt-brace-struct.rs index 5dcd41078c787..45f56836d18b5 100644 --- a/src/test/ui/nll/user-annotations/dump-adt-brace-struct.rs +++ b/src/test/ui/nll/user-annotations/dump-adt-brace-struct.rs @@ -15,5 +15,7 @@ fn main() { SomeStruct::<_> { t: 22 }; // Nothing interesting given, no annotation. - SomeStruct:: { t: 22 }; //~ ERROR [u32] + SomeStruct:: { t: 22 }; // No lifetime bounds given. + + SomeStruct::<&'static u32> { t: &22 }; //~ ERROR [&ReStatic u32] } diff --git a/src/test/ui/nll/user-annotations/dump-adt-brace-struct.stderr b/src/test/ui/nll/user-annotations/dump-adt-brace-struct.stderr index 123c26195d006..6e24da094e0d2 100644 --- a/src/test/ui/nll/user-annotations/dump-adt-brace-struct.stderr +++ b/src/test/ui/nll/user-annotations/dump-adt-brace-struct.stderr @@ -1,8 +1,8 @@ -error: user substs: UserSubsts { substs: [u32], user_self_ty: None } - --> $DIR/dump-adt-brace-struct.rs:18:5 +error: user substs: UserSubsts { substs: [&ReStatic u32], user_self_ty: None } + --> $DIR/dump-adt-brace-struct.rs:20:5 | -LL | SomeStruct:: { t: 22 }; //~ ERROR [u32] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | SomeStruct::<&'static u32> { t: &22 }; //~ ERROR [&ReStatic u32] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: aborting due to previous error diff --git a/src/test/ui/nll/user-annotations/dump-fn-method.rs b/src/test/ui/nll/user-annotations/dump-fn-method.rs index 7551a9474dc08..b689f18c22593 100644 --- a/src/test/ui/nll/user-annotations/dump-fn-method.rs +++ b/src/test/ui/nll/user-annotations/dump-fn-method.rs @@ -11,7 +11,7 @@ trait Bazoom { fn method(&self, arg: T, arg2: U) { } } -impl Bazoom for T { +impl Bazoom for S { } fn foo<'a, T>(_: T) { } @@ -22,20 +22,29 @@ fn main() { let x = foo; x(22); - // Here: `u32` is given. - let x = foo::; //~ ERROR [u32] + // Here: `u32` is given, which doesn't contain any lifetimes, so we don't + // have any annotation. + let x = foo::; x(22); + let x = foo::<&'static u32>; //~ ERROR [&ReStatic u32] + x(&22); + // Here: we only want the `T` to be given, the rest should be variables. 
// // (`T` refers to the declaration of `Bazoom`) let x = <_ as Bazoom>::method::<_>; //~ ERROR [^0, u32, ^1] x(&22, 44, 66); - // Here: all are given - let x = >::method::; //~ ERROR [u8, u16, u32] + // Here: all are given and definitely contain no lifetimes, so we + // don't have any annotation. + let x = >::method::; x(&22, 44, 66); + // Here: all are given and we have a lifetime. + let x = >::method::; //~ ERROR [u8, &ReStatic u16, u32] + x(&22, &44, 66); + // Here: we want in particular that *only* the method `U` // annotation is given, the rest are variables. // diff --git a/src/test/ui/nll/user-annotations/dump-fn-method.stderr b/src/test/ui/nll/user-annotations/dump-fn-method.stderr index a1a4e43e8a3e9..04ceb8e5f8495 100644 --- a/src/test/ui/nll/user-annotations/dump-fn-method.stderr +++ b/src/test/ui/nll/user-annotations/dump-fn-method.stderr @@ -1,23 +1,23 @@ -error: user substs: UserSubsts { substs: [u32], user_self_ty: None } - --> $DIR/dump-fn-method.rs:26:13 +error: user substs: UserSubsts { substs: [&ReStatic u32], user_self_ty: None } + --> $DIR/dump-fn-method.rs:30:13 | -LL | let x = foo::; //~ ERROR [u32] - | ^^^^^^^^^^ +LL | let x = foo::<&'static u32>; //~ ERROR [&ReStatic u32] + | ^^^^^^^^^^^^^^^^^^^ error: user substs: UserSubsts { substs: [^0, u32, ^1], user_self_ty: None } - --> $DIR/dump-fn-method.rs:32:13 + --> $DIR/dump-fn-method.rs:36:13 | LL | let x = <_ as Bazoom>::method::<_>; //~ ERROR [^0, u32, ^1] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: user substs: UserSubsts { substs: [u8, u16, u32], user_self_ty: None } - --> $DIR/dump-fn-method.rs:36:13 +error: user substs: UserSubsts { substs: [u8, &ReStatic u16, u32], user_self_ty: None } + --> $DIR/dump-fn-method.rs:45:13 | -LL | let x = >::method::; //~ ERROR [u8, u16, u32] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +LL | let x = >::method::; //~ ERROR [u8, &ReStatic u16, u32] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: user substs: UserSubsts { substs: [^0, ^1, u32], user_self_ty: None } - --> $DIR/dump-fn-method.rs:44:5 + --> $DIR/dump-fn-method.rs:53:5 | LL | y.method::(44, 66); //~ ERROR [^0, ^1, u32] | ^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/test/ui/nll/user-annotations/inherent-associated-constants.rs b/src/test/ui/nll/user-annotations/inherent-associated-constants.rs new file mode 100644 index 0000000000000..2490187605ac1 --- /dev/null +++ b/src/test/ui/nll/user-annotations/inherent-associated-constants.rs @@ -0,0 +1,17 @@ +#![feature(nll)] + +struct A<'a>(&'a ()); + +impl A<'static> { + const IC: i32 = 10; +} + +fn non_wf_associated_const<'a>(x: i32) { + A::<'a>::IC; //~ ERROR lifetime may not live long enough +} + +fn wf_associated_const<'a>(x: i32) { + A::<'static>::IC; +} + +fn main() {} diff --git a/src/test/ui/nll/user-annotations/inherent-associated-constants.stderr b/src/test/ui/nll/user-annotations/inherent-associated-constants.stderr new file mode 100644 index 0000000000000..785b39ec887a0 --- /dev/null +++ b/src/test/ui/nll/user-annotations/inherent-associated-constants.stderr @@ -0,0 +1,10 @@ +error: lifetime may not live long enough + --> $DIR/inherent-associated-constants.rs:10:5 + | +LL | fn non_wf_associated_const<'a>(x: i32) { + | -- lifetime `'a` defined here +LL | A::<'a>::IC; //~ ERROR lifetime may not live long enough + | ^^^^^^^^^^^ requires that `'a` must outlive `'static` + +error: aborting due to previous error + From 235a6b7d065a2fc55ceee323e85b9309b16e84bf Mon Sep 17 00:00:00 2001 From: Oliver Scherer Date: Sat, 16 Feb 2019 12:32:22 +0100 Subject: [PATCH 16/34] Expose 
const -> op functions that don't allow violiting const eval invariants --- src/librustc_mir/const_eval.rs | 4 +- src/librustc_mir/interpret/operand.rs | 77 +++++++++++------------- src/librustc_mir/transform/const_prop.rs | 2 +- 3 files changed, 37 insertions(+), 46 deletions(-) diff --git a/src/librustc_mir/const_eval.rs b/src/librustc_mir/const_eval.rs index 7be7f4b439289..a4b2d6d36878d 100644 --- a/src/librustc_mir/const_eval.rs +++ b/src/librustc_mir/const_eval.rs @@ -476,7 +476,7 @@ pub fn const_field<'a, 'tcx>( let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env); let result = (|| { // get the operand again - let op = ecx.lazy_const_to_op(ty::LazyConst::Evaluated(value), value.ty)?; + let op = ecx.const_to_op(value, None)?; // downcast let down = match variant { None => op, @@ -502,7 +502,7 @@ pub fn const_variant_index<'a, 'tcx>( ) -> EvalResult<'tcx, VariantIdx> { trace!("const_variant_index: {:?}", val); let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env); - let op = ecx.lazy_const_to_op(ty::LazyConst::Evaluated(val), val.ty)?; + let op = ecx.const_to_op(val, None)?; Ok(ecx.read_discriminant(op)?.1) } diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs index 7da907028eebf..4f34ffc128e69 100644 --- a/src/librustc_mir/interpret/operand.rs +++ b/src/librustc_mir/interpret/operand.rs @@ -12,7 +12,7 @@ use rustc::mir::interpret::{ EvalResult, EvalErrorKind, }; use super::{ - EvalContext, Machine, AllocMap, Allocation, AllocationExtra, + EvalContext, Machine, MemPlace, MPlaceTy, PlaceTy, Place, MemoryKind, }; pub use rustc::mir::interpret::ScalarMaybeUndef; @@ -545,14 +545,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> Move(ref place) => self.eval_place_to_op(place, layout)?, - Constant(ref constant) => { - let layout = from_known_layout(layout, || { - let ty = self.monomorphize(mir_op.ty(self.mir(), *self.tcx))?; - self.layout_of(ty) - })?; - let op = self.const_value_to_op(*constant.literal)?; - OpTy { op, layout } - } + Constant(ref constant) => self.lazy_const_to_op(*constant.literal, layout)?, }; trace!("{:?}: {:?}", mir_op, *op); Ok(op) @@ -568,38 +561,55 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> .collect() } - // Used when Miri runs into a constant, and (indirectly through lazy_const_to_op) by CTFE. - fn const_value_to_op( + // Used when Miri runs into a constant, and by CTFE. + pub fn lazy_const_to_op( &self, val: ty::LazyConst<'tcx>, - ) -> EvalResult<'tcx, Operand> { - trace!("const_value_to_op: {:?}", val); - let val = match val { + layout: Option>, + ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> { + trace!("const_to_op: {:?}", val); + match val { ty::LazyConst::Unevaluated(def_id, substs) => { let instance = self.resolve(def_id, substs)?; - return Ok(*OpTy::from(self.const_eval_raw(GlobalId { + return Ok(OpTy::from(self.const_eval_raw(GlobalId { instance, promoted: None, })?)); }, - ty::LazyConst::Evaluated(c) => c, - }; - match val.val { + ty::LazyConst::Evaluated(c) => self.const_to_op(c, layout), + } + } + + // Used when Miri runs into a constant, and (indirectly through lazy_const_to_op) by CTFE. 
+ pub fn const_to_op( + &self, + val: ty::Const<'tcx>, + layout: Option>, + ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> { + let layout = from_known_layout(layout, || { + let ty = self.monomorphize(val.ty)?; + self.layout_of(ty) + })?; + let op = match val.val { ConstValue::ByRef(id, alloc, offset) => { // We rely on mutability being set correctly in that allocation to prevent writes // where none should happen -- and for `static mut`, we copy on demand anyway. - Ok(Operand::Indirect( + Operand::Indirect( MemPlace::from_ptr(Pointer::new(id, offset), alloc.align) - ).with_default_tag()) + ).with_default_tag() }, ConstValue::Slice(a, b) => - Ok(Operand::Immediate(Immediate::ScalarPair( + Operand::Immediate(Immediate::ScalarPair( a.into(), Scalar::from_uint(b, self.tcx.data_layout.pointer_size).into(), - )).with_default_tag()), + )).with_default_tag(), ConstValue::Scalar(x) => - Ok(Operand::Immediate(Immediate::Scalar(x.into())).with_default_tag()), - } + Operand::Immediate(Immediate::Scalar(x.into())).with_default_tag(), + }; + Ok(OpTy { + op, + layout, + }) } /// Read discriminant, return the runtime value as well as the variant index. @@ -699,23 +709,4 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> } }) } - -} - -impl<'a, 'mir, 'tcx, M> EvalContext<'a, 'mir, 'tcx, M> -where - M: Machine<'a, 'mir, 'tcx, PointerTag=()>, - // FIXME: Working around https://github.com/rust-lang/rust/issues/24159 - M::MemoryMap: AllocMap, Allocation<(), M::AllocExtra>)>, - M::AllocExtra: AllocationExtra<(), M::MemoryExtra>, -{ - // FIXME: CTFE should use allocations, then we can remove this. - pub(crate) fn lazy_const_to_op( - &self, - cnst: ty::LazyConst<'tcx>, - ty: ty::Ty<'tcx>, - ) -> EvalResult<'tcx, OpTy<'tcx>> { - let op = self.const_value_to_op(cnst)?; - Ok(OpTy { op, layout: self.layout_of(ty)? }) - } } diff --git a/src/librustc_mir/transform/const_prop.rs b/src/librustc_mir/transform/const_prop.rs index 7da00c4ea0c36..d69a5130b24d0 100644 --- a/src/librustc_mir/transform/const_prop.rs +++ b/src/librustc_mir/transform/const_prop.rs @@ -253,7 +253,7 @@ impl<'a, 'mir, 'tcx> ConstPropagator<'a, 'mir, 'tcx> { source_info: SourceInfo, ) -> Option> { self.ecx.tcx.span = source_info.span; - match self.ecx.lazy_const_to_op(*c.literal, c.ty) { + match self.ecx.lazy_const_to_op(*c.literal, None) { Ok(op) => { Some((op, c.span)) }, From bd18cc5708328600a94a02444caf27a5fb6ca20c Mon Sep 17 00:00:00 2001 From: Oliver Scherer Date: Sat, 16 Feb 2019 12:36:23 +0100 Subject: [PATCH 17/34] Remove an intermediate value from discriminant reading --- src/librustc_mir/interpret/operand.rs | 2 +- src/librustc_mir/interpret/step.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs index 4f34ffc128e69..a638c008e760f 100644 --- a/src/librustc_mir/interpret/operand.rs +++ b/src/librustc_mir/interpret/operand.rs @@ -508,7 +508,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> // Evaluate a place with the goal of reading from it. This lets us sometimes // avoid allocations. 
- fn eval_place_to_op( + pub(super) fn eval_place_to_op( &self, mir_place: &mir::Place<'tcx>, layout: Option>, diff --git a/src/librustc_mir/interpret/step.rs b/src/librustc_mir/interpret/step.rs index 97ef2b5fa3485..656c13c16d9ed 100644 --- a/src/librustc_mir/interpret/step.rs +++ b/src/librustc_mir/interpret/step.rs @@ -266,8 +266,8 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> } Discriminant(ref place) => { - let place = self.eval_place(place)?; - let discr_val = self.read_discriminant(self.place_to_op(place)?)?.0; + let op = self.eval_place_to_op(place, None)?; + let discr_val = self.read_discriminant(op)?.0; let size = dest.layout.size; self.write_scalar(Scalar::from_uint(discr_val, size), dest)?; } From b2bf37aec075099a8b58c1a52ebca944f318d530 Mon Sep 17 00:00:00 2001 From: Oliver Scherer Date: Sat, 16 Feb 2019 13:27:54 +0100 Subject: [PATCH 18/34] Burn some invariants we keep up into code --- src/librustc_mir/const_eval.rs | 2 +- src/librustc_mir/interpret/operand.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/librustc_mir/const_eval.rs b/src/librustc_mir/const_eval.rs index a4b2d6d36878d..9eac125d7a434 100644 --- a/src/librustc_mir/const_eval.rs +++ b/src/librustc_mir/const_eval.rs @@ -76,7 +76,7 @@ pub fn op_to_const<'tcx>( _ => false, }; let normalized_op = if normalize { - ecx.try_read_immediate(op)? + Ok(*ecx.read_immediate(op).expect("normalization works on validated constants")) } else { match *op { Operand::Indirect(mplace) => Err(mplace), diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs index a638c008e760f..0394ad2b0a65f 100644 --- a/src/librustc_mir/interpret/operand.rs +++ b/src/librustc_mir/interpret/operand.rs @@ -269,7 +269,7 @@ pub(super) fn from_known_layout<'tcx>( impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> { /// Try reading an immediate in memory; this is interesting particularly for ScalarPair. /// Returns `None` if the layout does not permit loading this as a value. - pub(super) fn try_read_immediate_from_mplace( + fn try_read_immediate_from_mplace( &self, mplace: MPlaceTy<'tcx, M::PointerTag>, ) -> EvalResult<'tcx, Option>> { @@ -323,7 +323,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> /// Note that for a given layout, this operation will either always fail or always /// succeed! Whether it succeeds depends on whether the layout can be represented /// in a `Immediate`, not on which data is stored there currently. 
- pub(crate) fn try_read_immediate( + pub(super) fn try_read_immediate( &self, src: OpTy<'tcx, M::PointerTag>, ) -> EvalResult<'tcx, Result, MemPlace>> { From f7c493121d4989895dd9c213ed4e877429229b86 Mon Sep 17 00:00:00 2001 From: Oliver Scherer Date: Sat, 16 Feb 2019 14:29:27 +0100 Subject: [PATCH 19/34] Reuse the `Pointer` type instead of passing reassembling it at many use sites --- src/librustc/ich/impls_ty.rs | 2 +- src/librustc/mir/interpret/value.rs | 5 ++--- src/librustc/ty/structural_impls.rs | 4 ++-- src/librustc_codegen_llvm/consts.rs | 2 +- src/librustc_codegen_ssa/mir/operand.rs | 4 ++-- src/librustc_codegen_ssa/mir/place.rs | 4 ++-- src/librustc_mir/const_eval.rs | 2 +- src/librustc_mir/hair/pattern/_match.rs | 12 +++++------- src/librustc_mir/interpret/operand.rs | 4 ++-- src/librustc_mir/monomorphize/collector.rs | 2 +- src/librustc_typeck/check/mod.rs | 2 +- 11 files changed, 20 insertions(+), 23 deletions(-) diff --git a/src/librustc/ich/impls_ty.rs b/src/librustc/ich/impls_ty.rs index 6f04a68a6ed61..04e03d0d95aa2 100644 --- a/src/librustc/ich/impls_ty.rs +++ b/src/librustc/ich/impls_ty.rs @@ -307,7 +307,7 @@ impl_stable_hash_for!( impl<'tcx> for enum mir::interpret::ConstValue<'tcx> [ mir::interpret::ConstValue ] { Scalar(val), Slice(a, b), - ByRef(id, alloc, offset), + ByRef(ptr, alloc), } ); impl_stable_hash_for!(struct crate::mir::interpret::RawConst<'tcx> { diff --git a/src/librustc/mir/interpret/value.rs b/src/librustc/mir/interpret/value.rs index 5ec7de4308a13..1e5ba2c176bd2 100644 --- a/src/librustc/mir/interpret/value.rs +++ b/src/librustc/mir/interpret/value.rs @@ -31,9 +31,8 @@ pub enum ConstValue<'tcx> { /// it. Slice(Scalar, u64), - /// An allocation together with an offset into the allocation. - /// Invariant: the `AllocId` matches the allocation. - ByRef(AllocId, &'tcx Allocation, Size), + /// An allocation together with a pointer into the allocation. 
+ ByRef(Pointer, &'tcx Allocation), } #[cfg(target_arch = "x86_64")] diff --git a/src/librustc/ty/structural_impls.rs b/src/librustc/ty/structural_impls.rs index d09cfa84a1690..0d1dc4ee2032f 100644 --- a/src/librustc/ty/structural_impls.rs +++ b/src/librustc/ty/structural_impls.rs @@ -499,8 +499,8 @@ impl<'a, 'tcx> Lift<'tcx> for ConstValue<'a> { match *self { ConstValue::Scalar(x) => Some(ConstValue::Scalar(x)), ConstValue::Slice(x, y) => Some(ConstValue::Slice(x, y)), - ConstValue::ByRef(x, alloc, z) => Some(ConstValue::ByRef( - x, alloc.lift_to_tcx(tcx)?, z, + ConstValue::ByRef(ptr, alloc) => Some(ConstValue::ByRef( + ptr, alloc.lift_to_tcx(tcx)?, )), } } diff --git a/src/librustc_codegen_llvm/consts.rs b/src/librustc_codegen_llvm/consts.rs index ca9e2c87be237..26d3bd9124707 100644 --- a/src/librustc_codegen_llvm/consts.rs +++ b/src/librustc_codegen_llvm/consts.rs @@ -71,7 +71,7 @@ pub fn codegen_static_initializer( let static_ = cx.tcx.const_eval(param_env.and(cid))?; let alloc = match static_.val { - ConstValue::ByRef(_, alloc, n) if n.bytes() == 0 => alloc, + ConstValue::ByRef(ptr, alloc) if ptr.offset.bytes() == 0 => alloc, _ => bug!("static const eval returned {:#?}", static_), }; Ok((const_alloc_to_llvm(cx, alloc), alloc)) diff --git a/src/librustc_codegen_ssa/mir/operand.rs b/src/librustc_codegen_ssa/mir/operand.rs index 2c6d968bb032a..3cac1befaf4ef 100644 --- a/src/librustc_codegen_ssa/mir/operand.rs +++ b/src/librustc_codegen_ssa/mir/operand.rs @@ -101,8 +101,8 @@ impl<'a, 'tcx: 'a, V: CodegenObject> OperandRef<'tcx, V> { let b_llval = bx.cx().const_usize(b); OperandValue::Pair(a_llval, b_llval) }, - ConstValue::ByRef(_, alloc, offset) => { - return Ok(bx.load_operand(bx.cx().from_const_alloc(layout, alloc, offset))); + ConstValue::ByRef(ptr, alloc) => { + return Ok(bx.load_operand(bx.cx().from_const_alloc(layout, alloc, ptr.offset))); }, }; diff --git a/src/librustc_codegen_ssa/mir/place.rs b/src/librustc_codegen_ssa/mir/place.rs index 9d6826d8756b7..1edcbfead2c94 100644 --- a/src/librustc_codegen_ssa/mir/place.rs +++ b/src/librustc_codegen_ssa/mir/place.rs @@ -417,8 +417,8 @@ impl<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { let layout = cx.layout_of(self.monomorphize(&ty)); match bx.tcx().const_eval(param_env.and(cid)) { Ok(val) => match val.val { - mir::interpret::ConstValue::ByRef(_, alloc, offset) => { - bx.cx().from_const_alloc(layout, alloc, offset) + mir::interpret::ConstValue::ByRef(ptr, alloc) => { + bx.cx().from_const_alloc(layout, alloc, ptr.offset) } _ => bug!("promoteds should have an allocation: {:?}", val), }, diff --git a/src/librustc_mir/const_eval.rs b/src/librustc_mir/const_eval.rs index 9eac125d7a434..94b9f0eadd0e7 100644 --- a/src/librustc_mir/const_eval.rs +++ b/src/librustc_mir/const_eval.rs @@ -96,7 +96,7 @@ pub fn op_to_const<'tcx>( // FIXME shouldn't it be the case that `mark_static_initialized` has already // interned this? I thought that is the entire point of that `FinishStatic` stuff? 
let alloc = ecx.tcx.intern_const_alloc(alloc); - ConstValue::ByRef(ptr.alloc_id, alloc, ptr.offset) + ConstValue::ByRef(ptr, alloc) }, Ok(Immediate::Scalar(x)) => ConstValue::Scalar(x.not_undef()?), diff --git a/src/librustc_mir/hair/pattern/_match.rs b/src/librustc_mir/hair/pattern/_match.rs index 1c7e1aa4d71e0..bf2f0d5d81f47 100644 --- a/src/librustc_mir/hair/pattern/_match.rs +++ b/src/librustc_mir/hair/pattern/_match.rs @@ -172,7 +172,7 @@ use rustc::ty::{self, Ty, TyCtxt, TypeFoldable, Const}; use rustc::ty::layout::{Integer, IntegerExt, VariantIdx, Size}; use rustc::mir::Field; -use rustc::mir::interpret::{ConstValue, Pointer, Scalar}; +use rustc::mir::interpret::{ConstValue, Scalar}; use rustc::util::common::ErrorReported; use syntax::attr::{SignedInt, UnsignedInt}; @@ -214,9 +214,8 @@ impl<'a, 'tcx> LiteralExpander<'a, 'tcx> { match (val, &crty.sty, &rty.sty) { // the easy case, deref a reference (ConstValue::Scalar(Scalar::Ptr(p)), x, y) if x == y => ConstValue::ByRef( - p.alloc_id, + p, self.tcx.alloc_map.lock().unwrap_memory(p.alloc_id), - p.offset, ), // unsize array to slice if pattern is array but match value or other patterns are slice (ConstValue::Scalar(Scalar::Ptr(p)), ty::Array(t, n), ty::Slice(u)) => { @@ -1428,7 +1427,7 @@ fn slice_pat_covered_by_const<'tcx>( suffix: &[Pattern<'tcx>] ) -> Result { let data: &[u8] = match (const_val.val, &const_val.ty.sty) { - (ConstValue::ByRef(id, alloc, offset), ty::Array(t, n)) => { + (ConstValue::ByRef(ptr, alloc), ty::Array(t, n)) => { if *t != tcx.types.u8 { // FIXME(oli-obk): can't mix const patterns with slice patterns and get // any sort of exhaustiveness/unreachable check yet @@ -1436,7 +1435,6 @@ fn slice_pat_covered_by_const<'tcx>( // are definitely unreachable. return Ok(false); } - let ptr = Pointer::new(id, offset); let n = n.assert_usize(tcx).unwrap(); alloc.get_bytes(&tcx, ptr, Size::from_bytes(n)).unwrap() }, @@ -1778,8 +1776,8 @@ fn specialize<'p, 'a: 'p, 'tcx: 'a>( let (opt_ptr, n, ty) = match value.ty.sty { ty::TyKind::Array(t, n) => { match value.val { - ConstValue::ByRef(id, alloc, offset) => ( - Some((Pointer::new(id, offset), alloc)), + ConstValue::ByRef(ptr, alloc) => ( + Some((ptr, alloc)), n.unwrap_usize(cx.tcx), t, ), diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs index 0394ad2b0a65f..1d6aff749c593 100644 --- a/src/librustc_mir/interpret/operand.rs +++ b/src/librustc_mir/interpret/operand.rs @@ -591,11 +591,11 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> self.layout_of(ty) })?; let op = match val.val { - ConstValue::ByRef(id, alloc, offset) => { + ConstValue::ByRef(ptr, alloc) => { // We rely on mutability being set correctly in that allocation to prevent writes // where none should happen -- and for `static mut`, we copy on demand anyway. 
Operand::Indirect( - MemPlace::from_ptr(Pointer::new(id, offset), alloc.align) + MemPlace::from_ptr(ptr, alloc.align) ).with_default_tag() }, ConstValue::Slice(a, b) => diff --git a/src/librustc_mir/monomorphize/collector.rs b/src/librustc_mir/monomorphize/collector.rs index a76aa7454cbe4..32c9d5c0c4467 100644 --- a/src/librustc_mir/monomorphize/collector.rs +++ b/src/librustc_mir/monomorphize/collector.rs @@ -1257,7 +1257,7 @@ fn collect_const<'a, 'tcx>( ConstValue::Slice(Scalar::Ptr(ptr), _) | ConstValue::Scalar(Scalar::Ptr(ptr)) => collect_miri(tcx, ptr.alloc_id, output), - ConstValue::ByRef(_id, alloc, _offset) => { + ConstValue::ByRef(_ptr, alloc) => { for &((), id) in alloc.relocations.values() { collect_miri(tcx, id, output); } diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 91e44a1588268..cbc8fde53a263 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -1464,7 +1464,7 @@ fn maybe_check_static_with_link_section(tcx: TyCtxt, id: DefId, span: Span) { }; let param_env = ty::ParamEnv::reveal_all(); if let Ok(static_) = tcx.const_eval(param_env.and(cid)) { - let alloc = if let ConstValue::ByRef(_, allocation, _) = static_.val { + let alloc = if let ConstValue::ByRef(_, allocation) = static_.val { allocation } else { bug!("Matching on non-ByRef static") From 7db96a37e7b4b38cdd0354d715cecd56dfdd03b0 Mon Sep 17 00:00:00 2001 From: Oliver Scherer Date: Sat, 16 Feb 2019 14:33:54 +0100 Subject: [PATCH 20/34] Reintroduce the invariant comment for clarity --- src/librustc/mir/interpret/value.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/librustc/mir/interpret/value.rs b/src/librustc/mir/interpret/value.rs index 1e5ba2c176bd2..956182fc8b275 100644 --- a/src/librustc/mir/interpret/value.rs +++ b/src/librustc/mir/interpret/value.rs @@ -32,6 +32,7 @@ pub enum ConstValue<'tcx> { Slice(Scalar, u64), /// An allocation together with a pointer into the allocation. + /// Invariant: the pointer's `AllocId` resolves to the allocation. ByRef(Pointer, &'tcx Allocation), } From bee3c670dbf974d097f41447a1214b27bbf9acca Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sat, 16 Feb 2019 14:52:34 +0100 Subject: [PATCH 21/34] Update src/librustc_mir/interpret/operand.rs Co-Authored-By: oli-obk --- src/librustc_mir/interpret/operand.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs index 1d6aff749c593..3ec042efb26f8 100644 --- a/src/librustc_mir/interpret/operand.rs +++ b/src/librustc_mir/interpret/operand.rs @@ -580,7 +580,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> } } - // Used when Miri runs into a constant, and (indirectly through lazy_const_to_op) by CTFE. + // Used when Miri runs into a constant, and by CTFE. 
pub fn const_to_op( &self, val: ty::Const<'tcx>, From 525983a2a4ac3029dd9979d924ef444f48a4d7b3 Mon Sep 17 00:00:00 2001 From: Oliver Scherer Date: Fri, 15 Feb 2019 21:05:47 +0100 Subject: [PATCH 22/34] Make validity checking use `MPlaceTy` instead of `OpTy` --- src/librustc_mir/const_eval.rs | 12 +++++------- src/librustc_mir/interpret/place.rs | 2 +- src/librustc_mir/interpret/validity.rs | 23 +++++++++++------------ 3 files changed, 17 insertions(+), 20 deletions(-) diff --git a/src/librustc_mir/const_eval.rs b/src/librustc_mir/const_eval.rs index 94b9f0eadd0e7..3c2c0af0bae80 100644 --- a/src/librustc_mir/const_eval.rs +++ b/src/librustc_mir/const_eval.rs @@ -523,13 +523,11 @@ fn validate_and_turn_into_const<'a, 'tcx>( let cid = key.value; let ecx = mk_eval_cx(tcx, tcx.def_span(key.value.instance.def_id()), key.param_env); let val = (|| { - let op = ecx.raw_const_to_mplace(constant)?.into(); - // FIXME: Once the visitor infrastructure landed, change validation to - // work directly on `MPlaceTy`. - let mut ref_tracking = RefTracking::new(op); - while let Some((op, path)) = ref_tracking.todo.pop() { + let mplace = ecx.raw_const_to_mplace(constant)?; + let mut ref_tracking = RefTracking::new(mplace); + while let Some((mplace, path)) = ref_tracking.todo.pop() { ecx.validate_operand( - op, + mplace.into(), path, Some(&mut ref_tracking), true, // const mode @@ -538,7 +536,7 @@ fn validate_and_turn_into_const<'a, 'tcx>( // Now that we validated, turn this into a proper constant. let def_id = cid.instance.def.def_id(); let normalize = tcx.is_static(def_id).is_none() && cid.promoted.is_none(); - op_to_const(&ecx, op, normalize) + op_to_const(&ecx, mplace.into(), normalize) })(); val.map_err(|error| { diff --git a/src/librustc_mir/interpret/place.rs b/src/librustc_mir/interpret/place.rs index b29e09900f6b1..abc18f1364f87 100644 --- a/src/librustc_mir/interpret/place.rs +++ b/src/librustc_mir/interpret/place.rs @@ -58,7 +58,7 @@ impl<'tcx, Tag> ::std::ops::Deref for PlaceTy<'tcx, Tag> { } /// A MemPlace with its layout. Constructing it is only possible in this module. -#[derive(Copy, Clone, Debug)] +#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)] pub struct MPlaceTy<'tcx, Tag=()> { mplace: MemPlace, pub layout: TyLayout<'tcx>, diff --git a/src/librustc_mir/interpret/validity.rs b/src/librustc_mir/interpret/validity.rs index 8b97d9ded7449..0163d53f027cf 100644 --- a/src/librustc_mir/interpret/validity.rs +++ b/src/librustc_mir/interpret/validity.rs @@ -11,7 +11,7 @@ use rustc::mir::interpret::{ }; use super::{ - OpTy, Machine, EvalContext, ValueVisitor, + OpTy, Machine, EvalContext, ValueVisitor, MPlaceTy, }; macro_rules! validation_failure { @@ -74,13 +74,13 @@ pub enum PathElem { } /// State for tracking recursive validation of references -pub struct RefTracking<'tcx, Tag> { - pub seen: FxHashSet<(OpTy<'tcx, Tag>)>, - pub todo: Vec<(OpTy<'tcx, Tag>, Vec)>, +pub struct RefTracking { + pub seen: FxHashSet, + pub todo: Vec<(T, Vec)>, } -impl<'tcx, Tag: Copy+Eq+Hash> RefTracking<'tcx, Tag> { - pub fn new(op: OpTy<'tcx, Tag>) -> Self { +impl<'tcx, T: Copy + Eq + Hash> RefTracking { + pub fn new(op: T) -> Self { let mut ref_tracking = RefTracking { seen: FxHashSet::default(), todo: vec![(op, Vec::new())], @@ -151,7 +151,7 @@ struct ValidityVisitor<'rt, 'a: 'rt, 'mir: 'rt, 'tcx: 'a+'rt+'mir, M: Machine<'a /// starts must not be changed! `visit_fields` and `visit_array` rely on /// this stack discipline. 
path: Vec, - ref_tracking: Option<&'rt mut RefTracking<'tcx, M::PointerTag>>, + ref_tracking: Option<&'rt mut RefTracking>>, const_mode: bool, ecx: &'rt EvalContext<'a, 'mir, 'tcx, M>, } @@ -399,16 +399,15 @@ impl<'rt, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> // before. Proceed recursively even for integer pointers, no // reason to skip them! They are (recursively) valid for some ZST, // but not for others (e.g., `!` is a ZST). - let op = place.into(); - if ref_tracking.seen.insert(op) { - trace!("Recursing below ptr {:#?}", *op); + if ref_tracking.seen.insert(place) { + trace!("Recursing below ptr {:#?}", *place); // We need to clone the path anyway, make sure it gets created // with enough space for the additional `Deref`. let mut new_path = Vec::with_capacity(self.path.len()+1); new_path.clone_from(&self.path); new_path.push(PathElem::Deref); // Remember to come back to this later. - ref_tracking.todo.push((op, new_path)); + ref_tracking.todo.push((place, new_path)); } } } @@ -598,7 +597,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> &self, op: OpTy<'tcx, M::PointerTag>, path: Vec, - ref_tracking: Option<&mut RefTracking<'tcx, M::PointerTag>>, + ref_tracking: Option<&mut RefTracking>>, const_mode: bool, ) -> EvalResult<'tcx> { trace!("validate_operand: {:?}, {:?}", *op, op.layout.ty); From 27e438ad948f9e430281e77b0abe3885b64f3bd0 Mon Sep 17 00:00:00 2001 From: Oliver Scherer Date: Sat, 16 Feb 2019 14:51:53 +0100 Subject: [PATCH 23/34] Make `may_normalize` explicit in the type system --- src/librustc_mir/const_eval.rs | 72 ++++++++++++++++++---------------- 1 file changed, 38 insertions(+), 34 deletions(-) diff --git a/src/librustc_mir/const_eval.rs b/src/librustc_mir/const_eval.rs index 3c2c0af0bae80..2f8e3189d12e9 100644 --- a/src/librustc_mir/const_eval.rs +++ b/src/librustc_mir/const_eval.rs @@ -21,7 +21,7 @@ use syntax::ast::Mutability; use syntax::source_map::{Span, DUMMY_SP}; use crate::interpret::{self, - PlaceTy, MPlaceTy, MemPlace, OpTy, ImmTy, Operand, Immediate, Scalar, Pointer, + PlaceTy, MPlaceTy, MemPlace, OpTy, ImmTy, Immediate, Scalar, Pointer, RawConst, ConstValue, EvalResult, EvalError, EvalErrorKind, GlobalId, EvalContext, StackPopCleanup, Allocation, AllocId, MemoryKind, @@ -62,45 +62,46 @@ pub(crate) fn eval_promoted<'a, 'mir, 'tcx>( eval_body_using_ecx(&mut ecx, cid, Some(mir), param_env) } -// FIXME: These two conversion functions are bad hacks. We should just always use allocations. -pub fn op_to_const<'tcx>( +fn mplace_to_const<'tcx>( + ecx: &CompileTimeEvalContext<'_, '_, 'tcx>, + mplace: MPlaceTy<'tcx>, +) -> EvalResult<'tcx, ty::Const<'tcx>> { + let MemPlace { ptr, align, meta } = *mplace; + // extract alloc-offset pair + assert!(meta.is_none()); + let ptr = ptr.to_ptr()?; + let alloc = ecx.memory.get(ptr.alloc_id)?; + assert!(alloc.align >= align); + assert!(alloc.bytes.len() as u64 - ptr.offset.bytes() >= mplace.layout.size.bytes()); + let mut alloc = alloc.clone(); + alloc.align = align; + // FIXME shouldn't it be the case that `mark_static_initialized` has already + // interned this? I thought that is the entire point of that `FinishStatic` stuff? + let alloc = ecx.tcx.intern_const_alloc(alloc); + let val = ConstValue::ByRef(ptr, alloc); + Ok(ty::Const { val, ty: mplace.layout.ty }) +} + +fn op_to_const<'tcx>( ecx: &CompileTimeEvalContext<'_, '_, 'tcx>, op: OpTy<'tcx>, - may_normalize: bool, ) -> EvalResult<'tcx, ty::Const<'tcx>> { // We do not normalize just any data. Only scalar layout and slices. 
- let normalize = may_normalize - && match op.layout.abi { - layout::Abi::Scalar(..) => true, - layout::Abi::ScalarPair(..) => op.layout.ty.is_slice(), - _ => false, - }; + let normalize = match op.layout.abi { + layout::Abi::Scalar(..) => true, + layout::Abi::ScalarPair(..) => op.layout.ty.is_slice(), + _ => false, + }; let normalized_op = if normalize { - Ok(*ecx.read_immediate(op).expect("normalization works on validated constants")) + Err(*ecx.read_immediate(op).expect("normalization works on validated constants")) } else { - match *op { - Operand::Indirect(mplace) => Err(mplace), - Operand::Immediate(val) => Ok(val) - } + op.try_as_mplace() }; let val = match normalized_op { - Err(MemPlace { ptr, align, meta }) => { - // extract alloc-offset pair - assert!(meta.is_none()); - let ptr = ptr.to_ptr()?; - let alloc = ecx.memory.get(ptr.alloc_id)?; - assert!(alloc.align >= align); - assert!(alloc.bytes.len() as u64 - ptr.offset.bytes() >= op.layout.size.bytes()); - let mut alloc = alloc.clone(); - alloc.align = align; - // FIXME shouldn't it be the case that `mark_static_initialized` has already - // interned this? I thought that is the entire point of that `FinishStatic` stuff? - let alloc = ecx.tcx.intern_const_alloc(alloc); - ConstValue::ByRef(ptr, alloc) - }, - Ok(Immediate::Scalar(x)) => + Ok(mplace) => return mplace_to_const(ecx, mplace), + Err(Immediate::Scalar(x)) => ConstValue::Scalar(x.not_undef()?), - Ok(Immediate::ScalarPair(a, b)) => + Err(Immediate::ScalarPair(a, b)) => ConstValue::Slice(a.not_undef()?, b.to_usize(ecx)?), }; Ok(ty::Const { val, ty: op.layout.ty }) @@ -486,7 +487,7 @@ pub fn const_field<'a, 'tcx>( let field = ecx.operand_field(down, field.index() as u64)?; // and finally move back to the const world, always normalizing because // this is not called for statics. - op_to_const(&ecx, field, true) + op_to_const(&ecx, field) })(); result.map_err(|error| { let err = error_to_const_error(&ecx, error); @@ -535,8 +536,11 @@ fn validate_and_turn_into_const<'a, 'tcx>( } // Now that we validated, turn this into a proper constant. let def_id = cid.instance.def.def_id(); - let normalize = tcx.is_static(def_id).is_none() && cid.promoted.is_none(); - op_to_const(&ecx, mplace.into(), normalize) + if tcx.is_static(def_id).is_some() || cid.promoted.is_some() { + mplace_to_const(&ecx, mplace) + } else { + op_to_const(&ecx, mplace.into()) + } })(); val.map_err(|error| { From 4fdeb2da747231b03f85786f4ef6ce04d88da864 Mon Sep 17 00:00:00 2001 From: Oliver Scherer Date: Sat, 16 Feb 2019 14:54:02 +0100 Subject: [PATCH 24/34] Add `eval` prefix to clarify what the function does --- src/librustc_mir/interpret/operand.rs | 4 ++-- src/librustc_mir/transform/const_prop.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs index 3ec042efb26f8..08f6667039a34 100644 --- a/src/librustc_mir/interpret/operand.rs +++ b/src/librustc_mir/interpret/operand.rs @@ -545,7 +545,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> Move(ref place) => self.eval_place_to_op(place, layout)?, - Constant(ref constant) => self.lazy_const_to_op(*constant.literal, layout)?, + Constant(ref constant) => self.eval_lazy_const_to_op(*constant.literal, layout)?, }; trace!("{:?}: {:?}", mir_op, *op); Ok(op) @@ -562,7 +562,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> } // Used when Miri runs into a constant, and by CTFE. 
- pub fn lazy_const_to_op( + pub fn eval_lazy_const_to_op( &self, val: ty::LazyConst<'tcx>, layout: Option>, diff --git a/src/librustc_mir/transform/const_prop.rs b/src/librustc_mir/transform/const_prop.rs index d69a5130b24d0..1acc7b6e0c57f 100644 --- a/src/librustc_mir/transform/const_prop.rs +++ b/src/librustc_mir/transform/const_prop.rs @@ -253,7 +253,7 @@ impl<'a, 'mir, 'tcx> ConstPropagator<'a, 'mir, 'tcx> { source_info: SourceInfo, ) -> Option> { self.ecx.tcx.span = source_info.span; - match self.ecx.lazy_const_to_op(*c.literal, None) { + match self.ecx.eval_lazy_const_to_op(*c.literal, None) { Ok(op) => { Some((op, c.span)) }, From 4b085337bbb3434604f90503538e0211eb3bb8f6 Mon Sep 17 00:00:00 2001 From: Oliver Scherer Date: Sat, 16 Feb 2019 15:05:14 +0100 Subject: [PATCH 25/34] Update docs and visibilities of const to op methods --- src/librustc_mir/interpret/operand.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs index 08f6667039a34..311c5c6d10be3 100644 --- a/src/librustc_mir/interpret/operand.rs +++ b/src/librustc_mir/interpret/operand.rs @@ -561,7 +561,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> .collect() } - // Used when Miri runs into a constant, and by CTFE. + // Used when Miri runs into a constant, and by const propagation. pub fn eval_lazy_const_to_op( &self, val: ty::LazyConst<'tcx>, @@ -580,8 +580,8 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> } } - // Used when Miri runs into a constant, and by CTFE. - pub fn const_to_op( + // Used when the miri-engine runs into a constant. + crate fn const_to_op( &self, val: ty::Const<'tcx>, layout: Option>, From 1fe7eb00944c3c41059e16daa7b401bc8b04447c Mon Sep 17 00:00:00 2001 From: Oliver Scherer Date: Sat, 16 Feb 2019 15:16:02 +0100 Subject: [PATCH 26/34] Limit the visibility further and expand on a comment --- src/librustc_mir/interpret/operand.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs index 311c5c6d10be3..de362a7a96a7f 100644 --- a/src/librustc_mir/interpret/operand.rs +++ b/src/librustc_mir/interpret/operand.rs @@ -562,7 +562,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> } // Used when Miri runs into a constant, and by const propagation. - pub fn eval_lazy_const_to_op( + crate fn eval_lazy_const_to_op( &self, val: ty::LazyConst<'tcx>, layout: Option>, @@ -580,7 +580,8 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> } } - // Used when the miri-engine runs into a constant. + // Used when the miri-engine runs into a constant and for extracting information from constants + // in patterns via the `const_eval` module crate fn const_to_op( &self, val: ty::Const<'tcx>, From d26bf742db0754893567401e49ae8b016c878a92 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Fri, 15 Feb 2019 08:31:44 +1100 Subject: [PATCH 27/34] Change `Token::interpolated_to_tokenstream()`. It is currently a method of `Token`, but it only is valid to call if `self` is a `Token::Interpolated`. This commit eliminates the possibility of misuse by changing it to an associated function that takes a `Nonterminal`, which also simplifies the call sites. This requires splitting out a new function, `nonterminal_to_string`. 
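The payoff is easiest to see as a type-level change: a method that panics unless `self` happens to be the `Interpolated` variant becomes an associated function whose argument can only ever be that variant's payload, so the misuse cannot be written at all. Below is a small self-contained sketch of the pattern; `Token`, `Nonterminal` and `interpolated_to_stream` here are hypothetical stand-ins, not the real `libsyntax` definitions.

```
use std::rc::Rc;

enum Nonterminal {
    Expr(String),
}

enum Token {
    Plus,
    Interpolated(Rc<Nonterminal>),
}

impl Token {
    // After the change: the caller must already hold a `Nonterminal`, so the
    // "called on the wrong variant" failure mode disappears at compile time.
    fn interpolated_to_stream(nt: &Nonterminal) -> String {
        match nt {
            Nonterminal::Expr(src) => format!("tokens for `{}`", src),
        }
    }
}

fn main() {
    let tokens = vec![
        Token::Plus,
        Token::Interpolated(Rc::new(Nonterminal::Expr("1 + 1".into()))),
    ];
    for tok in &tokens {
        // Call sites match on the variant once and pass the payload along,
        // which is the same reshaping the hunks below apply to `lower_token`
        // and to the proc-macro server.
        if let Token::Interpolated(nt) = tok {
            println!("{}", Token::interpolated_to_stream(nt));
        }
    }
}
```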
--- src/librustc/hir/lowering.rs | 10 ++--- src/libsyntax/parse/token.rs | 12 ++---- src/libsyntax/print/pprust.rs | 52 ++++++++++++++------------ src/libsyntax_ext/proc_macro_server.rs | 4 +- 4 files changed, 38 insertions(+), 40 deletions(-) diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 84487c40f8745..bbbd38cfed7e4 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -1131,12 +1131,12 @@ impl<'a> LoweringContext<'a> { fn lower_token(&mut self, token: Token, span: Span) -> TokenStream { match token { - Token::Interpolated(_) => {} - other => return TokenTree::Token(span, other).into(), + Token::Interpolated(nt) => { + let tts = Token::interpolated_to_tokenstream(&self.sess.parse_sess, nt, span); + self.lower_token_stream(tts) + } + other => TokenTree::Token(span, other).into(), } - - let tts = token.interpolated_to_tokenstream(&self.sess.parse_sess, span); - self.lower_token_stream(tts) } fn lower_arm(&mut self, arm: &Arm) -> hir::Arm { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index ff7f3e0bfaef3..976eea2bb54b2 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -508,14 +508,8 @@ impl Token { } } - pub fn interpolated_to_tokenstream(&self, sess: &ParseSess, span: Span) - -> TokenStream - { - let nt = match *self { - Token::Interpolated(ref nt) => nt, - _ => panic!("only works on interpolated tokens"), - }; - + pub fn interpolated_to_tokenstream(sess: &ParseSess, nt: Lrc<(Nonterminal, LazyTokenStream)>, + span: Span) -> TokenStream { // An `Interpolated` token means that we have a `Nonterminal` // which is often a parsed AST item. At this point we now need // to convert the parsed AST to an actual token stream, e.g. @@ -558,7 +552,7 @@ impl Token { let tokens_for_real = nt.1.force(|| { // FIXME(#43081): Avoid this pretty-print + reparse hack - let source = pprust::token_to_string(self); + let source = pprust::nonterminal_to_string(&nt.0); let filename = FileName::macro_expansion_source_code(&source); let (tokens, errors) = parse_stream_from_source_str( filename, source, sess, Some(span)); diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index cdf805176a293..0e48e3a5dff2b 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -4,7 +4,7 @@ use crate::ast::{Attribute, MacDelimiter, GenericArg}; use crate::util::parser::{self, AssocOp, Fixity}; use crate::attr; use crate::source_map::{self, SourceMap, Spanned}; -use crate::parse::token::{self, BinOpToken, Token}; +use crate::parse::token::{self, BinOpToken, Nonterminal, Token}; use crate::parse::lexer::comments; use crate::parse::{self, ParseSess}; use crate::print::pp::{self, Breaks}; @@ -257,29 +257,33 @@ pub fn token_to_string(tok: &Token) -> String { token::Comment => "/* */".to_string(), token::Shebang(s) => format!("/* shebang: {}*/", s), - token::Interpolated(ref nt) => match nt.0 { - token::NtExpr(ref e) => expr_to_string(e), - token::NtMeta(ref e) => meta_item_to_string(e), - token::NtTy(ref e) => ty_to_string(e), - token::NtPath(ref e) => path_to_string(e), - token::NtItem(ref e) => item_to_string(e), - token::NtBlock(ref e) => block_to_string(e), - token::NtStmt(ref e) => stmt_to_string(e), - token::NtPat(ref e) => pat_to_string(e), - token::NtIdent(e, false) => ident_to_string(e), - token::NtIdent(e, true) => format!("r#{}", ident_to_string(e)), - token::NtLifetime(e) => ident_to_string(e), - token::NtLiteral(ref e) => expr_to_string(e), - token::NtTT(ref tree) 
=> tt_to_string(tree.clone()), - token::NtArm(ref e) => arm_to_string(e), - token::NtImplItem(ref e) => impl_item_to_string(e), - token::NtTraitItem(ref e) => trait_item_to_string(e), - token::NtGenerics(ref e) => generic_params_to_string(&e.params), - token::NtWhereClause(ref e) => where_clause_to_string(e), - token::NtArg(ref e) => arg_to_string(e), - token::NtVis(ref e) => vis_to_string(e), - token::NtForeignItem(ref e) => foreign_item_to_string(e), - } + token::Interpolated(ref nt) => nonterminal_to_string(&nt.0), + } +} + +pub fn nonterminal_to_string(nt: &Nonterminal) -> String { + match *nt { + token::NtExpr(ref e) => expr_to_string(e), + token::NtMeta(ref e) => meta_item_to_string(e), + token::NtTy(ref e) => ty_to_string(e), + token::NtPath(ref e) => path_to_string(e), + token::NtItem(ref e) => item_to_string(e), + token::NtBlock(ref e) => block_to_string(e), + token::NtStmt(ref e) => stmt_to_string(e), + token::NtPat(ref e) => pat_to_string(e), + token::NtIdent(e, false) => ident_to_string(e), + token::NtIdent(e, true) => format!("r#{}", ident_to_string(e)), + token::NtLifetime(e) => ident_to_string(e), + token::NtLiteral(ref e) => expr_to_string(e), + token::NtTT(ref tree) => tt_to_string(tree.clone()), + token::NtArm(ref e) => arm_to_string(e), + token::NtImplItem(ref e) => impl_item_to_string(e), + token::NtTraitItem(ref e) => trait_item_to_string(e), + token::NtGenerics(ref e) => generic_params_to_string(&e.params), + token::NtWhereClause(ref e) => where_clause_to_string(e), + token::NtArg(ref e) => arg_to_string(e), + token::NtVis(ref e) => vis_to_string(e), + token::NtForeignItem(ref e) => foreign_item_to_string(e), } } diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs index fd82dac5ab6d8..60ce65baa48a3 100644 --- a/src/libsyntax_ext/proc_macro_server.rs +++ b/src/libsyntax_ext/proc_macro_server.rs @@ -178,8 +178,8 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec)> tt!(Punct::new('#', false)) } - Interpolated(_) => { - let stream = token.interpolated_to_tokenstream(sess, span); + Interpolated(nt) => { + let stream = Token::interpolated_to_tokenstream(sess, nt, span); TokenTree::Group(Group { delimiter: Delimiter::None, stream, From f8801f3bf641f0277087e6621d09f9a6a373b36c Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Fri, 15 Feb 2019 09:10:02 +1100 Subject: [PATCH 28/34] Remove `LazyTokenStream`. It's present within `Token::Interpolated` as an optimization, so that if a nonterminal is converted to a `TokenStream` multiple times, the first-computed value is saved and reused. But in practice it's not needed. `interpolated_to_tokenstream()` is a cold function: it's only called a few dozen times while compiling rustc itself, and a few hundred times across the entire `rustc-perf` suite. Furthermore, when it is called, it is almost always the first conversion, so no benefit is gained from it. So this commit removes `LazyTokenStream`, along with the now-unnecessary `Token::interpolated()`. As well as a significant simplification, the removal speeds things up slightly, mostly due to not having to `drop` the `LazyTokenStream` instances. 
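Concretely, what gets deleted is a compute-once cache wrapped around the converted token stream: the first `force` call runs the pretty-print-and-reparse conversion and stores the result, and only a second conversion of the same nonterminal would ever read the stored value back. A minimal sketch of that idea is below; `LazyStream` caching a `String` in a `RefCell` is a hypothetical stand-in for the real token-stream machinery.

```
use std::cell::RefCell;

// Hypothetical stand-in for the removed cache type.
struct LazyStream {
    cached: RefCell<Option<String>>,
}

impl LazyStream {
    fn new() -> Self {
        LazyStream { cached: RefCell::new(None) }
    }

    // Run `convert` on the first call, hand back the stored result afterwards.
    fn force(&self, convert: impl FnOnce() -> String) -> String {
        let mut slot = self.cached.borrow_mut();
        if slot.is_none() {
            *slot = Some(convert());
        }
        slot.clone().unwrap()
    }
}

fn main() {
    let lazy = LazyStream::new();
    // The first conversion pays the full cost...
    let first = lazy.force(|| "reparsed token stream".to_string());
    // ...and only a repeat conversion of the same nonterminal benefits from
    // the cache. Per the numbers above, that repeat almost never happens, so
    // keeping (and dropping) the cache everywhere is a net loss.
    let second = lazy.force(|| unreachable!("already cached"));
    assert_eq!(first, second);
}
```

With the cache gone, `Token::Interpolated` can hold a plain `Lrc<Nonterminal>`, which is why most of the diff below is the mechanical rewrite of `nt.0` to `**nt` (or `*nt`) at the use sites.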
--- src/librustc/hir/map/def_collector.rs | 2 +- src/librustc_resolve/build_reduced_graph.rs | 2 +- src/libsyntax/attr/mod.rs | 4 +- src/libsyntax/ext/base.rs | 2 +- src/libsyntax/ext/expand.rs | 5 +- src/libsyntax/ext/tt/macro_parser.rs | 10 +- src/libsyntax/ext/tt/transcribe.rs | 3 +- src/libsyntax/mut_visit.rs | 5 +- src/libsyntax/parse/attr.rs | 4 +- src/libsyntax/parse/parser.rs | 16 +-- src/libsyntax/parse/token.rs | 113 +++++--------------- src/libsyntax/print/pprust.rs | 2 +- src/libsyntax_ext/deriving/custom.rs | 3 +- 13 files changed, 58 insertions(+), 113 deletions(-) diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs index 8fe10a85ef380..72aa9570cc2ff 100644 --- a/src/librustc/hir/map/def_collector.rs +++ b/src/librustc/hir/map/def_collector.rs @@ -339,7 +339,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { fn visit_token(&mut self, t: Token) { if let Token::Interpolated(nt) = t { - if let token::NtExpr(ref expr) = nt.0 { + if let token::NtExpr(ref expr) = *nt { if let ExprKind::Mac(..) = expr.node { self.visit_macro_invoc(expr.id); } diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index a82f8df154725..29de5308a3cdb 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -1025,7 +1025,7 @@ impl<'a, 'b> Visitor<'a> for BuildReducedGraphVisitor<'a, 'b> { fn visit_token(&mut self, t: Token) { if let Token::Interpolated(nt) = t { - if let token::NtExpr(ref expr) = nt.0 { + if let token::NtExpr(ref expr) = *nt { if let ast::ExprKind::Mac(..) = expr.node { self.visit_invoc(expr.id); } diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index 420f7426ad786..b5fc850731404 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -517,7 +517,7 @@ impl MetaItem { let span = span.with_hi(segments.last().unwrap().ident.span.hi()); Path { span, segments } } - Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match nt.0 { + Some(TokenTree::Token(_, Token::Interpolated(nt))) => match *nt { token::Nonterminal::NtIdent(ident, _) => Path::from_ident(ident), token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()), token::Nonterminal::NtPath(ref path) => path.clone(), @@ -682,7 +682,7 @@ impl LitKind { match token { Token::Ident(ident, false) if ident.name == "true" => Some(LitKind::Bool(true)), Token::Ident(ident, false) if ident.name == "false" => Some(LitKind::Bool(false)), - Token::Interpolated(ref nt) => match nt.0 { + Token::Interpolated(nt) => match *nt { token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node { ExprKind::Lit(ref lit) => Some(lit.node.clone()), _ => None, diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 5980261593dbf..452cc2f2c65cc 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -266,7 +266,7 @@ impl TTMacroExpander for F impl MutVisitor for AvoidInterpolatedIdents { fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) { if let tokenstream::TokenTree::Token(_, token::Interpolated(nt)) = tt { - if let token::NtIdent(ident, is_raw) = nt.0 { + if let token::NtIdent(ident, is_raw) = **nt { *tt = tokenstream::TokenTree::Token(ident.span, token::Ident(ident, is_raw)); } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index d398437d7affc..55a6471e3688d 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -25,6 +25,7 @@ use syntax_pos::{Span, DUMMY_SP, FileName}; use 
syntax_pos::hygiene::ExpnFormat; use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::sync::Lrc; use std::fs; use std::io::ErrorKind; use std::{iter, mem}; @@ -586,14 +587,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } AttrProcMacro(ref mac, ..) => { self.gate_proc_macro_attr_item(attr.span, &item); - let item_tok = TokenTree::Token(DUMMY_SP, Token::interpolated(match item { + let item_tok = TokenTree::Token(DUMMY_SP, Token::Interpolated(Lrc::new(match item { Annotatable::Item(item) => token::NtItem(item), Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()), Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()), Annotatable::ForeignItem(item) => token::NtForeignItem(item.into_inner()), Annotatable::Stmt(stmt) => token::NtStmt(stmt.into_inner()), Annotatable::Expr(expr) => token::NtExpr(expr), - })).into(); + }))).into(); let input = self.extract_proc_macro_attr_input(attr.tokens, attr.span); let tok_result = mac.expand(self.cx, attr.span, input, item_tok); let res = self.parse_ast_fragment(tok_result, invoc.fragment_kind, diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 5de1ccec8609e..c2607ed530cf0 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -829,7 +829,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool { }, "block" => match *token { Token::OpenDelim(token::Brace) => true, - Token::Interpolated(ref nt) => match nt.0 { + Token::Interpolated(ref nt) => match **nt { token::NtItem(_) | token::NtPat(_) | token::NtTy(_) @@ -843,9 +843,9 @@ fn may_begin_with(name: &str, token: &Token) -> bool { }, "path" | "meta" => match *token { Token::ModSep | Token::Ident(..) => true, - Token::Interpolated(ref nt) => match nt.0 { + Token::Interpolated(ref nt) => match **nt { token::NtPath(_) | token::NtMeta(_) => true, - _ => may_be_ident(&nt.0), + _ => may_be_ident(&nt), }, _ => false, }, @@ -862,12 +862,12 @@ fn may_begin_with(name: &str, token: &Token) -> bool { Token::ModSep | // path Token::Lt | // path (UFCS constant) Token::BinOp(token::Shl) => true, // path (double UFCS) - Token::Interpolated(ref nt) => may_be_ident(&nt.0), + Token::Interpolated(ref nt) => may_be_ident(nt), _ => false, }, "lifetime" => match *token { Token::Lifetime(_) => true, - Token::Interpolated(ref nt) => match nt.0 { + Token::Interpolated(ref nt) => match **nt { token::NtLifetime(_) | token::NtTT(_) => true, _ => false, }, diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index b9a50cc6488dd..5c240e237f632 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -149,7 +149,8 @@ pub fn transcribe(cx: &ExtCtxt<'_>, result.push(tt.clone().into()); } else { sp = sp.apply_mark(cx.current_expansion.mark); - let token = TokenTree::Token(sp, Token::interpolated((**nt).clone())); + let token = + TokenTree::Token(sp, Token::Interpolated(Lrc::new((**nt).clone()))); result.push(token.into()); } } else { diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs index 1e5eb0992bd1b..59fae789188b0 100644 --- a/src/libsyntax/mut_visit.rs +++ b/src/libsyntax/mut_visit.rs @@ -581,9 +581,8 @@ pub fn noop_visit_token(t: &mut Token, vis: &mut T) { token::Ident(id, _is_raw) => vis.visit_ident(id), token::Lifetime(id) => vis.visit_ident(id), token::Interpolated(nt) => { - let nt = Lrc::make_mut(nt); - vis.visit_interpolated(&mut nt.0); - nt.1 = token::LazyTokenStream::new(); + let mut nt = Lrc::make_mut(nt); + 
vis.visit_interpolated(&mut nt); } _ => {} } diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index b36ca0574cb8d..9020c8c6a2dc6 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -141,7 +141,7 @@ impl<'a> Parser<'a> { /// The delimiters or `=` are still put into the resulting token stream. crate fn parse_meta_item_unrestricted(&mut self) -> PResult<'a, (ast::Path, TokenStream)> { let meta = match self.token { - token::Interpolated(ref nt) => match nt.0 { + token::Interpolated(ref nt) => match **nt { Nonterminal::NtMeta(ref meta) => Some(meta.clone()), _ => None, }, @@ -227,7 +227,7 @@ impl<'a> Parser<'a> { /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ; pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> { let nt_meta = match self.token { - token::Interpolated(ref nt) => match nt.0 { + token::Interpolated(ref nt) => match **nt { token::NtMeta(ref e) => Some(e.clone()), _ => None, }, diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index e22047938e518..559e992ee0d7a 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -119,7 +119,7 @@ enum BlockMode { macro_rules! maybe_whole_expr { ($p:expr) => { if let token::Interpolated(nt) = $p.token.clone() { - match nt.0 { + match *nt { token::NtExpr(ref e) | token::NtLiteral(ref e) => { $p.bump(); return Ok((*e).clone()); @@ -146,7 +146,7 @@ macro_rules! maybe_whole_expr { macro_rules! maybe_whole { ($p:expr, $constructor:ident, |$x:ident| $e:expr) => { if let token::Interpolated(nt) = $p.token.clone() { - if let token::$constructor($x) = nt.0.clone() { + if let token::$constructor($x) = (*nt).clone() { $p.bump(); return Ok($e); } @@ -1570,7 +1570,7 @@ impl<'a> Parser<'a> { Some(body) } token::Interpolated(ref nt) => { - match &nt.0 { + match **nt { token::NtBlock(..) => { *at_end = true; let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; @@ -1913,7 +1913,7 @@ impl<'a> Parser<'a> { fn is_named_argument(&mut self) -> bool { let offset = match self.token { - token::Interpolated(ref nt) => match nt.0 { + token::Interpolated(ref nt) => match **nt { token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon), _ => 0, } @@ -2099,7 +2099,7 @@ impl<'a> Parser<'a> { /// Matches `token_lit = LIT_INTEGER | ...`. fn parse_lit_token(&mut self) -> PResult<'a, LitKind> { let out = match self.token { - token::Interpolated(ref nt) => match nt.0 { + token::Interpolated(ref nt) => match **nt { token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node { ExprKind::Lit(ref lit) => { lit.node.clone() } _ => { return self.unexpected_last(&self.token); } @@ -2299,7 +2299,7 @@ impl<'a> Parser<'a> { /// attributes. pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> { let meta_ident = match self.token { - token::Interpolated(ref nt) => match nt.0 { + token::Interpolated(ref nt) => match **nt { token::NtMeta(ref meta) => match meta.node { ast::MetaItemKind::Word => Some(meta.ident.clone()), _ => None, @@ -3271,7 +3271,7 @@ impl<'a> Parser<'a> { self.meta_var_span = Some(self.span); // Interpolated identifier and lifetime tokens are replaced with usual identifier // and lifetime tokens, so the former are never encountered during normal parsing. 
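The repeated `nt.0` to `**nt` rewrites in these hunks all stem from the same change: `Token::Interpolated` now carries a plain `Lrc<Nonterminal>` instead of an `Lrc<(Nonterminal, LazyTokenStream)>` pair, so code reaches the nonterminal by dereferencing the shared pointer rather than projecting a tuple field. A minimal sketch of that double-deref match, using `std::rc::Rc` and made-up types rather than rustc's own:

```rust
use std::rc::Rc;

// Illustrative stand-ins, not rustc's real definitions.
enum Nonterminal {
    Expr(String),
    Other,
}

fn is_expr(nt: &Rc<Nonterminal>) -> bool {
    // The first `*` strips the reference, the second goes through the `Rc`,
    // mirroring the `match **nt { ... }` pattern used throughout the diff.
    match **nt {
        Nonterminal::Expr(_) => true,
        Nonterminal::Other => false,
    }
}

fn main() {
    assert!(is_expr(&Rc::new(Nonterminal::Expr(String::from("1 + 1")))));
    assert!(!is_expr(&Rc::new(Nonterminal::Other)));
}
```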
- match nt.0 { + match **nt { token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span), token::NtLifetime(ident) => (token::Lifetime(ident), ident.span), _ => return, @@ -3403,7 +3403,7 @@ impl<'a> Parser<'a> { // can't continue an expression after an ident token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw), token::Literal(..) | token::Pound => true, - token::Interpolated(ref nt) => match nt.0 { + token::Interpolated(ref nt) => match **nt { token::NtIdent(..) | token::NtExpr(..) | token::NtBlock(..) | token::NtPath(..) => true, _ => false, diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 976eea2bb54b2..a1d1a0f51baf0 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -13,16 +13,15 @@ use crate::syntax::parse::parse_stream_from_source_str; use crate::syntax::parse::parser::emit_unclosed_delims; use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree}; -use serialize::{Decodable, Decoder, Encodable, Encoder}; use syntax_pos::symbol::{self, Symbol}; use syntax_pos::{self, Span, FileName}; use log::info; -use std::{cmp, fmt}; +use std::fmt; use std::mem; #[cfg(target_arch = "x86_64")] use rustc_data_structures::static_assert; -use rustc_data_structures::sync::{Lrc, Lock}; +use rustc_data_structures::sync::Lrc; #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum BinOpToken { @@ -184,9 +183,8 @@ pub enum Token { Ident(ast::Ident, /* is_raw */ bool), Lifetime(ast::Ident), - // The `LazyTokenStream` is a pure function of the `Nonterminal`, - // and so the `LazyTokenStream` can be ignored by Eq, Hash, etc. - Interpolated(Lrc<(Nonterminal, LazyTokenStream)>), + Interpolated(Lrc), + // Can be expanded into several tokens. /// A doc comment. DocComment(ast::Name), @@ -209,10 +207,6 @@ pub enum Token { static_assert!(MEM_SIZE_OF_STATEMENT: mem::size_of::() == 16); impl Token { - pub fn interpolated(nt: Nonterminal) -> Token { - Token::Interpolated(Lrc::new((nt, LazyTokenStream::new()))) - } - /// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier if necessary. pub fn from_ast_ident(ident: ast::Ident) -> Token { Ident(ident, ident.is_raw_guess()) @@ -244,7 +238,7 @@ impl Token { ModSep | // global path Lifetime(..) | // labeled loop Pound => true, // expression attributes - Interpolated(ref nt) => match nt.0 { + Interpolated(ref nt) => match **nt { NtLiteral(..) | NtIdent(..) | NtExpr(..) | @@ -272,7 +266,7 @@ impl Token { Lifetime(..) | // lifetime bound in trait object Lt | BinOp(Shl) | // associated path ModSep => true, // global path - Interpolated(ref nt) => match nt.0 { + Interpolated(ref nt) => match **nt { NtIdent(..) | NtTy(..) | NtPath(..) | NtLifetime(..) => true, _ => false, }, @@ -284,7 +278,7 @@ impl Token { pub fn can_begin_const_arg(&self) -> bool { match self { OpenDelim(Brace) => true, - Interpolated(ref nt) => match nt.0 { + Interpolated(ref nt) => match **nt { NtExpr(..) => true, NtBlock(..) => true, NtLiteral(..) => true, @@ -316,7 +310,7 @@ impl Token { BinOp(Minus) => true, Ident(ident, false) if ident.name == keywords::True.name() => true, Ident(ident, false) if ident.name == keywords::False.name() => true, - Interpolated(ref nt) => match nt.0 { + Interpolated(ref nt) => match **nt { NtLiteral(..) 
=> true, _ => false, }, @@ -328,7 +322,7 @@ impl Token { pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> { match *self { Ident(ident, is_raw) => Some((ident, is_raw)), - Interpolated(ref nt) => match nt.0 { + Interpolated(ref nt) => match **nt { NtIdent(ident, is_raw) => Some((ident, is_raw)), _ => None, }, @@ -339,7 +333,7 @@ impl Token { pub fn lifetime(&self) -> Option { match *self { Lifetime(ident) => Some(ident), - Interpolated(ref nt) => match nt.0 { + Interpolated(ref nt) => match **nt { NtLifetime(ident) => Some(ident), _ => None, }, @@ -367,7 +361,7 @@ impl Token { /// Returns `true` if the token is an interpolated path. fn is_path(&self) -> bool { if let Interpolated(ref nt) = *self { - if let NtPath(..) = nt.0 { + if let NtPath(..) = **nt { return true; } } @@ -508,8 +502,8 @@ impl Token { } } - pub fn interpolated_to_tokenstream(sess: &ParseSess, nt: Lrc<(Nonterminal, LazyTokenStream)>, - span: Span) -> TokenStream { + pub fn interpolated_to_tokenstream(sess: &ParseSess, nt: Lrc, span: Span) + -> TokenStream { // An `Interpolated` token means that we have a `Nonterminal` // which is often a parsed AST item. At this point we now need // to convert the parsed AST to an actual token stream, e.g. @@ -524,41 +518,36 @@ impl Token { // stream they came from. Here we attempt to extract these // lossless token streams before we fall back to the // stringification. - let mut tokens = None; - - match nt.0 { + let tokens = match *nt { Nonterminal::NtItem(ref item) => { - tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span); + prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) } Nonterminal::NtTraitItem(ref item) => { - tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span); + prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) } Nonterminal::NtImplItem(ref item) => { - tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span); + prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) } Nonterminal::NtIdent(ident, is_raw) => { let token = Token::Ident(ident, is_raw); - tokens = Some(TokenTree::Token(ident.span, token).into()); + Some(TokenTree::Token(ident.span, token).into()) } Nonterminal::NtLifetime(ident) => { let token = Token::Lifetime(ident); - tokens = Some(TokenTree::Token(ident.span, token).into()); + Some(TokenTree::Token(ident.span, token).into()) } Nonterminal::NtTT(ref tt) => { - tokens = Some(tt.clone().into()); + Some(tt.clone().into()) } - _ => {} - } + _ => None, + }; - let tokens_for_real = nt.1.force(|| { - // FIXME(#43081): Avoid this pretty-print + reparse hack - let source = pprust::nonterminal_to_string(&nt.0); - let filename = FileName::macro_expansion_source_code(&source); - let (tokens, errors) = parse_stream_from_source_str( - filename, source, sess, Some(span)); - emit_unclosed_delims(&errors, &sess.span_diagnostic); - tokens - }); + // FIXME(#43081): Avoid this pretty-print + reparse hack + let source = pprust::nonterminal_to_string(&nt); + let filename = FileName::macro_expansion_source_code(&source); + let (tokens_for_real, errors) = + parse_stream_from_source_str(filename, source, sess, Some(span)); + emit_unclosed_delims(&errors, &sess.span_diagnostic); // During early phases of the compiler the AST could get modified // directly (e.g., attributes added or removed) and the internal cache @@ -734,52 +723,6 @@ crate fn is_op(tok: &Token) -> bool { } } -#[derive(Clone)] -pub struct LazyTokenStream(Lock>); - -impl cmp::Eq for LazyTokenStream {} -impl PartialEq for 
LazyTokenStream { - fn eq(&self, _other: &LazyTokenStream) -> bool { - true - } -} - -impl fmt::Debug for LazyTokenStream { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Debug::fmt(&self.clone().0.into_inner(), f) - } -} - -impl LazyTokenStream { - pub fn new() -> Self { - LazyTokenStream(Lock::new(None)) - } - - fn force TokenStream>(&self, f: F) -> TokenStream { - let mut opt_stream = self.0.lock(); - if opt_stream.is_none() { - *opt_stream = Some(f()); - } - opt_stream.clone().unwrap() - } -} - -impl Encodable for LazyTokenStream { - fn encode(&self, _: &mut S) -> Result<(), S::Error> { - Ok(()) - } -} - -impl Decodable for LazyTokenStream { - fn decode(_: &mut D) -> Result { - Ok(LazyTokenStream::new()) - } -} - -impl ::std::hash::Hash for LazyTokenStream { - fn hash(&self, _hasher: &mut H) {} -} - fn prepend_attrs(sess: &ParseSess, attrs: &[ast::Attribute], tokens: Option<&tokenstream::TokenStream>, diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 0e48e3a5dff2b..dcf9815f6d1ba 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -257,7 +257,7 @@ pub fn token_to_string(tok: &Token) -> String { token::Comment => "/* */".to_string(), token::Shebang(s) => format!("/* shebang: {}*/", s), - token::Interpolated(ref nt) => nonterminal_to_string(&nt.0), + token::Interpolated(ref nt) => nonterminal_to_string(nt), } } diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs index 6aba4d83cd27c..cfc3c931598a1 100644 --- a/src/libsyntax_ext/deriving/custom.rs +++ b/src/libsyntax_ext/deriving/custom.rs @@ -2,6 +2,7 @@ use crate::proc_macro_impl::EXEC_STRATEGY; use crate::proc_macro_server; use errors::FatalError; +use rustc_data_structures::sync::Lrc; use syntax::ast::{self, ItemKind, Attribute, Mac}; use syntax::attr::{mark_used, mark_known}; use syntax::source_map::Span; @@ -65,7 +66,7 @@ impl MultiItemModifier for ProcMacroDerive { // Mark attributes as known, and used. MarkAttrs(&self.attrs).visit_item(&item); - let token = Token::interpolated(token::NtItem(item)); + let token = Token::Interpolated(Lrc::new(token::NtItem(item))); let input = tokenstream::TokenTree::Token(DUMMY_SP, token).into(); let server = proc_macro_server::Rustc::new(ecx); From f0d8fbd283654479c50a2fec94bf2362eed0f189 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Fri, 15 Feb 2019 12:36:10 +1100 Subject: [PATCH 29/34] Avoid a `clone()` in `transcribe()`. The current code (expensively) clones the value within an `Rc`. This commit changes things so that the `Rc` itself is (cheaply) cloned instead, avoid some allocations. This requires converting a few `Rc` instances to `Lrc`. --- src/libsyntax/ext/tt/macro_parser.rs | 19 ++++++++++--------- src/libsyntax/ext/tt/transcribe.rs | 3 +-- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index c2607ed530cf0..fe1cffb092b1c 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -88,6 +88,7 @@ use smallvec::{smallvec, SmallVec}; use syntax_pos::Span; use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::sync::Lrc; use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::mem; use std::ops::{Deref, DerefMut}; @@ -179,7 +180,7 @@ struct MatcherPos<'root, 'tt: 'root> { /// all bound matches from the submatcher into the shared top-level `matches` vector. 
If `sep` /// and `up` are `Some`, then `matches` is _not_ the shared top-level list. Instead, if one /// wants the shared `matches`, one should use `up.matches`. - matches: Box<[Rc]>, + matches: Box<[Lrc]>, /// The position in `matches` corresponding to the first metavar in this matcher's sequence of /// token trees. In other words, the first metavar in the first token of `top_elts` corresponds /// to `matches[match_lo]`. @@ -218,7 +219,7 @@ struct MatcherPos<'root, 'tt: 'root> { impl<'root, 'tt> MatcherPos<'root, 'tt> { /// Adds `m` as a named match for the `idx`-th metavar. fn push_match(&mut self, idx: usize, m: NamedMatch) { - let matches = Rc::make_mut(&mut self.matches[idx]); + let matches = Lrc::make_mut(&mut self.matches[idx]); matches.push(m); } } @@ -295,11 +296,11 @@ pub fn count_names(ms: &[TokenTree]) -> usize { } /// `len` `Vec`s (initially shared and empty) that will store matches of metavars. -fn create_matches(len: usize) -> Box<[Rc]> { +fn create_matches(len: usize) -> Box<[Lrc]> { if len == 0 { vec![] } else { - let empty_matches = Rc::new(SmallVec::new()); + let empty_matches = Lrc::new(SmallVec::new()); vec![empty_matches; len] }.into_boxed_slice() } @@ -353,8 +354,8 @@ fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherP /// token tree it was derived from. #[derive(Debug, Clone)] pub enum NamedMatch { - MatchedSeq(Rc, DelimSpan), - MatchedNonterminal(Rc), + MatchedSeq(Lrc, DelimSpan), + MatchedNonterminal(Lrc), } /// Takes a sequence of token trees `ms` representing a matcher which successfully matched input @@ -561,7 +562,7 @@ fn inner_parse_loop<'root, 'tt>( new_item.match_cur += seq.num_captures; new_item.idx += 1; for idx in item.match_cur..item.match_cur + seq.num_captures { - new_item.push_match(idx, MatchedSeq(Rc::new(smallvec![]), sp)); + new_item.push_match(idx, MatchedSeq(Lrc::new(smallvec![]), sp)); } cur_items.push(new_item); } @@ -707,7 +708,7 @@ pub fn parse( let matches = eof_items[0] .matches .iter_mut() - .map(|dv| Rc::make_mut(dv).pop().unwrap()); + .map(|dv| Lrc::make_mut(dv).pop().unwrap()); return nameize(sess, ms, matches); } else if eof_items.len() > 1 { return Error( @@ -780,7 +781,7 @@ pub fn parse( let match_cur = item.match_cur; item.push_match( match_cur, - MatchedNonterminal(Rc::new(parse_nt(&mut parser, span, &ident.as_str()))), + MatchedNonterminal(Lrc::new(parse_nt(&mut parser, span, &ident.as_str()))), ); item.idx += 1; item.match_cur += 1; diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 5c240e237f632..bd2adb5ac13ba 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -149,8 +149,7 @@ pub fn transcribe(cx: &ExtCtxt<'_>, result.push(tt.clone().into()); } else { sp = sp.apply_mark(cx.current_expansion.mark); - let token = - TokenTree::Token(sp, Token::Interpolated(Lrc::new((**nt).clone()))); + let token = TokenTree::Token(sp, Token::Interpolated(nt.clone())); result.push(token.into()); } } else { From 82ad4f1f45a60995c0955e28bbed3885008e3ee5 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Mon, 18 Feb 2019 10:06:26 +1100 Subject: [PATCH 30/34] Make `interpolated_to_tokenstream` a method on `Nonterminal`. 
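A note on the two commits around this point: once the token holds an `Lrc<Nonterminal>`, callers can clone the shared handle, a cheap reference-count bump, instead of cloning the `Nonterminal` value itself, which is the saving described in the [PATCH 29/34] message above. A small sketch of that cost difference, written with plain `std::rc::Rc` on the assumption that `Lrc` aliases `Rc` or `Arc` depending on build configuration; the byte buffer is only a stand-in for a large parsed AST node:

```rust
use std::rc::Rc;

fn main() {
    // Stand-in for a large parsed AST node kept behind a shared pointer.
    let nt = Rc::new(vec![0u8; 1 << 20]);

    // Cloning the pointee copies the whole 1 MiB buffer.
    let deep_copy: Vec<u8> = (*nt).clone();

    // Cloning the handle only bumps the reference count.
    let cheap_handle = Rc::clone(&nt);

    assert_eq!(Rc::strong_count(&nt), 2);
    assert_eq!(deep_copy.len(), cheap_handle.len());
}
```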
--- src/librustc/hir/lowering.rs | 2 +- src/libsyntax/parse/token.rs | 163 ++++++++++++------------- src/libsyntax/tokenstream.rs | 4 +- src/libsyntax_ext/proc_macro_server.rs | 2 +- 4 files changed, 85 insertions(+), 86 deletions(-) diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index bbbd38cfed7e4..961e892789a81 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -1132,7 +1132,7 @@ impl<'a> LoweringContext<'a> { fn lower_token(&mut self, token: Token, span: Span) -> TokenStream { match token { Token::Interpolated(nt) => { - let tts = Token::interpolated_to_tokenstream(&self.sess.parse_sess, nt, span); + let tts = nt.to_tokenstream(&self.sess.parse_sess, span); self.lower_token_stream(tts) } other => TokenTree::Token(span, other).into(), diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index a1d1a0f51baf0..eec422d6266c3 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -86,7 +86,7 @@ impl Lit { } } - // See comments in `interpolated_to_tokenstream` for why we care about + // See comments in `Nonterminal::to_tokenstream` for why we care about // *probably* equal here rather than actual equality fn probably_equal_for_proc_macro(&self, other: &Lit) -> bool { mem::discriminant(self) == mem::discriminant(other) @@ -502,87 +502,7 @@ impl Token { } } - pub fn interpolated_to_tokenstream(sess: &ParseSess, nt: Lrc, span: Span) - -> TokenStream { - // An `Interpolated` token means that we have a `Nonterminal` - // which is often a parsed AST item. At this point we now need - // to convert the parsed AST to an actual token stream, e.g. - // un-parse it basically. - // - // Unfortunately there's not really a great way to do that in a - // guaranteed lossless fashion right now. The fallback here is - // to just stringify the AST node and reparse it, but this loses - // all span information. - // - // As a result, some AST nodes are annotated with the token - // stream they came from. Here we attempt to extract these - // lossless token streams before we fall back to the - // stringification. - let tokens = match *nt { - Nonterminal::NtItem(ref item) => { - prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) - } - Nonterminal::NtTraitItem(ref item) => { - prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) - } - Nonterminal::NtImplItem(ref item) => { - prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) - } - Nonterminal::NtIdent(ident, is_raw) => { - let token = Token::Ident(ident, is_raw); - Some(TokenTree::Token(ident.span, token).into()) - } - Nonterminal::NtLifetime(ident) => { - let token = Token::Lifetime(ident); - Some(TokenTree::Token(ident.span, token).into()) - } - Nonterminal::NtTT(ref tt) => { - Some(tt.clone().into()) - } - _ => None, - }; - - // FIXME(#43081): Avoid this pretty-print + reparse hack - let source = pprust::nonterminal_to_string(&nt); - let filename = FileName::macro_expansion_source_code(&source); - let (tokens_for_real, errors) = - parse_stream_from_source_str(filename, source, sess, Some(span)); - emit_unclosed_delims(&errors, &sess.span_diagnostic); - - // During early phases of the compiler the AST could get modified - // directly (e.g., attributes added or removed) and the internal cache - // of tokens my not be invalidated or updated. 
Consequently if the - // "lossless" token stream disagrees with our actual stringification - // (which has historically been much more battle-tested) then we go - // with the lossy stream anyway (losing span information). - // - // Note that the comparison isn't `==` here to avoid comparing spans, - // but it *also* is a "probable" equality which is a pretty weird - // definition. We mostly want to catch actual changes to the AST - // like a `#[cfg]` being processed or some weird `macro_rules!` - // expansion. - // - // What we *don't* want to catch is the fact that a user-defined - // literal like `0xf` is stringified as `15`, causing the cached token - // stream to not be literal `==` token-wise (ignoring spans) to the - // token stream we got from stringification. - // - // Instead the "probably equal" check here is "does each token - // recursively have the same discriminant?" We basically don't look at - // the token values here and assume that such fine grained token stream - // modifications, including adding/removing typically non-semantic - // tokens such as extra braces and commas, don't happen. - if let Some(tokens) = tokens { - if tokens.probably_equal_for_proc_macro(&tokens_for_real) { - return tokens - } - info!("cached tokens found, but they're not \"probably equal\", \ - going with stringified version"); - } - return tokens_for_real - } - - // See comments in `interpolated_to_tokenstream` for why we care about + // See comments in `Nonterminal::to_tokenstream` for why we care about // *probably* equal here rather than actual equality crate fn probably_equal_for_proc_macro(&self, other: &Token) -> bool { if mem::discriminant(self) != mem::discriminant(other) { @@ -714,6 +634,85 @@ impl fmt::Debug for Nonterminal { } } +impl Nonterminal { + pub fn to_tokenstream(&self, sess: &ParseSess, span: Span) -> TokenStream { + // A `Nonterminal` is often a parsed AST item. At this point we now + // need to convert the parsed AST to an actual token stream, e.g. + // un-parse it basically. + // + // Unfortunately there's not really a great way to do that in a + // guaranteed lossless fashion right now. The fallback here is to just + // stringify the AST node and reparse it, but this loses all span + // information. + // + // As a result, some AST nodes are annotated with the token stream they + // came from. Here we attempt to extract these lossless token streams + // before we fall back to the stringification. 
+ let tokens = match *self { + Nonterminal::NtItem(ref item) => { + prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) + } + Nonterminal::NtTraitItem(ref item) => { + prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) + } + Nonterminal::NtImplItem(ref item) => { + prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) + } + Nonterminal::NtIdent(ident, is_raw) => { + let token = Token::Ident(ident, is_raw); + Some(TokenTree::Token(ident.span, token).into()) + } + Nonterminal::NtLifetime(ident) => { + let token = Token::Lifetime(ident); + Some(TokenTree::Token(ident.span, token).into()) + } + Nonterminal::NtTT(ref tt) => { + Some(tt.clone().into()) + } + _ => None, + }; + + // FIXME(#43081): Avoid this pretty-print + reparse hack + let source = pprust::nonterminal_to_string(self); + let filename = FileName::macro_expansion_source_code(&source); + let (tokens_for_real, errors) = + parse_stream_from_source_str(filename, source, sess, Some(span)); + emit_unclosed_delims(&errors, &sess.span_diagnostic); + + // During early phases of the compiler the AST could get modified + // directly (e.g., attributes added or removed) and the internal cache + // of tokens my not be invalidated or updated. Consequently if the + // "lossless" token stream disagrees with our actual stringification + // (which has historically been much more battle-tested) then we go + // with the lossy stream anyway (losing span information). + // + // Note that the comparison isn't `==` here to avoid comparing spans, + // but it *also* is a "probable" equality which is a pretty weird + // definition. We mostly want to catch actual changes to the AST + // like a `#[cfg]` being processed or some weird `macro_rules!` + // expansion. + // + // What we *don't* want to catch is the fact that a user-defined + // literal like `0xf` is stringified as `15`, causing the cached token + // stream to not be literal `==` token-wise (ignoring spans) to the + // token stream we got from stringification. + // + // Instead the "probably equal" check here is "does each token + // recursively have the same discriminant?" We basically don't look at + // the token values here and assume that such fine grained token stream + // modifications, including adding/removing typically non-semantic + // tokens such as extra braces and commas, don't happen. + if let Some(tokens) = tokens { + if tokens.probably_equal_for_proc_macro(&tokens_for_real) { + return tokens + } + info!("cached tokens found, but they're not \"probably equal\", \ + going with stringified version"); + } + return tokens_for_real + } +} + crate fn is_op(tok: &Token) -> bool { match *tok { OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) 
|
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index c4f2cffb09708..283679e758b54 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -72,7 +72,7 @@ impl TokenTree {
         }
     }

-    // See comments in `interpolated_to_tokenstream` for why we care about
+    // See comments in `Nonterminal::to_tokenstream` for why we care about
     // *probably* equal here rather than actual equality
     //
     // This is otherwise the same as `eq_unspanned`, only recursing with a
@@ -310,7 +310,7 @@ impl TokenStream {
         t1.next().is_none() && t2.next().is_none()
     }

-    // See comments in `interpolated_to_tokenstream` for why we care about
+    // See comments in `Nonterminal::to_tokenstream` for why we care about
     // *probably* equal here rather than actual equality
     //
     // This is otherwise the same as `eq_unspanned`, only recursing with a
diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs
index 60ce65baa48a3..699539b62f515 100644
--- a/src/libsyntax_ext/proc_macro_server.rs
+++ b/src/libsyntax_ext/proc_macro_server.rs
@@ -179,7 +179,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
             }

             Interpolated(nt) => {
-                let stream = Token::interpolated_to_tokenstream(sess, nt, span);
+                let stream = nt.to_tokenstream(sess, span);
                 TokenTree::Group(Group {
                     delimiter: Delimiter::None,
                     stream,

From 895a79423bf5298e13a177ee6317f43380d437bc Mon Sep 17 00:00:00 2001
From: Nicholas Nethercote
Date: Mon, 18 Feb 2019 10:17:59 +1100
Subject: [PATCH 31/34] Remove some unnecessary `into()` calls.

These are probably leftovers from recent `TokenStream` simplifications.
---
 src/librustc/hir/lowering.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs
index 961e892789a81..f891e6470ae40 100644
--- a/src/librustc/hir/lowering.rs
+++ b/src/librustc/hir/lowering.rs
@@ -1124,7 +1124,7 @@ impl<'a> LoweringContext<'a> {
                 TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
                     span,
                     delim,
-                    self.lower_token_stream(tts.into()).into(),
+                    self.lower_token_stream(tts),
                 ).into(),
             }
         }

From 9312ca10b6cd672c4fa1da4b4a3cc232f7d7dde6 Mon Sep 17 00:00:00 2001
From: Scott McMurray
Date: Sun, 17 Feb 2019 23:55:45 -0800
Subject: [PATCH 32/34] Add a note about 2018e if someone uses `try {` in 2015e

---
 src/librustc_resolve/error_reporting.rs               | 4 ++++
 src/test/ui/try-block/try-block-in-edition2015.stderr | 2 ++
 2 files changed, 6 insertions(+)

diff --git a/src/librustc_resolve/error_reporting.rs b/src/librustc_resolve/error_reporting.rs
index 8300e691bbea4..fe9ae8b83007e 100644
--- a/src/librustc_resolve/error_reporting.rs
+++ b/src/librustc_resolve/error_reporting.rs
@@ -251,6 +251,10 @@ impl<'a> Resolver<'a> {
                     format!("{}!", path_str),
                     Applicability::MaybeIncorrect,
                 );
+                if path_str == "try" && span.rust_2015() {
+                    err.note("if you want the `try` keyword, \
+                              you need to be in the 2018 edition");
+                }
             }
             (Def::TyAlias(..), PathSource::Trait(_)) => {
                 err.span_label(span, "type aliases cannot be used as traits");
diff --git a/src/test/ui/try-block/try-block-in-edition2015.stderr b/src/test/ui/try-block/try-block-in-edition2015.stderr
index a7b81060d3dc6..7394fec6f3660 100644
--- a/src/test/ui/try-block/try-block-in-edition2015.stderr
+++ b/src/test/ui/try-block/try-block-in-edition2015.stderr
@@ -16,6 +16,8 @@ error[E0574]: expected struct, variant or union type, found macro `try`
    |
 LL |     let try_result: Option<_> = try {
    |                                 ^^^ help: use `!` to invoke the macro: `try!`
+   |
+   = note: if you want the `try` keyword, you need to be in the 2018 edition

 error: aborting due to 2 previous errors


From abb07c42ecd364940c16ecd740116b847d7bcffc Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Wed, 20 Feb 2019 09:49:52 +0300
Subject: [PATCH 33/34] remove a bit of dead code

---
 src/libsyntax/ext/build.rs | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index 48f6e4c0c8203..27b0cfb163077 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -9,12 +9,6 @@ use crate::ThinVec;
 use rustc_target::spec::abi::Abi;
 use syntax_pos::{Pos, Span, DUMMY_SP};

-// Transitional re-exports so qquote can find the paths it is looking for
-mod syntax {
-    pub use crate::ext;
-    pub use crate::parse;
-}
-
 pub trait AstBuilder {
     // paths
     fn path(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path;

From 65622e319e21d95bb7275109aecacb8e2526ce04 Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Wed, 20 Feb 2019 10:10:11 +0300
Subject: [PATCH 34/34] cleanup macro after 2018 transition

We can now use `?`
---
 src/libsyntax/ext/expand.rs | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index d398437d7affc..f50663f97853f 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -36,10 +36,8 @@ macro_rules! ast_fragments {
     (
         $($Kind:ident($AstTy:ty) {
             $kind_name:expr;
-            // FIXME: HACK: this should be `$(one ...)?` and `$(many ...)?` but `?` macro
-            // repetition was removed from 2015 edition in #51587 because of ambiguities.
-            $(one fn $mut_visit_ast:ident; fn $visit_ast:ident;)*
-            $(many fn $flat_map_ast_elt:ident; fn $visit_ast_elt:ident;)*
+            $(one fn $mut_visit_ast:ident; fn $visit_ast:ident;)?
+            $(many fn $flat_map_ast_elt:ident; fn $visit_ast_elt:ident;)?
             fn $make_ast:ident;
         })*
     ) => {
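For readers who have not used it, the `?` repetition adopted by the final patch matches its fragment zero or one times, exactly the "optional piece" that the deleted FIXME had to emulate with `*`. A short self-contained sketch of the operator; the macro and names below are invented for illustration and are not taken from `ast_fragments!`:

```rust
// `$( ... )?` expands the enclosed fragment at most once, so the optional
// `, $unit` argument can simply be left out by the caller.
macro_rules! label {
    ($val:expr $(, $unit:expr)?) => {{
        // `mut` is only needed when the optional arm is present.
        #[allow(unused_mut)]
        let mut s = $val.to_string();
        $( { s.push(' '); s.push_str($unit); } )?
        s
    }};
}

fn main() {
    assert_eq!(label!(3), "3");
    assert_eq!(label!(3, "cm"), "3 cm");
}
```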