From 0f34e0dabb1251de6769576105a0cd34d6a1db44 Mon Sep 17 00:00:00 2001 From: Shotaro Yamada Date: Sat, 12 Jan 2019 01:53:47 +0900 Subject: [PATCH 01/19] Add fmt benchmarks --- src/libcore/benches/fmt.rs | 110 +++++++++++++++++++++++++++++++++++++ src/libcore/benches/lib.rs | 1 + 2 files changed, 111 insertions(+) create mode 100644 src/libcore/benches/fmt.rs diff --git a/src/libcore/benches/fmt.rs b/src/libcore/benches/fmt.rs new file mode 100644 index 0000000000000..92f10c760c6d2 --- /dev/null +++ b/src/libcore/benches/fmt.rs @@ -0,0 +1,110 @@ +use std::io::{self, Write as IoWrite}; +use std::fmt::{self, Write as FmtWrite}; +use test::Bencher; + +#[bench] +fn write_vec_value(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = Vec::new(); + for _ in 0..1000 { + mem.write_all("abc".as_bytes()).unwrap(); + } + }); +} + +#[bench] +fn write_vec_ref(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = Vec::new(); + let wr = &mut mem as &mut dyn io::Write; + for _ in 0..1000 { + wr.write_all("abc".as_bytes()).unwrap(); + } + }); +} + +#[bench] +fn write_vec_macro1(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = Vec::new(); + let wr = &mut mem as &mut dyn io::Write; + for _ in 0..1000 { + write!(wr, "abc").unwrap(); + } + }); +} + +#[bench] +fn write_vec_macro2(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = Vec::new(); + let wr = &mut mem as &mut dyn io::Write; + for _ in 0..1000 { + write!(wr, "{}", "abc").unwrap(); + } + }); +} + +#[bench] +fn write_vec_macro_debug(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = Vec::new(); + let wr = &mut mem as &mut dyn io::Write; + for _ in 0..1000 { + write!(wr, "{:?}", "☃").unwrap(); + } + }); +} + +#[bench] +fn write_str_value(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = String::new(); + for _ in 0..1000 { + mem.write_str("abc").unwrap(); + } + }); +} + +#[bench] +fn write_str_ref(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = String::new(); + let wr = &mut mem as &mut dyn fmt::Write; + for _ in 0..1000 { + wr.write_str("abc").unwrap(); + } + }); +} + +#[bench] +fn write_str_macro1(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = String::new(); + for _ in 0..1000 { + write!(mem, "abc").unwrap(); + } + }); +} + +#[bench] +fn write_str_macro2(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = String::new(); + let wr = &mut mem as &mut dyn fmt::Write; + for _ in 0..1000 { + write!(wr, "{}", "abc").unwrap(); + } + }); +} + +#[bench] +fn write_str_macro_debug(bh: &mut Bencher) { + bh.iter(|| { + let mut mem = String::new(); + let wr = &mut mem as &mut dyn fmt::Write; + for _ in 0..1000 { + write!(wr, "{:?}", "☃").unwrap(); + } + }); +} diff --git a/src/libcore/benches/lib.rs b/src/libcore/benches/lib.rs index 5b4971c81dd92..48572af611a5b 100644 --- a/src/libcore/benches/lib.rs +++ b/src/libcore/benches/lib.rs @@ -11,3 +11,4 @@ mod iter; mod num; mod ops; mod slice; +mod fmt; From d7a7ce9edd487dc151426dcb6d89911cc741e605 Mon Sep 17 00:00:00 2001 From: Shotaro Yamada Date: Sat, 12 Jan 2019 01:53:59 +0900 Subject: [PATCH 02/19] Utilize specialized zip iterator impl name old ns/iter new ns/iter diff ns/iter diff % speedup fmt::write_str_macro1 13,927 12,489 -1,438 -10.33% x 1.12 fmt::write_str_macro2 24,633 23,418 -1,215 -4.93% x 1.05 fmt::write_str_macro_debug 234,633 233,092 -1,541 -0.66% x 1.01 fmt::write_str_ref 5,819 5,823 4 0.07% x 1.00 fmt::write_str_value 6,012 5,828 -184 -3.06% x 1.03 fmt::write_vec_macro1 18,550 17,143 -1,407 -7.58% x 1.08 fmt::write_vec_macro2 30,369 28,920 -1,449 -4.77% x 1.05 fmt::write_vec_macro_debug 244,338 
244,901 563 0.23% x 1.00 fmt::write_vec_ref 5,952 5,885 -67 -1.13% x 1.01 fmt::write_vec_value 5,944 5,894 -50 -0.84% x 1.01 --- src/libcore/fmt/mod.rs | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/libcore/fmt/mod.rs b/src/libcore/fmt/mod.rs index ec1aeb8a7d1e9..306a715f02d6e 100644 --- a/src/libcore/fmt/mod.rs +++ b/src/libcore/fmt/mod.rs @@ -1026,28 +1026,30 @@ pub fn write(output: &mut dyn Write, args: Arguments) -> Result { curarg: args.args.iter(), }; - let mut pieces = args.pieces.iter(); + let mut idx = 0; match args.fmt { None => { // We can use default formatting parameters for all arguments. - for (arg, piece) in args.args.iter().zip(pieces.by_ref()) { + for (arg, piece) in args.args.iter().zip(args.pieces.iter()) { formatter.buf.write_str(*piece)?; (arg.formatter)(arg.value, &mut formatter)?; + idx += 1; } } Some(fmt) => { // Every spec has a corresponding argument that is preceded by // a string piece. - for (arg, piece) in fmt.iter().zip(pieces.by_ref()) { + for (arg, piece) in fmt.iter().zip(args.pieces.iter()) { formatter.buf.write_str(*piece)?; formatter.run(arg)?; + idx += 1; } } } // There can be only one trailing string piece left. - if let Some(piece) = pieces.next() { + if let Some(piece) = args.pieces.get(idx) { formatter.buf.write_str(*piece)?; } From 038d8372244ab088ea186e10704e2bfc4e83f477 Mon Sep 17 00:00:00 2001 From: Shotaro Yamada Date: Sat, 12 Jan 2019 13:30:03 +0900 Subject: [PATCH 03/19] Fix simple formatting optimization name old2 ns/iter new2 ns/iter diff ns/iter diff % speedup fmt::write_str_macro1 12,295 12,308 13 0.11% x 1.00 fmt::write_str_macro2 24,079 21,451 -2,628 -10.91% x 1.12 fmt::write_str_macro_debug 238,363 230,807 -7,556 -3.17% x 1.03 fmt::write_str_ref 6,203 6,064 -139 -2.24% x 1.02 fmt::write_str_value 6,225 6,075 -150 -2.41% x 1.02 fmt::write_vec_macro1 17,144 17,121 -23 -0.13% x 1.00 fmt::write_vec_macro2 29,845 26,703 -3,142 -10.53% x 1.12 fmt::write_vec_macro_debug 248,840 242,117 -6,723 -2.70% x 1.03 fmt::write_vec_ref 5,954 6,438 484 8.13% x 0.92 fmt::write_vec_value 5,959 6,439 480 8.06% x 0.93 --- src/libfmt_macros/lib.rs | 9 +++++++++ src/libsyntax_ext/format.rs | 5 ++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/src/libfmt_macros/lib.rs b/src/libfmt_macros/lib.rs index 32ae878909f30..da440cdd72f80 100644 --- a/src/libfmt_macros/lib.rs +++ b/src/libfmt_macros/lib.rs @@ -72,6 +72,15 @@ pub enum Position<'a> { ArgumentNamed(&'a str), } +impl Position<'_> { + pub fn index(&self) -> Option { + match self { + ArgumentIs(i) | ArgumentImplicitlyIs(i) => Some(*i), + _ => None, + } + } +} + /// Enum of alignments which are supported. 
#[derive(Copy, Clone, PartialEq)] pub enum Alignment { diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index 61722ba551653..0613c78e49590 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -493,7 +493,10 @@ impl<'a, 'b> Context<'a, 'b> { let fill = arg.format.fill.unwrap_or(' '); - if *arg != simple_arg || fill != ' ' { + let pos_simple = + arg.position.index() == simple_arg.position.index(); + + if !pos_simple || arg.format != simple_arg.format || fill != ' ' { self.all_pieces_simple = false; } From 04c74f46f0a0ecf886f1c12b51483d38690fac22 Mon Sep 17 00:00:00 2001 From: Stjepan Glavina Date: Sun, 13 Jan 2019 11:16:14 +0100 Subject: [PATCH 04/19] Add core::iter::once_with --- src/libcore/iter/mod.rs | 2 + src/libcore/iter/sources.rs | 108 ++++++++++++++++++++++++++++++++++++ src/libcore/lib.rs | 1 + src/libcore/tests/iter.rs | 17 ++++++ src/libcore/tests/lib.rs | 1 + 5 files changed, 129 insertions(+) diff --git a/src/libcore/iter/mod.rs b/src/libcore/iter/mod.rs index 03369d6c8f3fd..f647a61a584c6 100644 --- a/src/libcore/iter/mod.rs +++ b/src/libcore/iter/mod.rs @@ -329,6 +329,8 @@ pub use self::sources::{RepeatWith, repeat_with}; pub use self::sources::{Empty, empty}; #[stable(feature = "iter_once", since = "1.2.0")] pub use self::sources::{Once, once}; +#[unstable(feature = "iter_once_with", issue = "0")] +pub use self::sources::{OnceWith, once_with}; #[unstable(feature = "iter_unfold", issue = "55977")] pub use self::sources::{Unfold, unfold, Successors, successors}; diff --git a/src/libcore/iter/sources.rs b/src/libcore/iter/sources.rs index 2a39089a8a229..d183fa3a7c233 100644 --- a/src/libcore/iter/sources.rs +++ b/src/libcore/iter/sources.rs @@ -377,6 +377,114 @@ pub fn once(value: T) -> Once { Once { inner: Some(value).into_iter() } } +/// An iterator that repeats elements of type `A` endlessly by +/// applying the provided closure `F: FnMut() -> A`. +/// +/// This `struct` is created by the [`once_with`] function. +/// See its documentation for more. +/// +/// [`once_with`]: fn.once_with.html +#[derive(Copy, Clone, Debug)] +#[unstable(feature = "iter_once_with", issue = "0")] +pub struct OnceWith { + gen: Option, +} + +#[unstable(feature = "iter_once_with", issue = "0")] +impl A> Iterator for OnceWith { + type Item = A; + + #[inline] + fn next(&mut self) -> Option { + self.gen.take().map(|f| f()) + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.gen.iter().size_hint() + } +} + +#[unstable(feature = "iter_once_with", issue = "0")] +impl A> DoubleEndedIterator for OnceWith { + fn next_back(&mut self) -> Option { + self.next() + } +} + +#[unstable(feature = "iter_once_with", issue = "0")] +impl A> ExactSizeIterator for OnceWith { + fn len(&self) -> usize { + self.gen.iter().len() + } +} + +#[unstable(feature = "iter_once_with", issue = "0")] +impl A> FusedIterator for OnceWith {} + +#[unstable(feature = "iter_once_with", issue = "0")] +unsafe impl A> TrustedLen for OnceWith {} + +/// Creates an iterator that lazily generates a value exactly once by invoking +/// the provided closure. +/// +/// This is commonly used to adapt a single value generator into a [`chain`] of +/// other kinds of iteration. Maybe you have an iterator that covers almost +/// everything, but you need an extra special case. Maybe you have a function +/// which works on iterators, but you only need to process one value. +/// +/// Unlike [`once`], this function will lazily generate the value on request. 
+/// +/// [`once`]: fn.once.html +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// use std::iter; +/// +/// // one is the loneliest number +/// let mut one = iter::once_with(|| 1); +/// +/// assert_eq!(Some(1), one.next()); +/// +/// // just one, that's all we get +/// assert_eq!(None, one.next()); +/// ``` +/// +/// Chaining together with another iterator. Let's say that we want to iterate +/// over each file of the `.foo` directory, but also a configuration file, +/// `.foorc`: +/// +/// ```no_run +/// use std::iter; +/// use std::fs; +/// use std::path::PathBuf; +/// +/// let dirs = fs::read_dir(".foo").unwrap(); +/// +/// // we need to convert from an iterator of DirEntry-s to an iterator of +/// // PathBufs, so we use map +/// let dirs = dirs.map(|file| file.unwrap().path()); +/// +/// // now, our iterator just for our config file +/// let config = iter::once_with(|| PathBuf::from(".foorc")); +/// +/// // chain the two iterators together into one big iterator +/// let files = dirs.chain(config); +/// +/// // this will give us all of the files in .foo as well as .foorc +/// for f in files { +/// println!("{:?}", f); +/// } +/// ``` +#[inline] +#[unstable(feature = "iter_once_with", issue = "0")] +pub fn once_with A>(gen: F) -> OnceWith { + OnceWith { gen: Some(gen) } +} + /// Creates a new iterator where each iteration calls the provided closure /// `F: FnMut(&mut St) -> Option`. /// diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs index a5f20d08e47be..598e7fd706a24 100644 --- a/src/libcore/lib.rs +++ b/src/libcore/lib.rs @@ -79,6 +79,7 @@ #![feature(extern_types)] #![feature(fundamental)] #![feature(intrinsics)] +#![feature(iter_once_with)] #![feature(lang_items)] #![feature(link_llvm_intrinsics)] #![feature(never_type)] diff --git a/src/libcore/tests/iter.rs b/src/libcore/tests/iter.rs index cf19851c17b35..b62f55b2cd003 100644 --- a/src/libcore/tests/iter.rs +++ b/src/libcore/tests/iter.rs @@ -1906,6 +1906,23 @@ fn test_once() { assert_eq!(it.next(), None); } +#[test] +fn test_once_with() { + let mut count = 0; + let mut it = once_with(|| { + count += 1; + 42 + }); + + assert_eq!(count, 0); + assert_eq!(it.next(), Some(42)); + assert_eq!(count, 1); + assert_eq!(it.next(), None); + assert_eq!(count, 1); + assert_eq!(it.next(), None); + assert_eq!(count, 1); +} + #[test] fn test_empty() { let mut it = empty::(); diff --git a/src/libcore/tests/lib.rs b/src/libcore/tests/lib.rs index 72846daf16a6b..a9b8decfd0262 100644 --- a/src/libcore/tests/lib.rs +++ b/src/libcore/tests/lib.rs @@ -12,6 +12,7 @@ #![feature(hashmap_internals)] #![feature(iter_copied)] #![feature(iter_nth_back)] +#![feature(iter_once_with)] #![feature(iter_unfold)] #![feature(pattern)] #![feature(range_is_empty)] From de6566ce391478c5c483fed98eaf6e3c37f4dab9 Mon Sep 17 00:00:00 2001 From: Ariel Ben-Yehuda Date: Sat, 5 Jan 2019 16:19:34 +0200 Subject: [PATCH 05/19] forbid manually impl'ing one of an object type's marker traits This shouldn't break compatibility for crates that do not use `feature(optin_builtin_traits)`, because as the test shows, it is only possible to impl a marker trait for a trait object in the crate the marker trait is defined in, which must define `feature(optin_builtin_traits)`. 
Fixes #56934 --- src/librustc_typeck/coherence/mod.rs | 17 +++++++-- ...ce-impl-trait-for-marker-trait-negative.rs | 29 +++++++++++++++ ...mpl-trait-for-marker-trait-negative.stderr | 37 +++++++++++++++++++ ...ce-impl-trait-for-marker-trait-positive.rs | 29 +++++++++++++++ ...mpl-trait-for-marker-trait-positive.stderr | 37 +++++++++++++++++++ 5 files changed, 145 insertions(+), 4 deletions(-) create mode 100644 src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.rs create mode 100644 src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.stderr create mode 100644 src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.rs create mode 100644 src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.stderr diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs index ce71be07efd42..8053ed130e91b 100644 --- a/src/librustc_typeck/coherence/mod.rs +++ b/src/librustc_typeck/coherence/mod.rs @@ -171,13 +171,23 @@ fn check_impl_overlap<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, node_id: ast::NodeI // This is something like impl Trait1 for Trait2. Illegal // if Trait1 is a supertrait of Trait2 or Trait2 is not object safe. - if let Some(principal_def_id) = data.principal_def_id() { - if !tcx.is_object_safe(principal_def_id) { + let component_def_ids = data.iter().flat_map(|predicate| { + match predicate.skip_binder() { + ty::ExistentialPredicate::Trait(tr) => Some(tr.def_id), + ty::ExistentialPredicate::AutoTrait(def_id) => Some(*def_id), + // An associated type projection necessarily comes with + // an additional `Trait` requirement. + ty::ExistentialPredicate::Projection(..) => None, + } + }); + + for component_def_id in component_def_ids { + if !tcx.is_object_safe(component_def_id) { // This is an error, but it will be reported by wfcheck. Ignore it here. // This is tested by `coherence-impl-trait-for-trait-object-safe.rs`. } else { let mut supertrait_def_ids = - traits::supertrait_def_ids(tcx, principal_def_id); + traits::supertrait_def_ids(tcx, component_def_id); if supertrait_def_ids.any(|d| d == trait_def_id) { let sp = tcx.sess.source_map().def_span(tcx.span_of_impl(impl_def_id).unwrap()); struct_span_err!(tcx.sess, @@ -193,6 +203,5 @@ fn check_impl_overlap<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, node_id: ast::NodeI } } } - // FIXME: also check auto-trait def-ids? (e.g. `impl Sync for Foo+Sync`)? } } diff --git a/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.rs b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.rs new file mode 100644 index 0000000000000..5ea69190951e1 --- /dev/null +++ b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.rs @@ -0,0 +1,29 @@ +#![feature(optin_builtin_traits)] + +// Test for issue #56934 - that it is impossible to redundantly +// implement an auto-trait for a trait object type that contains it. + +// Negative impl variant. + +auto trait Marker1 {} +auto trait Marker2 {} + +trait Object: Marker1 {} + +// A supertrait marker is illegal... +impl !Marker1 for dyn Object + Marker2 { } //~ ERROR E0371 +// ...and also a direct component. +impl !Marker2 for dyn Object + Marker2 { } //~ ERROR E0371 + +// But implementing a marker if it is not present is OK. +impl !Marker2 for dyn Object {} // OK + +// A non-principal trait-object type is orphan even in its crate. +impl !Send for dyn Marker2 {} //~ ERROR E0117 + +// And impl'ing a remote marker for a local trait object is forbidden +// by one of these special orphan-like rules. 
+impl !Send for dyn Object {} //~ ERROR E0321 +impl !Send for dyn Object + Marker2 {} //~ ERROR E0321 + +fn main() { } diff --git a/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.stderr b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.stderr new file mode 100644 index 0000000000000..6e146760db5e3 --- /dev/null +++ b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.stderr @@ -0,0 +1,37 @@ +error[E0371]: the object type `(dyn Object + Marker2 + 'static)` automatically implements the trait `Marker1` + --> $DIR/coherence-impl-trait-for-marker-trait-negative.rs:14:1 + | +LL | impl !Marker1 for dyn Object + Marker2 { } //~ ERROR E0371 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `(dyn Object + Marker2 + 'static)` automatically implements trait `Marker1` + +error[E0371]: the object type `(dyn Object + Marker2 + 'static)` automatically implements the trait `Marker2` + --> $DIR/coherence-impl-trait-for-marker-trait-negative.rs:16:1 + | +LL | impl !Marker2 for dyn Object + Marker2 { } //~ ERROR E0371 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `(dyn Object + Marker2 + 'static)` automatically implements trait `Marker2` + +error[E0117]: only traits defined in the current crate can be implemented for arbitrary types + --> $DIR/coherence-impl-trait-for-marker-trait-negative.rs:22:1 + | +LL | impl !Send for dyn Marker2 {} //~ ERROR E0117 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ impl doesn't use types inside crate + | + = note: the impl does not reference any types defined in this crate + = note: define and implement a trait or new type instead + +error[E0321]: cross-crate traits with a default impl, like `std::marker::Send`, can only be implemented for a struct/enum type, not `(dyn Object + 'static)` + --> $DIR/coherence-impl-trait-for-marker-trait-negative.rs:26:1 + | +LL | impl !Send for dyn Object {} //~ ERROR E0321 + | ^^^^^^^^^^^^^^^^^^^^^^^^^ can't implement cross-crate trait with a default impl for non-struct/enum type + +error[E0321]: cross-crate traits with a default impl, like `std::marker::Send`, can only be implemented for a struct/enum type, not `(dyn Object + Marker2 + 'static)` + --> $DIR/coherence-impl-trait-for-marker-trait-negative.rs:27:1 + | +LL | impl !Send for dyn Object + Marker2 {} //~ ERROR E0321 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ can't implement cross-crate trait with a default impl for non-struct/enum type + +error: aborting due to 5 previous errors + +Some errors occurred: E0117, E0321, E0371. +For more information about an error, try `rustc --explain E0117`. diff --git a/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.rs b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.rs new file mode 100644 index 0000000000000..6b5689e8260f0 --- /dev/null +++ b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.rs @@ -0,0 +1,29 @@ +#![feature(optin_builtin_traits)] + +// Test for issue #56934 - that it is impossible to redundantly +// implement an auto-trait for a trait object type that contains it. + +// Positive impl variant. + +auto trait Marker1 {} +auto trait Marker2 {} + +trait Object: Marker1 {} + +// A supertrait marker is illegal... +impl Marker1 for dyn Object + Marker2 { } //~ ERROR E0371 +// ...and also a direct component. +impl Marker2 for dyn Object + Marker2 { } //~ ERROR E0371 + +// But implementing a marker if it is not present is OK. +impl Marker2 for dyn Object {} // OK + +// A non-principal trait-object type is orphan even in its crate. 
+unsafe impl Send for dyn Marker2 {} //~ ERROR E0117 + +// And impl'ing a remote marker for a local trait object is forbidden +// by one of these special orphan-like rules. +unsafe impl Send for dyn Object {} //~ ERROR E0321 +unsafe impl Send for dyn Object + Marker2 {} //~ ERROR E0321 + +fn main() { } diff --git a/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.stderr b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.stderr new file mode 100644 index 0000000000000..4a8347613eb13 --- /dev/null +++ b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.stderr @@ -0,0 +1,37 @@ +error[E0371]: the object type `(dyn Object + Marker2 + 'static)` automatically implements the trait `Marker1` + --> $DIR/coherence-impl-trait-for-marker-trait-positive.rs:14:1 + | +LL | impl Marker1 for dyn Object + Marker2 { } //~ ERROR E0371 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `(dyn Object + Marker2 + 'static)` automatically implements trait `Marker1` + +error[E0371]: the object type `(dyn Object + Marker2 + 'static)` automatically implements the trait `Marker2` + --> $DIR/coherence-impl-trait-for-marker-trait-positive.rs:16:1 + | +LL | impl Marker2 for dyn Object + Marker2 { } //~ ERROR E0371 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `(dyn Object + Marker2 + 'static)` automatically implements trait `Marker2` + +error[E0117]: only traits defined in the current crate can be implemented for arbitrary types + --> $DIR/coherence-impl-trait-for-marker-trait-positive.rs:22:1 + | +LL | unsafe impl Send for dyn Marker2 {} //~ ERROR E0117 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ impl doesn't use types inside crate + | + = note: the impl does not reference any types defined in this crate + = note: define and implement a trait or new type instead + +error[E0321]: cross-crate traits with a default impl, like `std::marker::Send`, can only be implemented for a struct/enum type, not `(dyn Object + 'static)` + --> $DIR/coherence-impl-trait-for-marker-trait-positive.rs:26:1 + | +LL | unsafe impl Send for dyn Object {} //~ ERROR E0321 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ can't implement cross-crate trait with a default impl for non-struct/enum type + +error[E0321]: cross-crate traits with a default impl, like `std::marker::Send`, can only be implemented for a struct/enum type, not `(dyn Object + Marker2 + 'static)` + --> $DIR/coherence-impl-trait-for-marker-trait-positive.rs:27:1 + | +LL | unsafe impl Send for dyn Object + Marker2 {} //~ ERROR E0321 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ can't implement cross-crate trait with a default impl for non-struct/enum type + +error: aborting due to 5 previous errors + +Some errors occurred: E0117, E0321, E0371. +For more information about an error, try `rustc --explain E0117`. 
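Note (editorial sketch): the coherence change in this patch walks every component of the trait-object type, not just its principal trait, when looking for marker/auto traits that would be implemented redundantly. The toy program below is not rustc's real API; `DefId`, `ExistentialPredicate`, and `component_def_ids` are simplified stand-ins used only to illustrate which trait def-ids the new check visits for a type like `dyn Object + Marker2`.

```rust
// Toy model of the component enumeration added to check_impl_overlap.
#[derive(Debug, Clone, Copy, PartialEq)]
struct DefId(u32);

// Simplified stand-in for ty::ExistentialPredicate.
#[allow(dead_code)]
enum ExistentialPredicate {
    Trait(DefId),     // the principal, e.g. `Object` in `dyn Object + Marker2`
    AutoTrait(DefId), // an auto/marker trait, e.g. `Marker2`
    Projection,       // `Assoc = T` bounds; these imply a `Trait` predicate
}

// Collect every def-id the overlap check must now inspect.
fn component_def_ids(preds: &[ExistentialPredicate]) -> Vec<DefId> {
    preds
        .iter()
        .filter_map(|p| match p {
            ExistentialPredicate::Trait(id) => Some(*id),
            ExistentialPredicate::AutoTrait(id) => Some(*id),
            // A projection always comes with a `Trait` predicate,
            // so it adds no new trait to check.
            ExistentialPredicate::Projection => None,
        })
        .collect()
}

fn main() {
    // `dyn Object + Marker2` decomposes into [Trait(Object), AutoTrait(Marker2)].
    let object = DefId(0);
    let marker2 = DefId(1);
    let preds = [
        ExistentialPredicate::Trait(object),
        ExistentialPredicate::AutoTrait(marker2),
    ];
    // Before this patch only `Object` was checked; now `Marker2` is visited too,
    // which is what rejects `impl Marker2 for dyn Object + Marker2` with E0371.
    assert_eq!(component_def_ids(&preds), vec![object, marker2]);
}
```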
From 791573271415a1f07c274d676da4fe39963ac92c Mon Sep 17 00:00:00 2001 From: Stjepan Glavina Date: Sun, 13 Jan 2019 21:24:15 +0100 Subject: [PATCH 06/19] Fix intradoc link and update issue number --- src/libcore/iter/mod.rs | 2 +- src/libcore/iter/sources.rs | 15 ++++++++------- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/src/libcore/iter/mod.rs b/src/libcore/iter/mod.rs index f647a61a584c6..cc173eff03e03 100644 --- a/src/libcore/iter/mod.rs +++ b/src/libcore/iter/mod.rs @@ -329,7 +329,7 @@ pub use self::sources::{RepeatWith, repeat_with}; pub use self::sources::{Empty, empty}; #[stable(feature = "iter_once", since = "1.2.0")] pub use self::sources::{Once, once}; -#[unstable(feature = "iter_once_with", issue = "0")] +#[unstable(feature = "iter_once_with", issue = "57581")] pub use self::sources::{OnceWith, once_with}; #[unstable(feature = "iter_unfold", issue = "55977")] pub use self::sources::{Unfold, unfold, Successors, successors}; diff --git a/src/libcore/iter/sources.rs b/src/libcore/iter/sources.rs index d183fa3a7c233..103d02da22f0e 100644 --- a/src/libcore/iter/sources.rs +++ b/src/libcore/iter/sources.rs @@ -385,12 +385,12 @@ pub fn once(value: T) -> Once { /// /// [`once_with`]: fn.once_with.html #[derive(Copy, Clone, Debug)] -#[unstable(feature = "iter_once_with", issue = "0")] +#[unstable(feature = "iter_once_with", issue = "57581")] pub struct OnceWith { gen: Option, } -#[unstable(feature = "iter_once_with", issue = "0")] +#[unstable(feature = "iter_once_with", issue = "57581")] impl A> Iterator for OnceWith { type Item = A; @@ -405,24 +405,24 @@ impl A> Iterator for OnceWith { } } -#[unstable(feature = "iter_once_with", issue = "0")] +#[unstable(feature = "iter_once_with", issue = "57581")] impl A> DoubleEndedIterator for OnceWith { fn next_back(&mut self) -> Option { self.next() } } -#[unstable(feature = "iter_once_with", issue = "0")] +#[unstable(feature = "iter_once_with", issue = "57581")] impl A> ExactSizeIterator for OnceWith { fn len(&self) -> usize { self.gen.iter().len() } } -#[unstable(feature = "iter_once_with", issue = "0")] +#[unstable(feature = "iter_once_with", issue = "57581")] impl A> FusedIterator for OnceWith {} -#[unstable(feature = "iter_once_with", issue = "0")] +#[unstable(feature = "iter_once_with", issue = "57581")] unsafe impl A> TrustedLen for OnceWith {} /// Creates an iterator that lazily generates a value exactly once by invoking @@ -436,6 +436,7 @@ unsafe impl A> TrustedLen for OnceWith {} /// Unlike [`once`], this function will lazily generate the value on request. /// /// [`once`]: fn.once.html +/// [`chain`]: trait.Iterator.html#method.chain /// /// # Examples /// @@ -480,7 +481,7 @@ unsafe impl A> TrustedLen for OnceWith {} /// } /// ``` #[inline] -#[unstable(feature = "iter_once_with", issue = "0")] +#[unstable(feature = "iter_once_with", issue = "57581")] pub fn once_with A>(gen: F) -> OnceWith { OnceWith { gen: Some(gen) } } From 28966e1a7ac509cebac4595e96f8d053b30fb946 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Wed, 9 Jan 2019 15:20:56 +1100 Subject: [PATCH 07/19] Remove `TokenStream::Tree` variant. `TokenStream::Stream` can represent a token stream containing any number of token trees. `TokenStream::Tree` is the special case representing a single token tree. The latter doesn't occur all that often dynamically, so this commit removes it, which simplifies the code quite a bit. This change has mixed performance effects. 
- The size of `TokenStream` drops from 32 bytes to 8 bytes, and there is one less case for all the match statements. - The conversion of a `TokenTree` to a `TokenStream` now requires two allocations, for the creation of a single element Lrc>. (But a subsequent commit in this PR will reduce the main source of such conversions.) --- src/libsyntax/tokenstream.rs | 45 +++----------------------- src/libsyntax_ext/proc_macro_server.rs | 2 +- 2 files changed, 6 insertions(+), 41 deletions(-) diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index fb72ef9c956ce..95ff7728897e5 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -113,7 +113,7 @@ impl TokenTree { } pub fn joint(self) -> TokenStream { - TokenStream::Tree(self, Joint) + TokenStream::new(vec![(self, Joint)]) } /// Returns the opening delimiter as a token tree. @@ -146,7 +146,6 @@ impl TokenTree { #[derive(Clone, Debug)] pub enum TokenStream { Empty, - Tree(TokenTree, IsJoint), Stream(Lrc>), } @@ -154,7 +153,7 @@ pub type TreeAndJoint = (TokenTree, IsJoint); // `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger. #[cfg(target_arch = "x86_64")] -static_assert!(MEM_SIZE_OF_TOKEN_STREAM: mem::size_of::() == 32); +static_assert!(MEM_SIZE_OF_TOKEN_STREAM: mem::size_of::() == 8); #[derive(Clone, Copy, Debug, PartialEq)] pub enum IsJoint { @@ -201,7 +200,7 @@ impl TokenStream { impl From for TokenStream { fn from(tree: TokenTree) -> TokenStream { - TokenStream::Tree(tree, NonJoint) + TokenStream::new(vec![(tree, NonJoint)]) } } @@ -260,7 +259,6 @@ impl TokenStream { for stream in streams { match stream { TokenStream::Empty => {}, - TokenStream::Tree(tree, is_joint) => vec.push((tree, is_joint)), TokenStream::Stream(stream2) => vec.extend(stream2.iter().cloned()), } } @@ -269,13 +267,9 @@ impl TokenStream { } } - pub fn new(mut streams: Vec) -> TokenStream { + pub fn new(streams: Vec) -> TokenStream { match streams.len() { 0 => TokenStream::empty(), - 1 => { - let (tree, is_joint) = streams.pop().unwrap(); - TokenStream::Tree(tree, is_joint) - } _ => TokenStream::Stream(Lrc::new(streams)), } } @@ -283,7 +277,6 @@ impl TokenStream { pub fn append_to_tree_and_joint_vec(self, vec: &mut Vec) { match self { TokenStream::Empty => {} - TokenStream::Tree(tree, is_joint) => vec.push((tree, is_joint)), TokenStream::Stream(stream) => vec.extend(stream.iter().cloned()), } } @@ -351,7 +344,6 @@ impl TokenStream { pub fn map_enumerated TokenTree>(self, mut f: F) -> TokenStream { match self { TokenStream::Empty => TokenStream::Empty, - TokenStream::Tree(tree, is_joint) => TokenStream::Tree(f(0, tree), is_joint), TokenStream::Stream(stream) => TokenStream::Stream(Lrc::new( stream .iter() @@ -365,7 +357,6 @@ impl TokenStream { pub fn map TokenTree>(self, mut f: F) -> TokenStream { match self { TokenStream::Empty => TokenStream::Empty, - TokenStream::Tree(tree, is_joint) => TokenStream::Tree(f(tree), is_joint), TokenStream::Stream(stream) => TokenStream::Stream(Lrc::new( stream .iter() @@ -378,7 +369,6 @@ impl TokenStream { fn first_tree_and_joint(&self) -> Option<(TokenTree, IsJoint)> { match self { TokenStream::Empty => None, - TokenStream::Tree(ref tree, is_joint) => Some((tree.clone(), *is_joint)), TokenStream::Stream(ref stream) => Some(stream.first().unwrap().clone()) } } @@ -386,13 +376,6 @@ impl TokenStream { fn last_tree_if_joint(&self) -> Option { match self { TokenStream::Empty => None, - TokenStream::Tree(ref tree, is_joint) => { - if *is_joint == Joint { - Some(tree.clone()) 
- } else { - None - } - } TokenStream::Stream(ref stream) => { if let (tree, Joint) = stream.last().unwrap() { Some(tree.clone()) @@ -422,7 +405,7 @@ impl TokenStreamBuilder { self.push_all_but_last_tree(&last_stream); let glued_span = last_span.to(span); let glued_tt = TokenTree::Token(glued_span, glued_tok); - let glued_tokenstream = TokenStream::Tree(glued_tt, is_joint); + let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]); self.0.push(glued_tokenstream); self.push_all_but_first_tree(&stream); return @@ -441,7 +424,6 @@ impl TokenStreamBuilder { let len = streams.len(); match len { 1 => {} - 2 => self.0.push(TokenStream::Tree(streams[0].0.clone(), streams[0].1)), _ => self.0.push(TokenStream::Stream(Lrc::new(streams[0 .. len - 1].to_vec()))), } } @@ -452,7 +434,6 @@ impl TokenStreamBuilder { let len = streams.len(); match len { 1 => {} - 2 => self.0.push(TokenStream::Tree(streams[1].0.clone(), streams[1].1)), _ => self.0.push(TokenStream::Stream(Lrc::new(streams[1 .. len].to_vec()))), } } @@ -481,14 +462,6 @@ impl Cursor { pub fn next_with_joint(&mut self) -> Option { match self.stream { TokenStream::Empty => None, - TokenStream::Tree(ref tree, ref is_joint) => { - if self.index == 0 { - self.index = 1; - Some((tree.clone(), *is_joint)) - } else { - None - } - } TokenStream::Stream(ref stream) => { if self.index < stream.len() { self.index += 1; @@ -513,13 +486,6 @@ impl Cursor { pub fn look_ahead(&self, n: usize) -> Option { match self.stream { TokenStream::Empty => None, - TokenStream::Tree(ref tree, _) => { - if n == 0 && self.index == 0 { - Some(tree.clone()) - } else { - None - } - } TokenStream::Stream(ref stream) => stream[self.index ..].get(n).map(|(tree, _)| tree.clone()), } @@ -542,7 +508,6 @@ impl From for ThinTokenStream { fn from(stream: TokenStream) -> ThinTokenStream { ThinTokenStream(match stream { TokenStream::Empty => None, - TokenStream::Tree(tree, is_joint) => Some(Lrc::new(vec![(tree, is_joint)])), TokenStream::Stream(stream) => Some(stream), }) } diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs index 158cbc791ef50..7de9b9343a8fa 100644 --- a/src/libsyntax_ext/proc_macro_server.rs +++ b/src/libsyntax_ext/proc_macro_server.rs @@ -269,7 +269,7 @@ impl ToInternal for TokenTree { }; let tree = tokenstream::TokenTree::Token(span, token); - TokenStream::Tree(tree, if joint { Joint } else { NonJoint }) + TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })]) } } From ce0d9949b817267e88e8d366a8cee929abf1e4ba Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Wed, 9 Jan 2019 16:53:14 +1100 Subject: [PATCH 08/19] Remove `ThinTokenStream`. `TokenStream` is now almost identical to `ThinTokenStream`. This commit removes the latter, replacing it with the former. 
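Note (editorial sketch): the size figure quoted in the previous commit message (`TokenStream` dropping from 32 bytes to 8) comes down to enum layout. The toy program below uses `Arc` in place of `Lrc` and a deliberately small stand-in `TokenTree`, so its "before" number will not match rustc's 32 bytes, but it shows the mechanism: once the inline `Tree` variant is gone, only a unit variant and a non-null pointer remain, and the niche optimization makes the whole enum pointer-sized.

```rust
use std::mem::size_of;
use std::sync::Arc; // stand-in for rustc's `Lrc`

#[allow(dead_code)]
struct Span { lo: u32, hi: u32 }

#[allow(dead_code)]
enum TokenTree { Token(Span, u64) } // the real TokenTree is larger and also has Delimited(..)

#[allow(dead_code)]
enum IsJoint { Joint, NonJoint }

type TreeAndJoint = (TokenTree, IsJoint);

// Before: the `Tree` variant stores a whole `TreeAndJoint` inline, so the
// enum must be at least that large plus room for a discriminant.
#[allow(dead_code)]
enum OldTokenStream {
    Empty,
    Tree(TokenTree, IsJoint),
    Stream(Arc<Vec<TreeAndJoint>>),
}

// After: only `Empty` and a non-null `Arc` remain, so the discriminant is
// folded into the pointer's niche and the enum is pointer-sized.
#[allow(dead_code)]
enum NewTokenStream {
    Empty,
    Stream(Arc<Vec<TreeAndJoint>>),
}

fn main() {
    println!("old layout: {} bytes", size_of::<OldTokenStream>());
    println!("new layout: {} bytes", size_of::<NewTokenStream>()); // 8 on 64-bit targets
}
```

This is also why converting a single `TokenTree` into a `TokenStream` now costs two allocations (the `Vec` and the `Lrc`), the trade-off the previous commit message calls out.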
--- src/librustc/ich/impls_syntax.rs | 2 +- src/librustc_lint/builtin.rs | 2 +- src/libsyntax/ast.rs | 8 ++--- src/libsyntax/attr/mod.rs | 2 +- src/libsyntax/ext/quote.rs | 2 +- src/libsyntax/fold.rs | 2 +- src/libsyntax/parse/mod.rs | 6 ++-- src/libsyntax/parse/parser.rs | 12 +++---- src/libsyntax/print/pprust.rs | 2 +- src/libsyntax/tokenstream.rs | 55 ++------------------------------ src/libsyntax/visit.rs | 2 +- 11 files changed, 23 insertions(+), 72 deletions(-) diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs index 70ec72d73bc6c..de567183a3c05 100644 --- a/src/librustc/ich/impls_syntax.rs +++ b/src/librustc/ich/impls_syntax.rs @@ -258,7 +258,7 @@ for tokenstream::TokenTree { tokenstream::TokenTree::Delimited(span, delim, ref tts) => { span.hash_stable(hcx, hasher); std_hash::Hash::hash(&delim, hasher); - for sub_tt in tts.stream().trees() { + for sub_tt in tts.trees() { sub_tt.hash_stable(hcx, hasher); } } diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index 5678f30dabccd..0fce166d828b0 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -1540,7 +1540,7 @@ impl KeywordIdents { _ => {}, } TokenTree::Delimited(_, _, tts) => { - self.check_tokens(cx, tts.stream()) + self.check_tokens(cx, tts) }, } } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index e3a8980a975c1..1e91f4adc36d7 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -15,7 +15,7 @@ use rustc_target::spec::abi::Abi; use source_map::{dummy_spanned, respan, Spanned}; use symbol::{keywords, Symbol}; use syntax_pos::{Span, DUMMY_SP}; -use tokenstream::{ThinTokenStream, TokenStream}; +use tokenstream::TokenStream; use ThinVec; use rustc_data_structures::fx::FxHashSet; @@ -1216,7 +1216,7 @@ pub type Mac = Spanned; pub struct Mac_ { pub path: Path, pub delim: MacDelimiter, - pub tts: ThinTokenStream, + pub tts: TokenStream, } #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)] @@ -1228,13 +1228,13 @@ pub enum MacDelimiter { impl Mac_ { pub fn stream(&self) -> TokenStream { - self.tts.stream() + self.tts.clone() } } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct MacroDef { - pub tokens: ThinTokenStream, + pub tokens: TokenStream, pub legacy: bool, } diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index d03563f8891aa..0f8ca5e7b9982 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -565,7 +565,7 @@ impl MetaItemKind { } Some(TokenTree::Delimited(_, delim, ref tts)) if delim == token::Paren => { tokens.next(); - tts.stream() + tts.clone() } _ => return Some(MetaItemKind::Word), }; diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index c3124144009ab..c01e7f538b90d 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -748,7 +748,7 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec { let mut stmts = statements_mk_tt(cx, &TokenTree::open_tt(span.open, delim), false); - stmts.extend(statements_mk_tts(cx, tts.stream())); + stmts.extend(statements_mk_tts(cx, tts.clone())); stmts.extend(statements_mk_tt(cx, &TokenTree::close_tt(span.close, delim), false)); stmts } diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 8ac103856dcd1..a4c3b38f691ed 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -598,7 +598,7 @@ pub fn noop_fold_tt(tt: TokenTree, fld: &mut T) -> TokenTree { TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited( 
DelimSpan::from_pair(fld.new_span(span.open), fld.new_span(span.close)), delim, - fld.fold_tts(tts.stream()).into(), + fld.fold_tts(tts).into(), ), } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index ba5676a65d7eb..759de578847a9 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -811,7 +811,7 @@ mod tests { ) if name_macro_rules.name == "macro_rules" && name_zip.name == "zip" => { - let tts = ¯o_tts.stream().trees().collect::>(); + let tts = ¯o_tts.trees().collect::>(); match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { ( 3, @@ -820,7 +820,7 @@ mod tests { Some(&TokenTree::Delimited(_, second_delim, ref second_tts)), ) if macro_delim == token::Paren => { - let tts = &first_tts.stream().trees().collect::>(); + let tts = &first_tts.trees().collect::>(); match (tts.len(), tts.get(0), tts.get(1)) { ( 2, @@ -830,7 +830,7 @@ mod tests { if first_delim == token::Paren && ident.name == "a" => {}, _ => panic!("value 3: {:?} {:?}", first_delim, first_tts), } - let tts = &second_tts.stream().trees().collect::>(); + let tts = &second_tts.trees().collect::>(); match (tts.len(), tts.get(0), tts.get(1)) { ( 2, diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 5c8ed94731afb..6df95d539affb 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -46,7 +46,7 @@ use print::pprust; use ptr::P; use parse::PResult; use ThinVec; -use tokenstream::{self, DelimSpan, ThinTokenStream, TokenTree, TokenStream}; +use tokenstream::{self, DelimSpan, TokenTree, TokenStream}; use symbol::{Symbol, keywords}; use std::borrow::Cow; @@ -285,12 +285,12 @@ enum LastToken { } impl TokenCursorFrame { - fn new(sp: DelimSpan, delim: DelimToken, tts: &ThinTokenStream) -> Self { + fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self { TokenCursorFrame { delim: delim, span: sp, open_delim: delim == token::NoDelim, - tree_cursor: tts.stream().into_trees(), + tree_cursor: tts.clone().into_trees(), close_delim: delim == token::NoDelim, last_token: LastToken::Was(None), } @@ -2325,7 +2325,7 @@ impl<'a> Parser<'a> { }) } - fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, ThinTokenStream)> { + fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> { let delim = match self.token { token::OpenDelim(delim) => delim, _ => { @@ -2345,7 +2345,7 @@ impl<'a> Parser<'a> { token::Brace => MacDelimiter::Brace, token::NoDelim => self.bug("unexpected no delimiter"), }; - Ok((delim, tts.stream().into())) + Ok((delim, tts.into())) } /// At the bottom (top?) 
of the precedence hierarchy, @@ -4633,7 +4633,7 @@ impl<'a> Parser<'a> { let ident = self.parse_ident()?; let tokens = if self.check(&token::OpenDelim(token::Brace)) { match self.parse_token_tree() { - TokenTree::Delimited(_, _, tts) => tts.stream(), + TokenTree::Delimited(_, _, tts) => tts, _ => unreachable!(), } } else if self.check(&token::OpenDelim(token::Paren)) { diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 2ad3d3a6d6487..c53594032a00a 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -807,7 +807,7 @@ pub trait PrintState<'a> { TokenTree::Delimited(_, delim, tts) => { self.writer().word(token_to_string(&token::OpenDelim(delim)))?; self.writer().space()?; - self.print_tts(tts.stream())?; + self.print_tts(tts)?; self.writer().space()?; self.writer().word(token_to_string(&token::CloseDelim(delim))) }, diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 95ff7728897e5..d5c362490ca6a 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -41,7 +41,7 @@ pub enum TokenTree { /// A single token Token(Span, token::Token), /// A delimited sequence of token trees - Delimited(DelimSpan, DelimToken, ThinTokenStream), + Delimited(DelimSpan, DelimToken, TokenStream), } impl TokenTree { @@ -62,8 +62,7 @@ impl TokenTree { (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2, (&TokenTree::Delimited(_, delim, ref tts), &TokenTree::Delimited(_, delim2, ref tts2)) => { - delim == delim2 && - tts.stream().eq_unspanned(&tts2.stream()) + delim == delim2 && tts.eq_unspanned(&tts2) } (_, _) => false, } @@ -81,8 +80,7 @@ impl TokenTree { } (&TokenTree::Delimited(_, delim, ref tts), &TokenTree::Delimited(_, delim2, ref tts2)) => { - delim == delim2 && - tts.stream().probably_equal_for_proc_macro(&tts2.stream()) + delim == delim2 && tts.probably_equal_for_proc_macro(&tts2) } (_, _) => false, } @@ -492,41 +490,6 @@ impl Cursor { } } -/// The `TokenStream` type is large enough to represent a single `TokenTree` without allocation. -/// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`. -/// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion. 
-#[derive(Debug, Clone)] -pub struct ThinTokenStream(Option>>); - -impl ThinTokenStream { - pub fn stream(&self) -> TokenStream { - self.clone().into() - } -} - -impl From for ThinTokenStream { - fn from(stream: TokenStream) -> ThinTokenStream { - ThinTokenStream(match stream { - TokenStream::Empty => None, - TokenStream::Stream(stream) => Some(stream), - }) - } -} - -impl From for TokenStream { - fn from(stream: ThinTokenStream) -> TokenStream { - stream.0.map(TokenStream::Stream).unwrap_or_else(TokenStream::empty) - } -} - -impl Eq for ThinTokenStream {} - -impl PartialEq for ThinTokenStream { - fn eq(&self, other: &ThinTokenStream) -> bool { - TokenStream::from(self.clone()) == TokenStream::from(other.clone()) - } -} - impl fmt::Display for TokenStream { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(&pprust::tokens_to_string(self.clone())) @@ -545,18 +508,6 @@ impl Decodable for TokenStream { } } -impl Encodable for ThinTokenStream { - fn encode(&self, encoder: &mut E) -> Result<(), E::Error> { - TokenStream::from(self.clone()).encode(encoder) - } -} - -impl Decodable for ThinTokenStream { - fn decode(decoder: &mut D) -> Result { - TokenStream::decode(decoder).map(Into::into) - } -} - #[derive(Debug, Copy, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub struct DelimSpan { pub open: Span, diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 156546bbba94a..8cbd47ca70fde 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -832,7 +832,7 @@ pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute) pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) { match tt { TokenTree::Token(_, tok) => visitor.visit_token(tok), - TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts.stream()), + TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts), } } From ba31d83adc839768ed8fab7dea79d9f6bd6c58ac Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Thu, 10 Jan 2019 11:58:38 +1100 Subject: [PATCH 09/19] Avoid some `TokenTree`-to-`TokenStream` conversions. This avoids some allocations. --- src/libsyntax/parse/parser.rs | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 6df95d539affb..537d536ec62c1 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -46,7 +46,7 @@ use print::pprust; use ptr::P; use parse::PResult; use ThinVec; -use tokenstream::{self, DelimSpan, TokenTree, TokenStream}; +use tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint}; use symbol::{Symbol, keywords}; use std::borrow::Cow; @@ -280,8 +280,8 @@ struct TokenCursorFrame { /// on the parser. #[derive(Clone)] enum LastToken { - Collecting(Vec), - Was(Option), + Collecting(Vec), + Was(Option), } impl TokenCursorFrame { @@ -7677,7 +7677,7 @@ impl<'a> Parser<'a> { &mut self.token_cursor.stack[prev].last_token }; - // Pull our the toekns that we've collected from the call to `f` above + // Pull out the tokens that we've collected from the call to `f` above. let mut collected_tokens = match *last_token { LastToken::Collecting(ref mut v) => mem::replace(v, Vec::new()), LastToken::Was(_) => panic!("our vector went away?"), @@ -7696,10 +7696,9 @@ impl<'a> Parser<'a> { // call. In that case we need to record all the tokens we collected in // our parent list as well. To do that we push a clone of our stream // onto the previous list. 
- let stream = collected_tokens.into_iter().collect::(); match prev_collecting { Some(mut list) => { - list.push(stream.clone()); + list.extend(collected_tokens.iter().cloned()); list.extend(extra_token); *last_token = LastToken::Collecting(list); } @@ -7708,7 +7707,7 @@ impl<'a> Parser<'a> { } } - Ok((ret?, stream)) + Ok((ret?, TokenStream::new(collected_tokens))) } pub fn parse_item(&mut self) -> PResult<'a, Option>> { From 959c870d03f174dd1cf0fd557bd9d631386eb75c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Fri, 8 Jun 2018 19:14:03 +0200 Subject: [PATCH 10/19] Make privacy checking, intrinsic checking and liveness checking incremental --- src/librustc/dep_graph/dep_node.rs | 3 ++ src/librustc/hir/map/mod.rs | 15 ++++++++++ src/librustc/middle/intrinsicck.rs | 20 ++++++++++++-- src/librustc/middle/liveness.rs | 19 +++++++++++-- src/librustc/ty/query/config.rs | 27 ++++++++++++++++++ src/librustc/ty/query/mod.rs | 6 ++++ src/librustc/ty/query/plumbing.rs | 3 ++ src/librustc_driver/driver.rs | 2 ++ src/librustc_privacy/lib.rs | 44 +++++++++++++++++++++--------- 9 files changed, 121 insertions(+), 18 deletions(-) diff --git a/src/librustc/dep_graph/dep_node.rs b/src/librustc/dep_graph/dep_node.rs index 427fe51e6ff9c..d1067b70778ee 100644 --- a/src/librustc/dep_graph/dep_node.rs +++ b/src/librustc/dep_graph/dep_node.rs @@ -476,6 +476,9 @@ define_dep_nodes!( <'tcx> [] CheckModLoops(DefId), [] CheckModUnstableApiUsage(DefId), [] CheckModItemTypes(DefId), + [] CheckModPrivacy(DefId), + [] CheckModIntrinsics(DefId), + [] CheckModLiveness(DefId), [] CollectModItemTypes(DefId), [] Reachability, diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index 513e18b137371..1ac0109b627b8 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -507,6 +507,21 @@ impl<'hir> Map<'hir> { &self.forest.krate.attrs } + pub fn get_module(&self, module: DefId) -> (&'hir Mod, Span, NodeId) + { + let node_id = self.as_local_node_id(module).unwrap(); + self.read(node_id); + match self.find_entry(node_id).unwrap().node { + Node::Item(&Item { + span, + node: ItemKind::Mod(ref m), + .. 
+ }) => (m, span, node_id), + Node::Crate => (&self.forest.krate.module, self.forest.krate.span, node_id), + _ => panic!("not a module") + } + } + pub fn visit_item_likes_in_module(&self, module: DefId, visitor: &mut V) where V: ItemLikeVisitor<'hir> { diff --git a/src/librustc/middle/intrinsicck.rs b/src/librustc/middle/intrinsicck.rs index 1716daaa107c4..a0f7954eb0c55 100644 --- a/src/librustc/middle/intrinsicck.rs +++ b/src/librustc/middle/intrinsicck.rs @@ -2,6 +2,7 @@ use hir::def::Def; use hir::def_id::DefId; use ty::{self, Ty, TyCtxt}; use ty::layout::{LayoutError, Pointer, SizeSkeleton, VariantIdx}; +use ty::query::{Providers, queries}; use rustc_target::spec::abi::Abi::RustIntrinsic; use rustc_data_structures::indexed_vec::Idx; @@ -10,10 +11,23 @@ use hir::intravisit::{self, Visitor, NestedVisitorMap}; use hir; pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - let mut visitor = ItemVisitor { - tcx, + for &module in tcx.hir().krate().modules.keys() { + queries::check_mod_intrinsics::ensure(tcx, tcx.hir().local_def_id(module)); + } +} + +fn check_mod_intrinsics<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) { + tcx.hir().visit_item_likes_in_module( + module_def_id, + &mut ItemVisitor { tcx }.as_deep_visitor() + ); +} + +pub fn provide(providers: &mut Providers<'_>) { + *providers = Providers { + check_mod_intrinsics, + ..*providers }; - tcx.hir().krate().visit_all_item_likes(&mut visitor.as_deep_visitor()); } struct ItemVisitor<'a, 'tcx: 'a> { diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index a78cf1a471b4b..0a1802a4e12c9 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -100,6 +100,7 @@ use self::VarKind::*; use hir::def::*; use hir::Node; use ty::{self, TyCtxt}; +use ty::query::{Providers, queries}; use lint; use errors::Applicability; use util::nodemap::{NodeMap, HirIdMap, HirIdSet}; @@ -114,8 +115,9 @@ use syntax::ptr::P; use syntax::symbol::keywords; use syntax_pos::Span; -use hir::{Expr, HirId}; use hir; +use hir::{Expr, HirId}; +use hir::def_id::DefId; use hir::intravisit::{self, Visitor, FnKind, NestedVisitorMap}; /// For use with `propagate_through_loop`. 
@@ -179,11 +181,24 @@ impl<'a, 'tcx> Visitor<'tcx> for IrMaps<'a, 'tcx> { fn visit_arm(&mut self, a: &'tcx hir::Arm) { visit_arm(self, a); } } +fn check_mod_liveness<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) { + tcx.hir().visit_item_likes_in_module(module_def_id, &mut IrMaps::new(tcx).as_deep_visitor()); +} + pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - tcx.hir().krate().visit_all_item_likes(&mut IrMaps::new(tcx).as_deep_visitor()); + for &module in tcx.hir().krate().modules.keys() { + queries::check_mod_liveness::ensure(tcx, tcx.hir().local_def_id(module)); + } tcx.sess.abort_if_errors(); } +pub fn provide(providers: &mut Providers<'_>) { + *providers = Providers { + check_mod_liveness, + ..*providers + }; +} + impl fmt::Debug for LiveNode { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ln({})", self.get()) diff --git a/src/librustc/ty/query/config.rs b/src/librustc/ty/query/config.rs index ca5d1f6bd3203..c20846aebb877 100644 --- a/src/librustc/ty/query/config.rs +++ b/src/librustc/ty/query/config.rs @@ -109,6 +109,33 @@ impl<'tcx> QueryDescription<'tcx> for queries::check_mod_item_types<'tcx> { } } +impl<'tcx> QueryDescription<'tcx> for queries::check_mod_privacy<'tcx> { + fn describe( + tcx: TyCtxt<'_, '_, '_>, + key: DefId, + ) -> Cow<'static, str> { + format!("checking privacy in {}", key.describe_as_module(tcx)).into() + } +} + +impl<'tcx> QueryDescription<'tcx> for queries::check_mod_intrinsics<'tcx> { + fn describe( + tcx: TyCtxt<'_, '_, '_>, + key: DefId, + ) -> Cow<'static, str> { + format!("checking intrinsics in {}", key.describe_as_module(tcx)).into() + } +} + +impl<'tcx> QueryDescription<'tcx> for queries::check_mod_liveness<'tcx> { + fn describe( + tcx: TyCtxt<'_, '_, '_>, + key: DefId, + ) -> Cow<'static, str> { + format!("checking liveness of variables in {}", key.describe_as_module(tcx)).into() + } +} + impl<'tcx> QueryDescription<'tcx> for queries::collect_mod_item_types<'tcx> { fn describe( tcx: TyCtxt<'_, '_, '_>, diff --git a/src/librustc/ty/query/mod.rs b/src/librustc/ty/query/mod.rs index 39d76ceed9507..88c20547a2108 100644 --- a/src/librustc/ty/query/mod.rs +++ b/src/librustc/ty/query/mod.rs @@ -264,6 +264,12 @@ define_queries! { <'tcx> [] fn check_mod_item_types: CheckModItemTypes(DefId) -> (), + [] fn check_mod_privacy: CheckModPrivacy(DefId) -> (), + + [] fn check_mod_intrinsics: CheckModIntrinsics(DefId) -> (), + + [] fn check_mod_liveness: CheckModLiveness(DefId) -> (), + [] fn collect_mod_item_types: CollectModItemTypes(DefId) -> (), /// Caches CoerceUnsized kinds for impls on custom types. 
diff --git a/src/librustc/ty/query/plumbing.rs b/src/librustc/ty/query/plumbing.rs index af23bf3c5901f..1a20144a134eb 100644 --- a/src/librustc/ty/query/plumbing.rs +++ b/src/librustc/ty/query/plumbing.rs @@ -1266,6 +1266,9 @@ pub fn force_from_dep_node<'a, 'gcx, 'lcx>(tcx: TyCtxt<'a, 'gcx, 'lcx>, DepKind::CheckModLoops => { force!(check_mod_loops, def_id!()); } DepKind::CheckModUnstableApiUsage => { force!(check_mod_unstable_api_usage, def_id!()); } DepKind::CheckModItemTypes => { force!(check_mod_item_types, def_id!()); } + DepKind::CheckModPrivacy => { force!(check_mod_privacy, def_id!()); } + DepKind::CheckModIntrinsics => { force!(check_mod_intrinsics, def_id!()); } + DepKind::CheckModLiveness => { force!(check_mod_liveness, def_id!()); } DepKind::CollectModItemTypes => { force!(collect_mod_item_types, def_id!()); } DepKind::Reachability => { force!(reachable_set, LOCAL_CRATE); } DepKind::MirKeys => { force!(mir_keys, LOCAL_CRATE); } diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 380f9afd68de6..95c0facb6fd42 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -1168,6 +1168,8 @@ pub fn default_provide(providers: &mut ty::query::Providers) { ty::provide(providers); traits::provide(providers); stability::provide(providers); + middle::intrinsicck::provide(providers); + middle::liveness::provide(providers); reachable::provide(providers); rustc_passes::provide(providers); rustc_traits::provide(providers); diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index 5015ed027cc3c..3c6a17aea3f49 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -22,12 +22,12 @@ use rustc::lint; use rustc::middle::privacy::{AccessLevel, AccessLevels}; use rustc::ty::{self, TyCtxt, Ty, TraitRef, TypeFoldable, GenericParamDefKind}; use rustc::ty::fold::TypeVisitor; -use rustc::ty::query::Providers; +use rustc::ty::query::{Providers, queries}; use rustc::ty::subst::Substs; use rustc::util::nodemap::NodeSet; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::sync::Lrc; -use syntax::ast::{self, CRATE_NODE_ID, Ident}; +use syntax::ast::{self, DUMMY_NODE_ID, Ident}; use syntax::attr; use syntax::symbol::keywords; use syntax_pos::Span; @@ -782,6 +782,10 @@ impl<'a, 'tcx> Visitor<'tcx> for NamePrivacyVisitor<'a, 'tcx> { NestedVisitorMap::All(&self.tcx.hir()) } + fn visit_mod(&mut self, _m: &'tcx hir::Mod, _s: Span, _n: ast::NodeId) { + // Don't visit modules inside + } + fn visit_nested_body(&mut self, body: hir::BodyId) { let orig_tables = mem::replace(&mut self.tables, self.tcx.body_tables(body)); let body = self.tcx.hir().body(body); @@ -917,6 +921,10 @@ impl<'a, 'tcx> Visitor<'tcx> for TypePrivacyVisitor<'a, 'tcx> { NestedVisitorMap::All(&self.tcx.hir()) } + fn visit_mod(&mut self, _m: &'tcx hir::Mod, _s: Span, _n: ast::NodeId) { + // Don't visit modules inside + } + fn visit_nested_body(&mut self, body: hir::BodyId) { let orig_tables = mem::replace(&mut self.tables, self.tcx.body_tables(body)); let orig_in_body = mem::replace(&mut self.in_body, true); @@ -1654,6 +1662,7 @@ impl<'a, 'tcx> Visitor<'tcx> for PrivateItemsInPublicInterfacesVisitor<'a, 'tcx> pub fn provide(providers: &mut Providers) { *providers = Providers { privacy_access_levels, + check_mod_privacy, ..*providers }; } @@ -1662,34 +1671,43 @@ pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Lrc { tcx.privacy_access_levels(LOCAL_CRATE) } -fn privacy_access_levels<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - krate: 
CrateNum) - -> Lrc { - assert_eq!(krate, LOCAL_CRATE); - - let krate = tcx.hir().krate(); +fn check_mod_privacy<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, module_def_id: DefId) { let empty_tables = ty::TypeckTables::empty(None); // Check privacy of names not checked in previous compilation stages. let mut visitor = NamePrivacyVisitor { tcx, tables: &empty_tables, - current_item: CRATE_NODE_ID, + current_item: DUMMY_NODE_ID, empty_tables: &empty_tables, }; - intravisit::walk_crate(&mut visitor, krate); + let (module, span, node_id) = tcx.hir().get_module(module_def_id); + intravisit::walk_mod(&mut visitor, module, node_id); // Check privacy of explicitly written types and traits as well as // inferred types of expressions and patterns. let mut visitor = TypePrivacyVisitor { tcx, tables: &empty_tables, - current_item: DefId::local(CRATE_DEF_INDEX), + current_item: module_def_id, in_body: false, - span: krate.span, + span, empty_tables: &empty_tables, }; - intravisit::walk_crate(&mut visitor, krate); + intravisit::walk_mod(&mut visitor, module, node_id); +} + +fn privacy_access_levels<'tcx>( + tcx: TyCtxt<'_, 'tcx, 'tcx>, + krate: CrateNum, +) -> Lrc { + assert_eq!(krate, LOCAL_CRATE); + + let krate = tcx.hir().krate(); + + for &module in tcx.hir().krate().modules.keys() { + queries::check_mod_privacy::ensure(tcx, tcx.hir().local_def_id(module)); + } // Build up a set of all exported items in the AST. This is a set of all // items which are reachable from external crates based on visibility. From e449f3d62927887c01fe8b16bfcb4282d007fca2 Mon Sep 17 00:00:00 2001 From: Stjepan Glavina Date: Mon, 14 Jan 2019 00:45:57 +0100 Subject: [PATCH 11/19] Fix failing test --- src/libcore/tests/iter.rs | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/src/libcore/tests/iter.rs b/src/libcore/tests/iter.rs index b62f55b2cd003..e2242ecc8cfbb 100644 --- a/src/libcore/tests/iter.rs +++ b/src/libcore/tests/iter.rs @@ -1,3 +1,4 @@ +use core::cell::Cell; use core::iter::*; use core::{i8, i16, isize}; use core::usize; @@ -1908,19 +1909,19 @@ fn test_once() { #[test] fn test_once_with() { - let mut count = 0; + let mut count = Cell::new(0); let mut it = once_with(|| { - count += 1; + count.set(count.get() + 1); 42 }); - assert_eq!(count, 0); + assert_eq!(count.get(), 0); assert_eq!(it.next(), Some(42)); - assert_eq!(count, 1); + assert_eq!(count.get(), 1); assert_eq!(it.next(), None); - assert_eq!(count, 1); + assert_eq!(count.get(), 1); assert_eq!(it.next(), None); - assert_eq!(count, 1); + assert_eq!(count.get(), 1); } #[test] From 728572440171d8d9c0557c89c3d71cc8d7cf6c2e Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Fri, 11 Jan 2019 10:36:54 +1100 Subject: [PATCH 12/19] Make `TokenStream` use `Option`. Because that's the more typical way of representing an all-or-nothing type. --- src/libsyntax/tokenstream.rs | 95 +++++++++++++++++------------------- 1 file changed, 45 insertions(+), 50 deletions(-) diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index d5c362490ca6a..f5d2d6f18ee87 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -141,11 +141,13 @@ impl TokenTree { /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s /// instead of a representation of the abstract syntax tree. /// Today's `TokenTree`s can still contain AST via `Token::Interpolated` for back-compat. +/// +/// The use of `Option` is an optimization that avoids the need for an +/// allocation when the stream is empty. 
From 728572440171d8d9c0557c89c3d71cc8d7cf6c2e Mon Sep 17 00:00:00 2001
From: Nicholas Nethercote
Date: Fri, 11 Jan 2019 10:36:54 +1100
Subject: [PATCH 12/19] Make `TokenStream` use `Option`.

Because that's the more typical way of representing an all-or-nothing type.
---
 src/libsyntax/tokenstream.rs | 95 +++++++++++++++++-------------------
 1 file changed, 45 insertions(+), 50 deletions(-)

diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index d5c362490ca6a..f5d2d6f18ee87 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -141,11 +141,13 @@ impl TokenTree {
 /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
 /// instead of a representation of the abstract syntax tree.
 /// Today's `TokenTree`s can still contain AST via `Token::Interpolated` for back-compat.
+///
+/// The use of `Option` is an optimization that avoids the need for an
+/// allocation when the stream is empty. However, it is not guaranteed that an
+/// empty stream is represented with `None`; it may be represented as a `Some`
+/// around an empty `Vec`.
 #[derive(Clone, Debug)]
-pub enum TokenStream {
-    Empty,
-    Stream(Lrc<Vec<TreeAndJoint>>),
-}
+pub struct TokenStream(Option<Lrc<Vec<TreeAndJoint>>>);
 
 pub type TreeAndJoint = (TokenTree, IsJoint);
 
@@ -166,7 +168,7 @@ impl TokenStream {
     /// separating the two arguments with a comma for diagnostic suggestions.
     pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> {
         // Used to suggest if a user writes `foo!(a b);`
-        if let TokenStream::Stream(ref stream) = self {
+        if let Some(ref stream) = self.0 {
             let mut suggestion = None;
             let mut iter = stream.iter().enumerate().peekable();
             while let Some((pos, ts)) = iter.next() {
@@ -230,7 +232,7 @@ impl PartialEq for TokenStream {
 
 impl TokenStream {
     pub fn len(&self) -> usize {
-        if let TokenStream::Stream(ref slice) = self {
+        if let Some(ref slice) = self.0 {
             slice.len()
         } else {
             0
@@ -238,13 +240,13 @@
     }
 
     pub fn empty() -> TokenStream {
-        TokenStream::Empty
+        TokenStream(None)
     }
 
     pub fn is_empty(&self) -> bool {
-        match self {
-            TokenStream::Empty => true,
-            _ => false,
+        match self.0 {
+            None => true,
+            Some(ref stream) => stream.is_empty(),
         }
     }
 
@@ -255,9 +257,9 @@
             _ => {
                 let mut vec = vec![];
                 for stream in streams {
-                    match stream {
-                        TokenStream::Empty => {},
-                        TokenStream::Stream(stream2) => vec.extend(stream2.iter().cloned()),
+                    match stream.0 {
+                        None => {},
+                        Some(stream2) => vec.extend(stream2.iter().cloned()),
                     }
                 }
                 TokenStream::new(vec)
@@ -267,15 +269,14 @@ impl TokenStream {
 
     pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
         match streams.len() {
-            0 => TokenStream::empty(),
-            _ => TokenStream::Stream(Lrc::new(streams)),
+            0 => TokenStream(None),
+            _ => TokenStream(Some(Lrc::new(streams))),
         }
     }
 
     pub fn append_to_tree_and_joint_vec(self, vec: &mut Vec<TreeAndJoint>) {
-        match self {
-            TokenStream::Empty => {}
-            TokenStream::Stream(stream) => vec.extend(stream.iter().cloned()),
+        if let Some(stream) = self.0 {
+            vec.extend(stream.iter().cloned());
         }
     }
 
@@ -340,41 +341,36 @@ impl TokenStream {
     }
 
     pub fn map_enumerated<F: FnMut(usize, TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        match self {
-            TokenStream::Empty => TokenStream::Empty,
-            TokenStream::Stream(stream) => TokenStream::Stream(Lrc::new(
+        TokenStream(self.0.map(|stream| {
+            Lrc::new(
                 stream
                     .iter()
                     .enumerate()
                     .map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
-                    .collect()
-            )),
-        }
+                    .collect())
+        }))
     }
 
     pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        match self {
-            TokenStream::Empty => TokenStream::Empty,
-            TokenStream::Stream(stream) => TokenStream::Stream(Lrc::new(
+        TokenStream(self.0.map(|stream| {
+            Lrc::new(
                 stream
                     .iter()
                     .map(|(tree, is_joint)| (f(tree.clone()), *is_joint))
-                    .collect()
-            )),
-        }
+                    .collect())
+        }))
    }

-    fn first_tree_and_joint(&self) -> Option<(TokenTree, IsJoint)> {
-        match self {
-            TokenStream::Empty => None,
-            TokenStream::Stream(ref stream) => Some(stream.first().unwrap().clone())
-        }
+    fn first_tree_and_joint(&self) -> Option<TreeAndJoint> {
+        self.0.as_ref().map(|stream| {
+            stream.first().unwrap().clone()
+        })
     }
 
     fn last_tree_if_joint(&self) -> Option<TokenTree> {
-        match self {
-            TokenStream::Empty => None,
-            TokenStream::Stream(ref stream) => {
+        match self.0 {
+            None => None,
+            Some(ref stream) => {
                 if let (tree, Joint) = stream.last().unwrap() {
                     Some(tree.clone())
                 } else {
@@ -418,21 +414,21 @@ impl TokenStreamBuilder {
     }
 
     fn push_all_but_last_tree(&mut self, stream: &TokenStream) {
-        if let TokenStream::Stream(ref streams) = stream {
+        if let Some(ref streams) = stream.0 {
             let len = streams.len();
             match len {
                 1 => {}
-                _ => self.0.push(TokenStream::Stream(Lrc::new(streams[0 .. len - 1].to_vec()))),
+                _ => self.0.push(TokenStream(Some(Lrc::new(streams[0 .. len - 1].to_vec())))),
             }
         }
     }
 
     fn push_all_but_first_tree(&mut self, stream: &TokenStream) {
-        if let TokenStream::Stream(ref streams) = stream {
+        if let Some(ref streams) = stream.0 {
             let len = streams.len();
             match len {
                 1 => {}
-                _ => self.0.push(TokenStream::Stream(Lrc::new(streams[1 .. len].to_vec()))),
+                _ => self.0.push(TokenStream(Some(Lrc::new(streams[1 .. len].to_vec())))),
             }
         }
     }
@@ -458,9 +454,9 @@ impl Cursor {
     }
 
     pub fn next_with_joint(&mut self) -> Option<TreeAndJoint> {
-        match self.stream {
-            TokenStream::Empty => None,
-            TokenStream::Stream(ref stream) => {
+        match self.stream.0 {
+            None => None,
+            Some(ref stream) => {
                 if self.index < stream.len() {
                     self.index += 1;
                     Some(stream[self.index - 1].clone())
                 } else {
@@ -476,16 +472,15 @@
             return;
         }
         let index = self.index;
-        let stream = mem::replace(&mut self.stream, TokenStream::Empty);
+        let stream = mem::replace(&mut self.stream, TokenStream(None));
         *self = TokenStream::from_streams(vec![stream, new_stream]).into_trees();
         self.index = index;
     }
 
     pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
-        match self.stream {
-            TokenStream::Empty => None,
-            TokenStream::Stream(ref stream) =>
-                stream[self.index ..].get(n).map(|(tree, _)| tree.clone()),
+        match self.stream.0 {
+            None => None,
+            Some(ref stream) => stream[self.index ..].get(n).map(|(tree, _)| tree.clone()),
         }
     }
 }
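
The payoff of the `Option` representation shows in the rewritten methods above: the empty case needs no heap allocation, and most operations collapse into a small `Option` combinator over the shared vector. A rough standalone model of the same shape (a sketch only: `Stream` is an invented name and `Rc` stands in for rustc's `Lrc`):

```rust
use std::rc::Rc;

#[derive(Clone, Debug)]
struct Stream<T>(Option<Rc<Vec<T>>>);

impl<T> Stream<T> {
    fn empty() -> Self {
        // The empty case costs no allocation at all.
        Stream(None)
    }

    fn new(items: Vec<T>) -> Self {
        if items.is_empty() { Stream(None) } else { Stream(Some(Rc::new(items))) }
    }

    fn len(&self) -> usize {
        self.0.as_ref().map_or(0, |v| v.len())
    }

    fn is_empty(&self) -> bool {
        // Mirrors the patched `is_empty`: a `Some` around an empty vector
        // must also count as empty, since `None` is not the only empty form.
        self.0.as_ref().map_or(true, |v| v.is_empty())
    }

    fn map<U, F: FnMut(&T) -> U>(self, mut f: F) -> Stream<U> {
        // Like the new `map`/`map_enumerated`: `None` passes through untouched.
        Stream(self.0.map(|v| Rc::new(v.iter().map(|x| f(x)).collect())))
    }
}

fn main() {
    let empty: Stream<i32> = Stream::empty();
    assert!(empty.is_empty());
    assert_eq!(empty.len(), 0);

    let s = Stream::new(vec![1, 2, 3]).map(|x| x * 10);
    assert_eq!(s.len(), 3);
    assert!(!s.is_empty());
}
```

As the new doc comment warns, `None` is not the only possible empty representation, which is why `is_empty` inspects the inner vector rather than assuming `Some` always means non-empty.
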
From f2dbdc4302b12c126953ac6ffb8609cb86659f22 Mon Sep 17 00:00:00 2001
From: Aaron Hill
Date: Mon, 14 Jan 2019 01:16:27 -0500
Subject: [PATCH 13/19] Add 'rustc-env:RUST_BACKTRACE=0' to const-pat-ice test

This ensures that the test passes, regardless of what the user has set
RUST_BACKTRACE to.
---
 src/test/ui/pattern/const-pat-ice.rs | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/test/ui/pattern/const-pat-ice.rs b/src/test/ui/pattern/const-pat-ice.rs
index 6496a2ab69f5c..865c54be1ad7b 100644
--- a/src/test/ui/pattern/const-pat-ice.rs
+++ b/src/test/ui/pattern/const-pat-ice.rs
@@ -1,4 +1,5 @@
 // failure-status: 101
+// rustc-env:RUST_BACKTRACE=0
 
 // This is a repro test for an ICE in our pattern handling of constants.
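
The `rustc-env` header asks compiletest to pin `RUST_BACKTRACE` for the compiler process it spawns, so the recorded ICE output stays identical no matter what the developer's shell exports. The effect is roughly that of overriding the variable on the child process by hand, as in this sketch (an illustration only: it assumes a `rustc` on `PATH` and uses `--version` as a stand-in for the real test invocation, which carries many more flags):

```rust
use std::process::Command;

fn main() {
    // Run the compiler with RUST_BACKTRACE pinned to "0" for this invocation
    // only, so the child's diagnostic output cannot vary with whatever value
    // the surrounding shell happens to export.
    let output = Command::new("rustc")
        .arg("--version")
        .env("RUST_BACKTRACE", "0")
        .output()
        .expect("failed to spawn rustc");

    println!("{}", String::from_utf8_lossy(&output.stdout));
}
```
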
From 7c083a8fede186f2fc39a1d0e30ee156417473b0 Mon Sep 17 00:00:00 2001
From: Stjepan Glavina
Date: Mon, 14 Jan 2019 12:23:50 +0100
Subject: [PATCH 14/19] Remove unnecessary mut

---
 src/libcore/tests/iter.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libcore/tests/iter.rs b/src/libcore/tests/iter.rs
index e2242ecc8cfbb..3944bc749d029 100644
--- a/src/libcore/tests/iter.rs
+++ b/src/libcore/tests/iter.rs
@@ -1909,7 +1909,7 @@ fn test_once() {
 
 #[test]
 fn test_once_with() {
-    let mut count = Cell::new(0);
+    let count = Cell::new(0);
     let mut it = once_with(|| {
         count.set(count.get() + 1);
         42

From 84718c1999482f703ab7fc58a70ffc86269efac8 Mon Sep 17 00:00:00 2001
From: Stjepan Glavina
Date: Mon, 14 Jan 2019 17:20:41 +0100
Subject: [PATCH 15/19] Add feature(iter_once_with)

---
 src/libcore/iter/sources.rs | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/libcore/iter/sources.rs b/src/libcore/iter/sources.rs
index 103d02da22f0e..59220b320918b 100644
--- a/src/libcore/iter/sources.rs
+++ b/src/libcore/iter/sources.rs
@@ -443,6 +443,8 @@ unsafe impl<A, F: FnOnce() -> A> TrustedLen for OnceWith<F> {}
 /// Basic usage:
 ///
 /// ```
+/// #![feature(iter_once_with)]
+///
 /// use std::iter;
 ///
 /// // one is the loneliest number

From 3a1f0131a63a32a82f22b1c4ae04f8177730588f Mon Sep 17 00:00:00 2001
From: Stjepan Glavina
Date: Mon, 14 Jan 2019 17:36:34 +0100
Subject: [PATCH 16/19] Add another feature(iter_once_with)

---
 src/libcore/iter/sources.rs | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/libcore/iter/sources.rs b/src/libcore/iter/sources.rs
index 59220b320918b..2590fa6023a53 100644
--- a/src/libcore/iter/sources.rs
+++ b/src/libcore/iter/sources.rs
@@ -461,6 +461,8 @@ unsafe impl<A, F: FnOnce() -> A> TrustedLen for OnceWith<F> {}
 /// `.foorc`:
 ///
 /// ```no_run
+/// #![feature(iter_once_with)]
+///
 /// use std::iter;
 /// use std::fs;
 /// use std::path::PathBuf;

From d808f938bc47d72fd5f7ff879a33e6be9bb6a499 Mon Sep 17 00:00:00 2001
From: timvisee
Date: Mon, 14 Jan 2019 21:20:01 +0100
Subject: [PATCH 17/19] Simplify 'product' factorial example

---
 src/libcore/iter/iterator.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libcore/iter/iterator.rs b/src/libcore/iter/iterator.rs
index 640af74817282..0ad29afbadeac 100644
--- a/src/libcore/iter/iterator.rs
+++ b/src/libcore/iter/iterator.rs
@@ -2358,7 +2358,7 @@ pub trait Iterator {
     ///
     /// ```
     /// fn factorial(n: u32) -> u32 {
-    ///     (1..).take_while(|&i| i <= n).product()
+    ///     (1..=n).product()
     /// }
     /// assert_eq!(factorial(0), 1);
     /// assert_eq!(factorial(1), 1);
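
The simplified doc example is equivalent to the old one: an inclusive range already stops at `n`, and `product()` over an empty range is defined to be 1, so `factorial(0)` still returns 1. A quick standalone check that the two formulations agree:

```rust
fn factorial_take_while(n: u32) -> u32 {
    // The old doc example: same result, but the upper bound is enforced by hand.
    (1..).take_while(|&i| i <= n).product()
}

fn factorial(n: u32) -> u32 {
    // The simplified doc example: the inclusive range stops at `n`, and the
    // product of an empty range is 1, so factorial(0) == 1 still holds.
    (1..=n).product()
}

fn main() {
    for n in 0..=5 {
        assert_eq!(factorial(n), factorial_take_while(n));
    }
    assert_eq!(factorial(0), 1);
    assert_eq!(factorial(5), 120);
}
```
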
From d38a59f8b5ad8ddbed5294bb4755618d7c2802aa Mon Sep 17 00:00:00 2001
From: Ariel Ben-Yehuda
Date: Tue, 15 Jan 2019 00:26:50 +0200
Subject: [PATCH 18/19] fix test output changing in rebase

---
 .../coherence-impl-trait-for-marker-trait-negative.stderr | 2 +-
 .../coherence-impl-trait-for-marker-trait-positive.stderr | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.stderr b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.stderr
index 6e146760db5e3..c8a146cdd4456 100644
--- a/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.stderr
+++ b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-negative.stderr
@@ -16,7 +16,7 @@ error[E0117]: only traits defined in the current crate can be implemented for ar
 LL | impl !Send for dyn Marker2 {} //~ ERROR E0117
    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^ impl doesn't use types inside crate
    |
-   = note: the impl does not reference any types defined in this crate
+   = note: the impl does not reference only types defined in this crate
    = note: define and implement a trait or new type instead
 
 error[E0321]: cross-crate traits with a default impl, like `std::marker::Send`, can only be implemented for a struct/enum type, not `(dyn Object + 'static)`
diff --git a/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.stderr b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.stderr
index 4a8347613eb13..78ca2f5279d63 100644
--- a/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.stderr
+++ b/src/test/ui/coherence/coherence-impl-trait-for-marker-trait-positive.stderr
@@ -16,7 +16,7 @@ error[E0117]: only traits defined in the current crate can be implemented for ar
 LL | unsafe impl Send for dyn Marker2 {} //~ ERROR E0117
    |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ impl doesn't use types inside crate
    |
-   = note: the impl does not reference any types defined in this crate
+   = note: the impl does not reference only types defined in this crate
    = note: define and implement a trait or new type instead
 
 error[E0321]: cross-crate traits with a default impl, like `std::marker::Send`, can only be implemented for a struct/enum type, not `(dyn Object + 'static)`

From 0d695ff33089c91a68c4be0877a53a216b991f10 Mon Sep 17 00:00:00 2001
From: Guillaume Gomez
Date: Tue, 15 Jan 2019 00:41:41 +0100
Subject: [PATCH 19/19] Fix crates filtering box not being filled

---
 src/librustdoc/html/static/main.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/librustdoc/html/static/main.js b/src/librustdoc/html/static/main.js
index 75b0f5df0d8b3..e75b471a8e84f 100644
--- a/src/librustdoc/html/static/main.js
+++ b/src/librustdoc/html/static/main.js
@@ -2422,7 +2422,7 @@ if (!DOMTokenList.prototype.remove) {
             return;
         }
         var crates_text = [];
-        if (crates.length > 1) {
+        if (Object.keys(crates).length > 1) {
             for (var crate in crates) {
                 if (crates.hasOwnProperty(crate)) {
                     crates_text.push(crate);