From cb5593a4480e397713f2813c93ebb88848497086 Mon Sep 17 00:00:00 2001 From: Daniel Hofstetter Date: Wed, 22 Oct 2014 16:16:05 +0200 Subject: [PATCH 01/47] Guide: Change >= to > in closure --- src/doc/guide.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/doc/guide.md b/src/doc/guide.md index 7a12ee98f44ba..6e75062b1f5fa 100644 --- a/src/doc/guide.md +++ b/src/doc/guide.md @@ -4347,7 +4347,7 @@ is one: ```{rust} let greater_than_forty_two = range(0i, 100i) - .find(|x| *x >= 42); + .find(|x| *x > 42); match greater_than_forty_two { Some(_) => println!("We got some numbers!"), From de06faf889f0939c7d9b24aeb724b46403b8dba4 Mon Sep 17 00:00:00 2001 From: Niko Matsakis Date: Wed, 22 Oct 2014 11:35:53 -0400 Subject: [PATCH 02/47] Use local cache when there are unbound type variables and where clauses in scope. Fixes #18209. --- src/librustc/middle/traits/doc.rs | 128 +++++++++++++++++++ src/librustc/middle/traits/select.rs | 51 ++++---- src/librustc/middle/ty.rs | 65 +++++----- src/test/run-pass/trait-cache-issue-18209.rs | 27 ++++ 4 files changed, 212 insertions(+), 59 deletions(-) create mode 100644 src/test/run-pass/trait-cache-issue-18209.rs diff --git a/src/librustc/middle/traits/doc.rs b/src/librustc/middle/traits/doc.rs index f24121d9a3a5f..a8fcdb360546b 100644 --- a/src/librustc/middle/traits/doc.rs +++ b/src/librustc/middle/traits/doc.rs @@ -279,4 +279,132 @@ selection. This is because it must account for the transformed self type of the receiver and various other complications. The procedure is described in `select.rs` in the "METHOD MATCHING" section. +# Caching and subtle considerations therewith + +In general we attempt to cache the results of trait selection. This +is a somewhat complex process. Part of the reason for this is that we +want to be able to cache results even when all the types in the trait +reference are not fully known. In that case, it may happen that the +trait selection process is also influencing type variables, so we have +to be able to not only cache the *result* of the selection process, +but *reply* its effects on the type variables. + +## An example + +The high-level idea of how the cache works is that we first replace +all unbound inference variables with skolemized versions. Therefore, +if we had a trait reference `uint : Foo<$1>`, where `$n` is an unbound +inference variable, we might replace it with `uint : Foo<%0>`, where +`%n` is a skolemized type. We would then look this up in the cache. +If we found a hit, the hit would tell us the immediate next step to +take in the selection process: i.e., apply impl #22, or apply where +clause `X : Foo`. Let's say in this case there is no hit. +Therefore, we search through impls and where clauses and so forth, and +we come to the conclusion that the only possible impl is this one, +with def-id 22: + + impl Foo for uint { ... } // Impl #22 + +We would then record in the cache `uint : Foo<%0> ==> +ImplCandidate(22)`. Next we would confirm `ImplCandidate(22)`, which +would (as a side-effect) unify `$1` with `int`. + +Now, at some later time, we might come along and see a `uint : +Foo<$3>`. When skolemized, this would yield `uint : Foo<%0>`, just as +before, and hence the cache lookup would succeed, yielding +`ImplCandidate(22)`. We would confirm `ImplCandidate(22)` which would +(as a side-effect) unify `$3` with `int`. + +## Where clauses and the local vs global cache + +One subtle interaction is that the results of trait lookup will vary +depending on what where clauses are in scope. 
Therefore, we actually +have *two* caches, a local and a global cache. The local cache is +attached to the `ParameterEnvironment` and the global cache attached +to the `tcx`. We use the local cache whenever the result might depend +on the where clauses that are in scope. The determination of which +cache to use is done by the method `pick_candidate_cache` in +`select.rs`. + +There are two cases where we currently use the local cache. The +current rules are probably more conservative than necessary. + +### Trait references that involve parameter types + +The most obvious case where you need the local environment is +when the trait reference includes parameter types. For example, +consider the following function: + + impl Vec { + fn foo(x: T) + where T : Foo + { ... } + + fn bar(x: T) + { ... } + } + +If there is an obligation `T : Foo`, or `int : Bar`, or whatever, +clearly the results from `foo` and `bar` are potentially different, +since the set of where clauses in scope are different. + +### Trait references with unbound variables when where clauses are in scope + +There is another less obvious interaction which involves unbound variables +where *only* where clauses are in scope (no impls). This manifested as +issue #18209 (`run-pass/trait-cache-issue-18209.rs`). Consider +this snippet: + +``` +pub trait Foo { + fn load_from() -> Box; + fn load() -> Box { + Foo::load_from() + } +} +``` + +The default method will incur an obligation `$0 : Foo` from the call +to `load_from`. If there are no impls, this can be eagerly resolved to +`VtableParam(Self : Foo)` and cached. Because the trait reference +doesn't involve any parameters types (only the resolution does), this +result was stored in the global cache, causing later calls to +`Foo::load_from()` to get nonsense. + +To fix this, we always use the local cache if there are unbound +variables and where clauses in scope. This is more conservative than +necessary as far as I can tell. However, it still seems to be a simple +rule and I observe ~99% hit rate on rustc, so it doesn't seem to hurt +us in particular. + +Here is an example of the kind of subtle case that I would be worried +about with a more complex rule (although this particular case works +out ok). Imagine the trait reference doesn't directly reference a +where clause, but the where clause plays a role in the winnowing +phase. Something like this: + +``` +pub trait Foo { ... } +pub trait Bar { ... } +impl Foo for T { ... } // Impl A +impl Foo for uint { ... } // Impl B +``` + +Now, in some function, we have no where clauses in scope, and we have +an obligation `$1 : Foo<$0>`. We might then conclude that `$0=char` +and `$1=uint`: this is because for impl A to apply, `uint:Bar` would +have to hold, and we know it does not or else the coherence check +would have failed. So we might enter into our global cache: `$1 : +Foo<$0> => Impl B`. Then we come along in a different scope, where a +generic type `A` is around with the bound `A:Bar`. Now suddenly the +impl is viable. + +The flaw in this imaginary DOOMSDAY SCENARIO is that we would not +currently conclude that `$1 : Foo<$0>` implies that `$0 == uint` and +`$1 == char`, even though it is true that (absent type parameters) +there is no other type the user could enter. However, it is not +*completely* implausible that we *could* draw this conclusion in the +future; we wouldn't have to guess types, in particular, we could be +led by the impls. 
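As a standalone illustration of the skolemization-based cache key described above (this sketch is not part of the patch: the `Ty`, `TraitRef`, and `Candidate` types are invented stand-ins for the compiler's real data structures, and the code is written in present-day Rust rather than the 2014 dialect used in this series):

```rust
use std::collections::HashMap;

// Invented, simplified stand-ins for the compiler's types.
#[derive(Clone, PartialEq, Eq, Hash)]
enum Ty {
    Uint,
    Infer(u32),      // an unbound inference variable such as `$1`
    Skolemized(u32), // a canonical placeholder such as `%0`
}

#[derive(Clone, PartialEq, Eq, Hash)]
struct TraitRef {
    trait_name: &'static str,
    input_types: Vec<Ty>,
}

#[derive(Clone)]
enum Candidate {
    ImplCandidate(u32),  // "apply impl #22"
    ParamCandidate(u32), // "apply where clause #n"
}

/// Replace each distinct unbound inference variable with a numbered
/// placeholder, so that `uint : Foo<$1>` and `uint : Foo<$3>` both map to
/// the same cache key `uint : Foo<%0>`.
fn skolemize(tr: &TraitRef) -> TraitRef {
    let mut mapping = HashMap::new();
    let mut next = 0u32;
    let input_types = tr
        .input_types
        .iter()
        .map(|t| match *t {
            Ty::Infer(v) => {
                let n = *mapping.entry(v).or_insert_with(|| {
                    let fresh = next;
                    next += 1;
                    fresh
                });
                Ty::Skolemized(n)
            }
            ref other => other.clone(),
        })
        .collect();
    TraitRef { trait_name: tr.trait_name, input_types }
}

/// Both lookups and insertions key on the skolemized form, so a hit hands
/// back the candidate chosen for an earlier, structurally identical
/// obligation.
fn check_cache(cache: &HashMap<TraitRef, Candidate>,
               obligation: &TraitRef) -> Option<Candidate> {
    cache.get(&skolemize(obligation)).cloned()
}
```

Confirming the returned candidate is then what unifies the original inference
variables (e.g. `$3` with `int`), which is how the cached result's effects on
type variables get replayed.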
+ */ diff --git a/src/librustc/middle/traits/select.rs b/src/librustc/middle/traits/select.rs index f923cf1e5903b..aa183dabaa018 100644 --- a/src/librustc/middle/traits/select.rs +++ b/src/librustc/middle/traits/select.rs @@ -844,19 +844,36 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { cache_skol_trait_ref: &Rc) -> &SelectionCache { + // High-level idea: we have to decide whether to consult the + // cache that is specific to this scope, or to consult the + // global cache. We want the cache that is specific to this + // scope whenever where clauses might affect the result. + // If the trait refers to any parameters in scope, then use - // the cache of the param-environment. This is because the - // result will depend on the where clauses that are in - // scope. Otherwise, use the generic tcx cache, since the - // result holds across all environments. + // the cache of the param-environment. if cache_skol_trait_ref.input_types().iter().any( |&t| ty::type_has_self(t) || ty::type_has_params(t)) { - &self.param_env.selection_cache - } else { - &self.tcx().selection_cache + return &self.param_env.selection_cache; } + + // If the trait refers to unbound type variables, and there + // are where clauses in scope, then use the local environment. + // If there are no where clauses in scope, which is a very + // common case, then we can use the global environment. + // See the discussion in doc.rs for more details. + if + !self.param_env.caller_obligations.is_empty() + && + cache_skol_trait_ref.input_types().iter().any( + |&t| ty::type_has_ty_infer(t)) + { + return &self.param_env.selection_cache; + } + + // Otherwise, we can use the global cache. + &self.tcx().selection_cache } fn check_candidate_cache(&mut self, @@ -1935,26 +1952,6 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { util::obligations_for_generics(self.tcx(), cause, recursion_depth, &impl_generics, impl_substs) } - - fn contains_skolemized_types(&self, - ty: ty::t) - -> bool - { - /*! - * True if the type contains skolemized variables. - */ - - let mut found_skol = false; - - ty::walk_ty(ty, |t| { - match ty::get(t).sty { - ty::ty_infer(ty::SkolemizedTy(_)) => { found_skol = true; } - _ => { } - } - }); - - found_skol - } } impl Repr for Candidate { diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index a7ce93279bd83..52ec97ab647be 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -585,18 +585,18 @@ pub struct ctxt<'tcx> { pub repr_hint_cache: RefCell>>>, } -pub enum tbox_flag { - has_params = 1, - has_self = 2, - needs_infer = 4, - has_regions = 8, - has_ty_err = 16, - has_ty_bot = 32, - - // a meta-pub flag: subst may be required if the type has parameters, a self - // type, or references bound regions - needs_subst = 1 | 2 | 8 -} +// Flags that we track on types. These flags are propagated upwards +// through the type during type construction, so that we can quickly +// check whether the type has various kinds of types in it without +// recursing over the type itself. 
+const HAS_PARAMS: uint = 1; +const HAS_SELF: uint = 2; +const HAS_TY_INFER: uint = 4; +const HAS_RE_INFER: uint = 8; +const HAS_REGIONS: uint = 16; +const HAS_TY_ERR: uint = 32; +const HAS_TY_BOT: uint = 64; +const NEEDS_SUBST: uint = HAS_PARAMS | HAS_SELF | HAS_REGIONS; pub type t_box = &'static t_box_; @@ -631,15 +631,16 @@ pub fn get(t: t) -> t_box { } } -pub fn tbox_has_flag(tb: t_box, flag: tbox_flag) -> bool { - (tb.flags & (flag as uint)) != 0u +fn tbox_has_flag(tb: t_box, flag: uint) -> bool { + (tb.flags & flag) != 0u } pub fn type_has_params(t: t) -> bool { - tbox_has_flag(get(t), has_params) + tbox_has_flag(get(t), HAS_PARAMS) } -pub fn type_has_self(t: t) -> bool { tbox_has_flag(get(t), has_self) } +pub fn type_has_self(t: t) -> bool { tbox_has_flag(get(t), HAS_SELF) } +pub fn type_has_ty_infer(t: t) -> bool { tbox_has_flag(get(t), HAS_TY_INFER) } pub fn type_needs_infer(t: t) -> bool { - tbox_has_flag(get(t), needs_infer) + tbox_has_flag(get(t), HAS_TY_INFER | HAS_RE_INFER) } pub fn type_id(t: t) -> uint { get(t).id } @@ -910,13 +911,13 @@ mod primitives { pub static TY_BOT: t_box_ = t_box_ { sty: super::ty_bot, id: 16, - flags: super::has_ty_bot as uint, + flags: super::HAS_TY_BOT, }; pub static TY_ERR: t_box_ = t_box_ { sty: super::ty_err, id: 17, - flags: super::has_ty_err as uint, + flags: super::HAS_TY_ERR, }; pub const LAST_PRIMITIVE_ID: uint = 18; @@ -1579,9 +1580,9 @@ pub fn mk_t(cx: &ctxt, st: sty) -> t { let mut flags = 0u; fn rflags(r: Region) -> uint { - (has_regions as uint) | { + HAS_REGIONS | { match r { - ty::ReInfer(_) => needs_infer as uint, + ty::ReInfer(_) => HAS_RE_INFER, _ => 0u } } @@ -1610,22 +1611,22 @@ pub fn mk_t(cx: &ctxt, st: sty) -> t { &ty_str => {} // You might think that we could just return ty_err for // any type containing ty_err as a component, and get - // rid of the has_ty_err flag -- likewise for ty_bot (with + // rid of the HAS_TY_ERR flag -- likewise for ty_bot (with // the exception of function types that return bot). // But doing so caused sporadic memory corruption, and // neither I (tjc) nor nmatsakis could figure out why, // so we're doing it this way. - &ty_bot => flags |= has_ty_bot as uint, - &ty_err => flags |= has_ty_err as uint, + &ty_bot => flags |= HAS_TY_BOT, + &ty_err => flags |= HAS_TY_ERR, &ty_param(ref p) => { if p.space == subst::SelfSpace { - flags |= has_self as uint; + flags |= HAS_SELF; } else { - flags |= has_params as uint; + flags |= HAS_PARAMS; } } &ty_unboxed_closure(_, ref region) => flags |= rflags(*region), - &ty_infer(_) => flags |= needs_infer as uint, + &ty_infer(_) => flags |= HAS_TY_INFER, &ty_enum(_, ref substs) | &ty_struct(_, ref substs) => { flags |= sflags(substs); } @@ -1648,7 +1649,7 @@ pub fn mk_t(cx: &ctxt, st: sty) -> t { for a in f.sig.inputs.iter() { flags |= get(*a).flags; } flags |= get(f.sig.output).flags; // T -> _|_ is *not* _|_ ! - flags &= !(has_ty_bot as uint); + flags &= !HAS_TY_BOT; } &ty_closure(ref f) => { match f.store { @@ -1660,7 +1661,7 @@ pub fn mk_t(cx: &ctxt, st: sty) -> t { for a in f.sig.inputs.iter() { flags |= get(*a).flags; } flags |= get(f.sig.output).flags; // T -> _|_ is *not* _|_ ! 
- flags &= !(has_ty_bot as uint); + flags &= !HAS_TY_BOT; flags |= flags_for_bounds(&f.bounds); } } @@ -1979,15 +1980,15 @@ impl ItemSubsts { pub fn type_is_nil(ty: t) -> bool { get(ty).sty == ty_nil } pub fn type_is_bot(ty: t) -> bool { - (get(ty).flags & (has_ty_bot as uint)) != 0 + (get(ty).flags & HAS_TY_BOT) != 0 } pub fn type_is_error(ty: t) -> bool { - (get(ty).flags & (has_ty_err as uint)) != 0 + (get(ty).flags & HAS_TY_ERR) != 0 } pub fn type_needs_subst(ty: t) -> bool { - tbox_has_flag(get(ty), needs_subst) + tbox_has_flag(get(ty), NEEDS_SUBST) } pub fn trait_ref_contains_error(tref: &ty::TraitRef) -> bool { diff --git a/src/test/run-pass/trait-cache-issue-18209.rs b/src/test/run-pass/trait-cache-issue-18209.rs new file mode 100644 index 0000000000000..a5efb32079de8 --- /dev/null +++ b/src/test/run-pass/trait-cache-issue-18209.rs @@ -0,0 +1,27 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test that the cache results from the default method do not pollute +// the cache for the later call in `load()`. +// +// See issue #18209. + +pub trait Foo { + fn load_from() -> Box; + fn load() -> Box { + Foo::load_from() + } +} + +pub fn load() -> Box { + Foo::load() +} + +fn main() { } From ebdc3046a4419b6d97e1c865705ed59ee1058f5a Mon Sep 17 00:00:00 2001 From: areski Date: Thu, 23 Oct 2014 00:44:17 +0200 Subject: [PATCH 03/47] Fixed single quote around string slice and simplify example This patch contains a fix for: - single quote around string slice - string: String is confusing for newbies and it's more readble if the argument name is different that the argument type name --- src/doc/guide-strings.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/doc/guide-strings.md b/src/doc/guide-strings.md index 44fc0d8304475..1c7cb1a3b4eb9 100644 --- a/src/doc/guide-strings.md +++ b/src/doc/guide-strings.md @@ -14,8 +14,8 @@ Rust has two main types of strings: `&str` and `String`. # &str -The first kind is a `&str`. This is pronounced a 'string slice.' String literals -are of the type `&str`: +The first kind is a `&str`. This is pronounced a 'string slice'. +String literals are of the type `&str`: ```{rust} let string = "Hello there."; @@ -121,8 +121,8 @@ Both of these lines will print `12`. To compare a String to a constant string, prefer `as_slice()`... ```{rust} -fn compare(string: String) { - if string.as_slice() == "Hello" { +fn compare(x: String) { + if x.as_slice() == "Hello" { println!("yes"); } } @@ -131,8 +131,8 @@ fn compare(string: String) { ... over `to_string()`: ```{rust} -fn compare(string: String) { - if string == "Hello".to_string() { +fn compare(x: String) { + if x == "Hello".to_string() { println!("yes"); } } From 70fe20a69827a2b1abfef338c83f9ec9be7c9376 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B6rn=20Steinbrink?= Date: Wed, 22 Oct 2014 21:23:26 +0200 Subject: [PATCH 04/47] Fix codegen breaking aliasing rules for functions with sret results This reverts commit a0ec902e239b2219edf1a18b036dd32c18d3be42 "Avoid unnecessary temporary on assignments". Leaving out the temporary for the functions return value can lead to a situation that conflicts with rust's aliasing rules. Given this: ````rust fn func(f: &mut Foo) -> Foo { /* ... 
*/ } fn bar() { let mut foo = Foo { /* ... */ }; foo = func(&mut foo); } ```` We effectively get two mutable references to the same variable `foo` at the same time. One for the parameter `f`, and one for the hidden out-pointer. So we can't just `trans_into` the destination directly, but must use `trans` to get a new temporary slot from which the result can be copied. --- src/librustc/middle/trans/expr.rs | 4 +-- src/test/run-pass/out-pointer-aliasing.rs | 30 +++++++++++++++++++++++ 2 files changed, 32 insertions(+), 2 deletions(-) create mode 100644 src/test/run-pass/out-pointer-aliasing.rs diff --git a/src/librustc/middle/trans/expr.rs b/src/librustc/middle/trans/expr.rs index d638286a9c5bd..a7b9c137331b8 100644 --- a/src/librustc/middle/trans/expr.rs +++ b/src/librustc/middle/trans/expr.rs @@ -940,6 +940,7 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, controlflow::trans_loop(bcx, expr.id, &**body) } ast::ExprAssign(ref dst, ref src) => { + let src_datum = unpack_datum!(bcx, trans(bcx, &**src)); let dst_datum = unpack_datum!(bcx, trans_to_lvalue(bcx, &**dst, "assign")); if ty::type_needs_drop(bcx.tcx(), dst_datum.ty) { @@ -960,7 +961,6 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // We could avoid this intermediary with some analysis // to determine whether `dst` may possibly own `src`. debuginfo::set_source_location(bcx.fcx, expr.id, expr.span); - let src_datum = unpack_datum!(bcx, trans(bcx, &**src)); let src_datum = unpack_datum!( bcx, src_datum.to_rvalue_datum(bcx, "ExprAssign")); bcx = glue::drop_ty(bcx, @@ -969,7 +969,7 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, Some(NodeInfo { id: expr.id, span: expr.span })); src_datum.store_to(bcx, dst_datum.val) } else { - trans_into(bcx, &**src, SaveIn(dst_datum.to_llref())) + src_datum.store_to(bcx, dst_datum.val) } } ast::ExprAssignOp(op, ref dst, ref src) => { diff --git a/src/test/run-pass/out-pointer-aliasing.rs b/src/test/run-pass/out-pointer-aliasing.rs new file mode 100644 index 0000000000000..2a44df7a1b566 --- /dev/null +++ b/src/test/run-pass/out-pointer-aliasing.rs @@ -0,0 +1,30 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +pub struct Foo { + f1: int, + _f2: int, +} + +#[inline(never)] +pub fn foo(f: &mut Foo) -> Foo { + let ret = *f; + f.f1 = 0; + ret +} + +pub fn main() { + let mut f = Foo { + f1: 8, + _f2: 9, + }; + f = foo(&mut f); + assert_eq!(f.f1, 8); +} From 0c3ad8be5bae6d3df6044a07171a9938cb700009 Mon Sep 17 00:00:00 2001 From: Steve Klabnik Date: Thu, 23 Oct 2014 04:35:35 -0400 Subject: [PATCH 05/47] build the crates guide --- configure | 1 + mk/docs.mk | 2 +- src/doc/guide-crates.md | 2 +- src/doc/index.md | 1 + src/doc/po4a.conf | 1 + 5 files changed, 5 insertions(+), 2 deletions(-) diff --git a/configure b/configure index 05bfdb3e9fd8a..ff1c1f4db016b 100755 --- a/configure +++ b/configure @@ -928,6 +928,7 @@ do make_dir $h/test/doc-guide-container make_dir $h/test/doc-guide-tasks make_dir $h/test/doc-guide-plugin + make_dir $h/test/doc-guide-crates make_dir $h/test/doc-rust done diff --git a/mk/docs.mk b/mk/docs.mk index ab73a72f00a83..9dbcb2c9bbcd5 100644 --- a/mk/docs.mk +++ b/mk/docs.mk @@ -27,7 +27,7 @@ ###################################################################### DOCS := index intro tutorial guide guide-ffi guide-macros guide-lifetimes \ guide-tasks guide-container guide-pointers guide-testing \ - guide-plugin complement-bugreport \ + guide-plugin guide-crates complement-bugreport \ complement-lang-faq complement-design-faq complement-project-faq rust \ rustdoc guide-unsafe guide-strings reference diff --git a/src/doc/guide-crates.md b/src/doc/guide-crates.md index 8705bdd278ec7..50d76371cc51e 100644 --- a/src/doc/guide-crates.md +++ b/src/doc/guide-crates.md @@ -452,7 +452,7 @@ fn main() { Rust will give us a compile-time error: -```{rust,ignore} +```{notrust,ignore} Compiling phrases v0.0.1 (file:///home/you/projects/phrases) /home/you/projects/phrases/src/main.rs:4:5: 4:40 error: a value named `hello` has already been imported in this module /home/you/projects/phrases/src/main.rs:4 use phrases::japanese::greetings::hello; diff --git a/src/doc/index.md b/src/doc/index.md index ad548d3a8f93c..8d54550a9f988 100644 --- a/src/doc/index.md +++ b/src/doc/index.md @@ -57,6 +57,7 @@ a guide that can help you out: * [Strings](guide-strings.html) * [Pointers](guide-pointers.html) * [References and Lifetimes](guide-lifetimes.html) +* [Crates and modules](guide-crates.html) * [Tasks and Communication](guide-tasks.html) * [Foreign Function Interface](guide-ffi.html) * [Writing Unsafe and Low-Level Code](guide-unsafe.html) diff --git a/src/doc/po4a.conf b/src/doc/po4a.conf index 6fd4c95794ee7..25f1a5fd3e2e4 100644 --- a/src/doc/po4a.conf +++ b/src/doc/po4a.conf @@ -19,6 +19,7 @@ [type: text] src/doc/guide-tasks.md $lang:doc/l10n/$lang/guide-tasks.md [type: text] src/doc/guide-testing.md $lang:doc/l10n/$lang/guide-testing.md [type: text] src/doc/guide-unsafe.md $lang:doc/l10n/$lang/guide-unsafe.md +[type: text] src/doc/guide-unsafe.md $lang:doc/l10n/$lang/guide-crates.md [type: text] src/doc/guide.md $lang:doc/l10n/$lang/guide.md [type: text] src/doc/index.md $lang:doc/l10n/$lang/index.md [type: text] src/doc/intro.md $lang:doc/l10n/$lang/intro.md From 941d90d97f8ab8658fca613c0ff79cde8ddf8fb5 Mon Sep 17 00:00:00 2001 From: Simon Sapin Date: Thu, 23 Oct 2014 23:56:33 +0900 Subject: [PATCH 06/47] Add syntax::print::pprint::view_item_to_string MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit … similar to the existing `item_to_string`. There may be more missing like this. 
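For illustration only (not part of this patch), the new helper mirrors `item_to_string`: it takes an `&ast::ViewItem` and returns its pretty-printed text. Assuming the `Crate`/`Mod` layout of this era's libsyntax, where a module's `use` and `extern crate` declarations live in `view_items`, a caller might use it like this:

```rust
extern crate syntax;

use syntax::ast;
use syntax::print::pprust;

// Pretty-print every view item (`use` / `extern crate`) at the crate root.
fn dump_view_items(krate: &ast::Crate) {
    for vi in krate.module.view_items.iter() {
        println!("{}", pprust::view_item_to_string(vi));
    }
}
```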
--- src/libsyntax/print/pprust.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index cdcbeedddb239..5265c193fba7c 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -224,6 +224,10 @@ pub fn item_to_string(i: &ast::Item) -> String { $to_string(|s| s.print_item(i)) } +pub fn view_item_to_string(i: &ast::ViewItem) -> String { + $to_string(|s| s.print_view_item(i)) +} + pub fn generics_to_string(generics: &ast::Generics) -> String { $to_string(|s| s.print_generics(generics)) } From 0b8bd6f6672739ca874b4d9fd8122dc30610cd26 Mon Sep 17 00:00:00 2001 From: Robin Gloster Date: Thu, 23 Oct 2014 17:20:57 +0200 Subject: [PATCH 07/47] plugin-guide: nicer creation of the parser --- src/doc/guide-plugin.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/doc/guide-plugin.md b/src/doc/guide-plugin.md index 3830a2126e172..d560a5c3b6831 100644 --- a/src/doc/guide-plugin.md +++ b/src/doc/guide-plugin.md @@ -151,8 +151,7 @@ higher-level syntax elements like expressions: fn expand_foo(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree]) -> Box { - let mut parser = - parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), args.to_slice()) + let mut parser = cx.new_parser_from_tts(args); let expr: P = parser.parse_expr(); ``` From 398b28722d1d94325398b5a05e0708eb33f9335f Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Thu, 23 Oct 2014 09:38:37 -0700 Subject: [PATCH 08/47] Register new snapshots This in theory enables uncommenting IndexMut implementations, but upon doing so the compiler immediately segfaulted in stage1, so I'll leave those to a later time. --- src/snapshots.txt | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/snapshots.txt b/src/snapshots.txt index 4d88fd6a03164..f6eea15d8cfcf 100644 --- a/src/snapshots.txt +++ b/src/snapshots.txt @@ -1,3 +1,12 @@ +S 2014-10-22 d44ea72 + freebsd-x86_64 8bf5ee7c1ca8ab880800cf3a535e16bb7ffbf9e8 + linux-i386 1fc8302b405406a3fc183b23c8397bef5a56c52a + linux-x86_64 3e04d8197a96b0c858e4e2763b3893df35ae2fb3 + macos-i386 b9823771ae6237a3c1c19eb2e98a2372ce23439d + macos-x86_64 3cf9fc1cd252a80430d8673e35a1256674e122ae + winnt-i386 5a6d2ad82a31deffad5b6a17487a8cd5c21f7636 + winnt-x86_64 7468b87eb5be238993ccd41ad74bbd88dd176d31 + S 2014-10-10 78a7676 freebsd-x86_64 511061af382e2e837a6d615823e1a952e8281483 linux-i386 0644637db852db8a6c603ded0531ccaa60291bd3 From 53ac852bf0cef1db971d27dd9c733c639b85430d Mon Sep 17 00:00:00 2001 From: Markus Siemens Date: Wed, 22 Oct 2014 22:23:22 +0200 Subject: [PATCH 09/47] Fail immediately if linking returns status code != 0 --- src/librustc/back/write.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/librustc/back/write.rs b/src/librustc/back/write.rs index f1cd8b52e5ed3..474e8e45d94c2 100644 --- a/src/librustc/back/write.rs +++ b/src/librustc/back/write.rs @@ -714,7 +714,13 @@ pub fn run_passes(sess: &Session, .stdout(::std::io::process::InheritFd(1)) .stderr(::std::io::process::InheritFd(2)); match cmd.status() { - Ok(_) => {}, + Ok(status) => { + if !status.success() { + sess.err(format!("linking of {} with `{}` failed", + output_path.display(), cmd).as_slice()); + sess.abort_if_errors(); + } + }, Err(e) => { sess.err(format!("could not exec the linker `{}`: {}", pname, From d7e910ec9060a9748e221c2325209e54aa2da018 Mon Sep 17 00:00:00 2001 From: Jakub Bukaj Date: Thu, 23 Oct 2014 23:01:31 +0200 Subject: [PATCH 10/47] Update .mailmap --- .mailmap | 100 
+++++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 98 insertions(+), 2 deletions(-) diff --git a/.mailmap b/.mailmap index 637b9ec8c4efb..f529fe1bb78c3 100644 --- a/.mailmap +++ b/.mailmap @@ -5,8 +5,104 @@ # email addresses. # -Elly Jones -ILyoan +Aaron Todd +Ahmed Charles +Alex Lyon +Alex Rønne Petersen +Andreas Gal +Andrew Poelstra +Anton Löfgren +Ariel Ben-Yehuda +Austin Seipp +Ben Alpert +Benjamin Jackman +Björn Steinbrink +blake2-ppc +Boris Egorov +Brian Anderson +Brian Dawn +Carl-Anton Ingmarsson +Carol Willing +Chris Pressey +Clark Gaebel +David Klein +David Manescu +Damien Schoof +Derek Chiang Derek Chiang (Enchi Jiang) +Dylan Braithwaite +Eduardo Bautista +Eduardo Bautista <=> +Elliott Slaughter +Elly Fong-Jones +Eric Holk +Eric Holk +Eric Holmes +Eric Reed +Erick Tryzelaar +Evgeny Sologubov +Falco Hirschenberger +Gareth Daniel Smith +Georges Dubus +Graham Fawcett +Graydon Hoare +Heather +Heather +Ilyong Cho +J. J. Weber +Jakub Bukaj +Jakub Bukaj +James Deng +James Miller +Jason Orendorff +Jason Orendorff +Jeremy Letang +Jihyun Yu jihyun +Jihyun Yu +John Clements +Jorge Aparicio +Jonathan Bailey Junyoung Cho +Jyun-Yan You +Kang Seonghoon +Keegan McAllister +Kyeongwoon Lee +Lee Wondong +Lennart Kudling +Lindsey Kuper +Lindsey Kuper +Luqman Aden +Luqman Aden +Luke Metz +Makoto Nakashima +Makoto Nakashima gifnksm +Margaret Meyerhofer +Mark Sinclair +Mark Sinclair =Mark Sinclair <=125axel125@gmail.com> +Matt Brubeck +Matthew Auld +Matthew McPherrin Matthijs Hofstra +Michael Williams +Michael Woerister +Michael Woerister +Neil Pankey +Philipp Brüschweiler +Philipp Brüschweiler +Pradeep Kumar +Richard Diamond Rob Arnold +Robert Gawdzik Robert Gawdzik ☢ +Robert Millar +Ryan Scheel +Seonghyun Kim +Simon Barber-Dueck Simon BD +Simon Sapin +startling +Steven Fackler +Steven Stewart-Gallus +Tim Chevalier +Torsten Weber +William Ting +Youngsoo Son +Zack Corr +Zack Slayton From e4b913f60f109e0646f532a9b74e9dbd8ac06b1b Mon Sep 17 00:00:00 2001 From: Nick Cameron Date: Fri, 24 Oct 2014 11:25:35 +1300 Subject: [PATCH 11/47] s/vtable2/vtable --- src/librustc/middle/typeck/check/method.rs | 2 +- src/librustc/middle/typeck/check/mod.rs | 14 +++++++------- src/librustc/middle/typeck/check/regionck.rs | 4 ++-- .../middle/typeck/check/{vtable2.rs => vtable.rs} | 0 src/librustc/middle/typeck/check/wf.rs | 4 ++-- 5 files changed, 12 insertions(+), 12 deletions(-) rename src/librustc/middle/typeck/check/{vtable2.rs => vtable.rs} (100%) diff --git a/src/librustc/middle/typeck/check/method.rs b/src/librustc/middle/typeck/check/method.rs index 4560c51946494..b768bb8f2d65a 100644 --- a/src/librustc/middle/typeck/check/method.rs +++ b/src/librustc/middle/typeck/check/method.rs @@ -88,7 +88,7 @@ use middle::ty; use middle::typeck::astconv::AstConv; use middle::typeck::check::{FnCtxt, NoPreference, PreferMutLvalue}; use middle::typeck::check::{impl_self_ty}; -use middle::typeck::check::vtable2::select_fcx_obligations_where_possible; +use middle::typeck::check::vtable::select_fcx_obligations_where_possible; use middle::typeck::check; use middle::typeck::infer; use middle::typeck::{MethodCall, MethodCallee}; diff --git a/src/librustc/middle/typeck/check/mod.rs b/src/librustc/middle/typeck/check/mod.rs index 5f7b31e573ade..596e30f00cd5b 100644 --- a/src/librustc/middle/typeck/check/mod.rs +++ b/src/librustc/middle/typeck/check/mod.rs @@ -139,7 +139,7 @@ use syntax::visit::Visitor; use syntax; pub mod _match; -pub mod vtable2; // New trait code +pub mod vtable; pub mod writeback; pub mod 
regionmanip; pub mod regionck; @@ -409,7 +409,7 @@ fn check_bare_fn(ccx: &CrateCtxt, let fcx = check_fn(ccx, fn_ty.fn_style, id, &fn_ty.sig, decl, id, body, &inh); - vtable2::select_all_fcx_obligations_or_error(&fcx); + vtable::select_all_fcx_obligations_or_error(&fcx); regionck::regionck_fn(&fcx, id, body); writeback::resolve_type_vars_in_fn(&fcx, decl, body); } @@ -1377,7 +1377,7 @@ fn check_cast(fcx: &FnCtxt, if ty::type_is_trait(t_1) { // This will be looked up later on. - vtable2::check_object_cast(fcx, cast_expr, e, t_1); + vtable::check_object_cast(fcx, cast_expr, e, t_1); fcx.write_ty(id, t_1); return } @@ -1682,7 +1682,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ty::UnsizeVtable(ref ty_trait, self_ty) => { // If the type is `Foo+'a`, ensures that the type // being cast to `Foo+'a` implements `Foo`: - vtable2::register_object_cast_obligations(self, + vtable::register_object_cast_obligations(self, span, ty_trait, self_ty); @@ -2565,7 +2565,7 @@ fn check_argument_types<'a>(fcx: &FnCtxt, // an "opportunistic" vtable resolution of any trait // bounds on the call. if check_blocks { - vtable2::select_fcx_obligations_where_possible(fcx); + vtable::select_fcx_obligations_where_possible(fcx); } // For variadic functions, we don't have a declared type for all of @@ -4037,7 +4037,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, ast::ExprForLoop(ref pat, ref head, ref block, _) => { check_expr(fcx, &**head); let typ = lookup_method_for_for_loop(fcx, &**head, expr.id); - vtable2::select_fcx_obligations_where_possible(fcx); + vtable::select_fcx_obligations_where_possible(fcx); let pcx = pat_ctxt { fcx: fcx, @@ -4744,7 +4744,7 @@ pub fn check_const_with_ty(fcx: &FnCtxt, check_expr_with_hint(fcx, e, declty); demand::coerce(fcx, e.span, declty, e); - vtable2::select_all_fcx_obligations_or_error(fcx); + vtable::select_all_fcx_obligations_or_error(fcx); regionck::regionck_expr(fcx, e); writeback::resolve_type_vars_in_expr(fcx, e); } diff --git a/src/librustc/middle/typeck/check/regionck.rs b/src/librustc/middle/typeck/check/regionck.rs index 76074120c0edb..7cca4f241d1e6 100644 --- a/src/librustc/middle/typeck/check/regionck.rs +++ b/src/librustc/middle/typeck/check/regionck.rs @@ -126,7 +126,7 @@ use middle::ty; use middle::typeck::astconv::AstConv; use middle::typeck::check::FnCtxt; use middle::typeck::check::regionmanip; -use middle::typeck::check::vtable2; +use middle::typeck::check::vtable; use middle::typeck::infer::resolve_and_force_all_but_regions; use middle::typeck::infer::resolve_type; use middle::typeck::infer; @@ -172,7 +172,7 @@ pub fn regionck_fn(fcx: &FnCtxt, id: ast::NodeId, blk: &ast::Block) { // Region checking a fn can introduce new trait obligations, // particularly around closure bounds. 
- vtable2::select_all_fcx_obligations_or_error(fcx); + vtable::select_all_fcx_obligations_or_error(fcx); fcx.infcx().resolve_regions_and_report_errors(); } diff --git a/src/librustc/middle/typeck/check/vtable2.rs b/src/librustc/middle/typeck/check/vtable.rs similarity index 100% rename from src/librustc/middle/typeck/check/vtable2.rs rename to src/librustc/middle/typeck/check/vtable.rs diff --git a/src/librustc/middle/typeck/check/wf.rs b/src/librustc/middle/typeck/check/wf.rs index dc79fd4aa328c..3c594fbf2d366 100644 --- a/src/librustc/middle/typeck/check/wf.rs +++ b/src/librustc/middle/typeck/check/wf.rs @@ -14,7 +14,7 @@ use middle::traits; use middle::ty; use middle::ty_fold::{TypeFolder, TypeFoldable}; use middle::typeck::astconv::AstConv; -use middle::typeck::check::{FnCtxt, Inherited, blank_fn_ctxt, vtable2, regionck}; +use middle::typeck::check::{FnCtxt, Inherited, blank_fn_ctxt, vtable, regionck}; use middle::typeck::check::regionmanip::replace_late_bound_regions; use middle::typeck::CrateCtxt; use util::ppaux::Repr; @@ -100,7 +100,7 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { let inh = Inherited::new(ccx.tcx, param_env); let fcx = blank_fn_ctxt(ccx, &inh, polytype.ty, item.id); f(self, &fcx); - vtable2::select_all_fcx_obligations_or_error(&fcx); + vtable::select_all_fcx_obligations_or_error(&fcx); regionck::regionck_item(&fcx, item); } From 4a8d712345f30cba4f33bce9e0ece0eac64e8764 Mon Sep 17 00:00:00 2001 From: Niko Matsakis Date: Fri, 24 Oct 2014 10:20:02 -0400 Subject: [PATCH 12/47] Use type-safe wrapper for TypeFlags --- src/librustc/middle/traits/doc.rs | 2 +- src/librustc/middle/ty.rs | 103 +++++++++++++++++------------- 2 files changed, 59 insertions(+), 46 deletions(-) diff --git a/src/librustc/middle/traits/doc.rs b/src/librustc/middle/traits/doc.rs index a8fcdb360546b..c014bc0c164f2 100644 --- a/src/librustc/middle/traits/doc.rs +++ b/src/librustc/middle/traits/doc.rs @@ -287,7 +287,7 @@ want to be able to cache results even when all the types in the trait reference are not fully known. In that case, it may happen that the trait selection process is also influencing type variables, so we have to be able to not only cache the *result* of the selection process, -but *reply* its effects on the type variables. +but *replay* its effects on the type variables. ## An example diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index 52ec97ab647be..4a081c0db3733 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -589,14 +589,19 @@ pub struct ctxt<'tcx> { // through the type during type construction, so that we can quickly // check whether the type has various kinds of types in it without // recursing over the type itself. -const HAS_PARAMS: uint = 1; -const HAS_SELF: uint = 2; -const HAS_TY_INFER: uint = 4; -const HAS_RE_INFER: uint = 8; -const HAS_REGIONS: uint = 16; -const HAS_TY_ERR: uint = 32; -const HAS_TY_BOT: uint = 64; -const NEEDS_SUBST: uint = HAS_PARAMS | HAS_SELF | HAS_REGIONS; +bitflags! 
{ + flags TypeFlags: u32 { + const NO_TYPE_FLAGS = 0b0, + const HAS_PARAMS = 0b1, + const HAS_SELF = 0b10, + const HAS_TY_INFER = 0b100, + const HAS_RE_INFER = 0b1000, + const HAS_REGIONS = 0b10000, + const HAS_TY_ERR = 0b100000, + const HAS_TY_BOT = 0b1000000, + const NEEDS_SUBST = HAS_PARAMS.bits | HAS_SELF.bits | HAS_REGIONS.bits, + } +} pub type t_box = &'static t_box_; @@ -604,7 +609,13 @@ pub type t_box = &'static t_box_; pub struct t_box_ { pub sty: sty, pub id: uint, - pub flags: uint, + pub flags: TypeFlags, +} + +impl fmt::Show for TypeFlags { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.bits) + } } // To reduce refcounting cost, we're representing types as unsafe pointers @@ -631,8 +642,8 @@ pub fn get(t: t) -> t_box { } } -fn tbox_has_flag(tb: t_box, flag: uint) -> bool { - (tb.flags & flag) != 0u +fn tbox_has_flag(tb: t_box, flag: TypeFlags) -> bool { + tb.flags.intersects(flag) } pub fn type_has_params(t: t) -> bool { tbox_has_flag(get(t), HAS_PARAMS) @@ -887,7 +898,7 @@ mod primitives { pub static $name: t_box_ = t_box_ { sty: $sty, id: $id, - flags: 0, + flags: super::NO_TYPE_FLAGS, }; ) ) @@ -1578,32 +1589,32 @@ pub fn mk_t(cx: &ctxt, st: sty) -> t { _ => () } - let mut flags = 0u; - fn rflags(r: Region) -> uint { + let mut flags = NO_TYPE_FLAGS; + fn rflags(r: Region) -> TypeFlags { HAS_REGIONS | { match r { ty::ReInfer(_) => HAS_RE_INFER, - _ => 0u + _ => NO_TYPE_FLAGS, } } } - fn sflags(substs: &Substs) -> uint { - let mut f = 0u; + fn sflags(substs: &Substs) -> TypeFlags { + let mut f = NO_TYPE_FLAGS; let mut i = substs.types.iter(); for tt in i { - f |= get(*tt).flags; + f = f | get(*tt).flags; } match substs.regions { subst::ErasedRegions => {} subst::NonerasedRegions(ref regions) => { for r in regions.iter() { - f |= rflags(*r) + f = f | rflags(*r) } } } return f; } - fn flags_for_bounds(bounds: &ExistentialBounds) -> uint { + fn flags_for_bounds(bounds: &ExistentialBounds) -> TypeFlags { rflags(bounds.region_bound) } match &st { @@ -1616,53 +1627,53 @@ pub fn mk_t(cx: &ctxt, st: sty) -> t { // But doing so caused sporadic memory corruption, and // neither I (tjc) nor nmatsakis could figure out why, // so we're doing it this way. - &ty_bot => flags |= HAS_TY_BOT, - &ty_err => flags |= HAS_TY_ERR, + &ty_bot => flags = flags | HAS_TY_BOT, + &ty_err => flags = flags | HAS_TY_ERR, &ty_param(ref p) => { if p.space == subst::SelfSpace { - flags |= HAS_SELF; + flags = flags | HAS_SELF; } else { - flags |= HAS_PARAMS; + flags = flags | HAS_PARAMS; } } - &ty_unboxed_closure(_, ref region) => flags |= rflags(*region), - &ty_infer(_) => flags |= HAS_TY_INFER, + &ty_unboxed_closure(_, ref region) => flags = flags | rflags(*region), + &ty_infer(_) => flags = flags | HAS_TY_INFER, &ty_enum(_, ref substs) | &ty_struct(_, ref substs) => { - flags |= sflags(substs); + flags = flags | sflags(substs); } &ty_trait(box TyTrait { ref substs, ref bounds, .. 
}) => { - flags |= sflags(substs); - flags |= flags_for_bounds(bounds); + flags = flags | sflags(substs); + flags = flags | flags_for_bounds(bounds); } &ty_uniq(tt) | &ty_vec(tt, _) | &ty_open(tt) => { - flags |= get(tt).flags + flags = flags | get(tt).flags } &ty_ptr(ref m) => { - flags |= get(m.ty).flags; + flags = flags | get(m.ty).flags; } &ty_rptr(r, ref m) => { - flags |= rflags(r); - flags |= get(m.ty).flags; + flags = flags | rflags(r); + flags = flags | get(m.ty).flags; } - &ty_tup(ref ts) => for tt in ts.iter() { flags |= get(*tt).flags; }, + &ty_tup(ref ts) => for tt in ts.iter() { flags = flags | get(*tt).flags; }, &ty_bare_fn(ref f) => { - for a in f.sig.inputs.iter() { flags |= get(*a).flags; } - flags |= get(f.sig.output).flags; + for a in f.sig.inputs.iter() { flags = flags | get(*a).flags; } + flags = flags | get(f.sig.output).flags; // T -> _|_ is *not* _|_ ! - flags &= !HAS_TY_BOT; + flags = flags - HAS_TY_BOT; } &ty_closure(ref f) => { match f.store { RegionTraitStore(r, _) => { - flags |= rflags(r); + flags = flags | rflags(r); } _ => {} } - for a in f.sig.inputs.iter() { flags |= get(*a).flags; } - flags |= get(f.sig.output).flags; + for a in f.sig.inputs.iter() { flags = flags | get(*a).flags; } + flags = flags | get(f.sig.output).flags; // T -> _|_ is *not* _|_ ! - flags &= !HAS_TY_BOT; - flags |= flags_for_bounds(&f.bounds); + flags = flags - HAS_TY_BOT; + flags = flags | flags_for_bounds(&f.bounds); } } @@ -1977,14 +1988,16 @@ impl ItemSubsts { // Type utilities -pub fn type_is_nil(ty: t) -> bool { get(ty).sty == ty_nil } +pub fn type_is_nil(ty: t) -> bool { + get(ty).sty == ty_nil +} pub fn type_is_bot(ty: t) -> bool { - (get(ty).flags & HAS_TY_BOT) != 0 + get(ty).flags.intersects(HAS_TY_BOT) } pub fn type_is_error(ty: t) -> bool { - (get(ty).flags & HAS_TY_ERR) != 0 + get(ty).flags.intersects(HAS_TY_ERR) } pub fn type_needs_subst(ty: t) -> bool { From 87a5f0ddf477d1c86ac6e016feefd7ae9c574226 Mon Sep 17 00:00:00 2001 From: Clark Gaebel Date: Fri, 24 Oct 2014 19:31:17 -0700 Subject: [PATCH 13/47] Make the Vec data structure layout match raw::Slice. Fixes #18302 r? 
@thestinger --- src/libcollections/slice.rs | 2 +- src/libcollections/string.rs | 2 +- src/libcollections/vec.rs | 18 +++++++++--------- src/libsyntax/owned_slice.rs | 2 +- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/src/libcollections/slice.rs b/src/libcollections/slice.rs index d061e60a42265..d4115df7da478 100644 --- a/src/libcollections/slice.rs +++ b/src/libcollections/slice.rs @@ -292,7 +292,7 @@ impl BoxedSlice for Box<[T]> { #[experimental] fn into_vec(mut self) -> Vec { unsafe { - let xs = Vec::from_raw_parts(self.len(), self.len(), self.as_mut_ptr()); + let xs = Vec::from_raw_parts(self.as_mut_ptr(), self.len(), self.len()); mem::forget(self); xs } diff --git a/src/libcollections/string.rs b/src/libcollections/string.rs index fa45dee7cdea7..c44a03b05cd35 100644 --- a/src/libcollections/string.rs +++ b/src/libcollections/string.rs @@ -780,7 +780,7 @@ pub mod raw { #[inline] pub unsafe fn from_parts(buf: *mut u8, length: uint, capacity: uint) -> String { String { - vec: Vec::from_raw_parts(length, capacity, buf), + vec: Vec::from_raw_parts(buf, length, capacity), } } diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs index e608a7d22dcf5..c57a465df3780 100644 --- a/src/libcollections/vec.rs +++ b/src/libcollections/vec.rs @@ -103,9 +103,9 @@ use slice::{Items, MutItems}; #[unsafe_no_drop_flag] #[stable] pub struct Vec { + ptr: *mut T, len: uint, cap: uint, - ptr: *mut T } impl Vec { @@ -125,7 +125,7 @@ impl Vec { // non-null value which is fine since we never call deallocate on the ptr // if cap is 0. The reason for this is because the pointer of a slice // being NULL would break the null pointer optimization for enums. - Vec { len: 0, cap: 0, ptr: EMPTY as *mut T } + Vec { ptr: EMPTY as *mut T, len: 0, cap: 0 } } /// Constructs a new, empty `Vec` with the specified capacity. @@ -159,14 +159,14 @@ impl Vec { #[stable] pub fn with_capacity(capacity: uint) -> Vec { if mem::size_of::() == 0 { - Vec { len: 0, cap: uint::MAX, ptr: EMPTY as *mut T } + Vec { ptr: EMPTY as *mut T, len: 0, cap: uint::MAX } } else if capacity == 0 { Vec::new() } else { let size = capacity.checked_mul(&mem::size_of::()) .expect("capacity overflow"); let ptr = unsafe { allocate(size, mem::min_align_of::()) }; - Vec { len: 0, cap: capacity, ptr: ptr as *mut T } + Vec { ptr: ptr as *mut T, len: 0, cap: capacity } } } @@ -237,9 +237,9 @@ impl Vec { /// } /// ``` #[experimental] - pub unsafe fn from_raw_parts(length: uint, capacity: uint, - ptr: *mut T) -> Vec { - Vec { len: length, cap: capacity, ptr: ptr } + pub unsafe fn from_raw_parts(ptr: *mut T, length: uint, + capacity: uint) -> Vec { + Vec { ptr: ptr, len: length, cap: capacity } } /// Consumes the `Vec`, partitioning it based on a predicate. 
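As a quick aside (not part of the patch), the new `from_raw_parts(ptr, length, capacity)` parameter order now reads in the same order as the struct's own fields; a minimal round-trip using that signature looks like:

```rust
use std::mem;

// Decompose a Vec into its raw parts, then rebuild it with the
// (ptr, len, cap) argument order introduced by this change.
fn roundtrip(mut v: Vec<u8>) -> Vec<u8> {
    let ptr = v.as_mut_ptr();
    let len = v.len();
    let cap = v.capacity();
    mem::forget(v); // the raw parts now own the allocation
    unsafe { Vec::from_raw_parts(ptr, len, cap) }
}
```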
@@ -1680,7 +1680,7 @@ impl<'a, T> Drop for DerefVec<'a, T> { pub fn as_vec<'a, T>(x: &'a [T]) -> DerefVec<'a, T> { unsafe { DerefVec { - x: Vec::from_raw_parts(x.len(), x.len(), x.as_ptr() as *mut T), + x: Vec::from_raw_parts(x.as_ptr() as *mut T, x.len(), x.len()), l: ContravariantLifetime::<'a> } } @@ -1929,7 +1929,7 @@ impl Vec { let vec_cap = pv.vec.capacity(); let vec_ptr = pv.vec.as_mut_ptr() as *mut U; mem::forget(pv); - Vec::from_raw_parts(vec_len, vec_cap, vec_ptr) + Vec::from_raw_parts(vec_ptr, vec_len, vec_cap) } } else { // Put the `Vec` into the `PartialVecZeroSized` structure and diff --git a/src/libsyntax/owned_slice.rs b/src/libsyntax/owned_slice.rs index e5c37e5041abf..4f09b34557c74 100644 --- a/src/libsyntax/owned_slice.rs +++ b/src/libsyntax/owned_slice.rs @@ -74,7 +74,7 @@ impl OwnedSlice { pub fn into_vec(self) -> Vec { // null is ok, because len == 0 in that case, as required by Vec. unsafe { - let ret = Vec::from_raw_parts(self.len, self.len, self.data); + let ret = Vec::from_raw_parts(self.data, self.len, self.len); // the vector owns the allocation now mem::forget(self); ret From 122199909d608193983fdd3751f947d7a13e8516 Mon Sep 17 00:00:00 2001 From: Daniel Hofstetter Date: Sat, 25 Oct 2014 16:22:04 +0200 Subject: [PATCH 14/47] Guide: Fix off-by-one error --- src/doc/guide.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/doc/guide.md b/src/doc/guide.md index c7b8e42b28cde..811d600df31f2 100644 --- a/src/doc/guide.md +++ b/src/doc/guide.md @@ -4474,7 +4474,7 @@ range(1i, 100i).map(|x| x + 1i); `map` is called upon another iterator, and produces a new iterator where each element reference has the closure it's been given as an argument called on it. -So this would give us the numbers from `2-101`. Well, almost! If you +So this would give us the numbers from `2-100`. Well, almost! If you compile the example, you'll get a warning: ```{notrust,ignore} From 81b7e629183aac0968fa6d8cd7e3c0f7fa2613e3 Mon Sep 17 00:00:00 2001 From: Ariel Ben-Yehuda Date: Sat, 25 Oct 2014 20:18:32 +0300 Subject: [PATCH 15/47] Prevent pointer -> int casts in constexprs These cause issues, as addresses aren't fixed at compile-time. 
Fixes #18294 --- src/librustc/diagnostics.rs | 1 + src/librustc/middle/check_const.rs | 14 ++++++++++---- src/test/compile-fail/issue-18294.rs | 15 +++++++++++++++ 3 files changed, 26 insertions(+), 4 deletions(-) create mode 100644 src/test/compile-fail/issue-18294.rs diff --git a/src/librustc/diagnostics.rs b/src/librustc/diagnostics.rs index 601a9a73c3d3a..9db79074d1fd1 100644 --- a/src/librustc/diagnostics.rs +++ b/src/librustc/diagnostics.rs @@ -36,6 +36,7 @@ register_diagnostics!( E0015, E0016, E0017, + E0018, E0019, E0020, E0022, diff --git a/src/librustc/middle/check_const.rs b/src/librustc/middle/check_const.rs index d6b9bbded4ff8..6cf1a93b40b7b 100644 --- a/src/librustc/middle/check_const.rs +++ b/src/librustc/middle/check_const.rs @@ -119,12 +119,18 @@ fn check_expr(v: &mut CheckCrateVisitor, e: &Expr) -> bool { } } ExprLit(_) => (), - ExprCast(_, _) => { - let ety = ty::expr_ty(v.tcx, e); - if !ty::type_is_numeric(ety) && !ty::type_is_unsafe_ptr(ety) { + ExprCast(ref from, _) => { + let toty = ty::expr_ty(v.tcx, e); + let fromty = ty::expr_ty(v.tcx, &**from); + if !ty::type_is_numeric(toty) && !ty::type_is_unsafe_ptr(toty) { span_err!(v.tcx.sess, e.span, E0012, "can not cast to `{}` in a constant expression", - ppaux::ty_to_string(v.tcx, ety)); + ppaux::ty_to_string(v.tcx, toty)); + } + if ty::type_is_unsafe_ptr(fromty) && ty::type_is_numeric(toty) { + span_err!(v.tcx.sess, e.span, E0018, + "can not cast a pointer to an integer in a constant \ + expression"); } } ExprPath(ref pth) => { diff --git a/src/test/compile-fail/issue-18294.rs b/src/test/compile-fail/issue-18294.rs new file mode 100644 index 0000000000000..ca4cf526f07a4 --- /dev/null +++ b/src/test/compile-fail/issue-18294.rs @@ -0,0 +1,15 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + const X: u32 = 1; + const Y: uint = &X as *const u32 as uint; //~ ERROR E0018 + println!("{}", Y); +} From 3942ab92f08db0f62aabdb1fb9a9f05ad34cc56d Mon Sep 17 00:00:00 2001 From: Daniel Micay Date: Sat, 25 Oct 2014 13:28:17 -0400 Subject: [PATCH 16/47] make the core::raw struct representation defined Closes #18313 --- src/libcore/raw.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/libcore/raw.rs b/src/libcore/raw.rs index a62e2ecdca032..fe365b43ca6d1 100644 --- a/src/libcore/raw.rs +++ b/src/libcore/raw.rs @@ -21,18 +21,21 @@ use mem; /// The representation of a Rust slice +#[repr(C)] pub struct Slice { pub data: *const T, pub len: uint, } /// The representation of a Rust closure +#[repr(C)] pub struct Closure { pub code: *mut (), pub env: *mut (), } /// The representation of a Rust procedure (`proc()`) +#[repr(C)] pub struct Procedure { pub code: *mut (), pub env: *mut (), @@ -42,6 +45,7 @@ pub struct Procedure { /// /// This struct does not have a `Repr` implementation /// because there is no way to refer to all trait objects generically. 
+#[repr(C)] pub struct TraitObject { pub data: *mut (), pub vtable: *mut (), @@ -60,4 +64,3 @@ pub trait Repr { impl<'a, T> Repr> for &'a [T] {} impl<'a> Repr> for &'a str {} - From 019a982f5164efdc8b89f6afccdc83fea10a07ca Mon Sep 17 00:00:00 2001 From: Piotr Szotkowski Date: Sat, 25 Oct 2014 22:50:38 +0200 Subject: [PATCH 17/47] Guide: drop :d formatting where unnecessary --- src/doc/guide.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/doc/guide.md b/src/doc/guide.md index 6ef76219fa97f..bf52783ddd962 100644 --- a/src/doc/guide.md +++ b/src/doc/guide.md @@ -1130,12 +1130,12 @@ fn main() { let y = Missing; match x { - Value(n) => println!("x is {:d}", n), + Value(n) => println!("x is {}", n), Missing => println!("x is missing!"), } match y { - Value(n) => println!("y is {:d}", n), + Value(n) => println!("y is {}", n), Missing => println!("y is missing!"), } } @@ -1301,7 +1301,7 @@ Instead, it looks like this: ```{rust} for x in range(0i, 10i) { - println!("{:d}", x); + println!("{}", x); } ``` @@ -1408,7 +1408,7 @@ iteration: This will only print the odd numbers: for x in range(0i, 10i) { if x % 2 == 0 { continue; } - println!("{:d}", x); + println!("{}", x); } ``` @@ -1677,12 +1677,12 @@ fn main() { let y = Missing; match x { - Value(n) => println!("x is {:d}", n), + Value(n) => println!("x is {}", n), Missing => println!("x is missing!"), } match y { - Value(n) => println!("y is {:d}", n), + Value(n) => println!("y is {}", n), Missing => println!("y is missing!"), } } @@ -4217,7 +4217,7 @@ Remember Rust's `for` loop? Here's an example: ```{rust} for x in range(0i, 10i) { - println!("{:d}", x); + println!("{}", x); } ``` From 6a224541455809bf787563a1ec907e6c8b1271d1 Mon Sep 17 00:00:00 2001 From: Piotr Szotkowski Date: Sat, 25 Oct 2014 22:53:00 +0200 Subject: [PATCH 18/47] =?UTF-8?q?Guide:=20Cargo=20now=20adds=20bang=20and?= =?UTF-8?q?=20drops=20the=20semicolon=20for=20=E2=80=98Hello,=20world?= =?UTF-8?q?=E2=80=99?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/doc/guide.md | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/doc/guide.md b/src/doc/guide.md index 6ef76219fa97f..274eaddf69805 100644 --- a/src/doc/guide.md +++ b/src/doc/guide.md @@ -1793,7 +1793,7 @@ Finally, Cargo generated a hello, world for us. Check out `src/main.rs`: ```{rust} fn main() { - println!("Hello, world!"); + println!("Hello, world!") } ``` @@ -2682,12 +2682,12 @@ like this: ``` fn main() { - println!("Hello, world!"); + println!("Hello, world!") } mod hello { fn print_hello() { - println!("Hello, world!"); + println!("Hello, world!") } } ``` @@ -2721,7 +2721,7 @@ fn main() { mod hello { fn print_hello() { - println!("Hello, world!"); + println!("Hello, world!") } } ``` @@ -2744,7 +2744,7 @@ fn main() { mod hello { pub fn print_hello() { - println!("Hello, world!"); + println!("Hello, world!") } } ``` @@ -2923,7 +2923,7 @@ $ cargo test Compiling testing v0.0.1 (file:///home/you/projects/testing) /home/you/projects/testing/src/main.rs:1:1: 3:2 warning: code is never used: `main`, #[warn(dead_code)] on by default /home/you/projects/testing/src/main.rs:1 fn main() { -/home/you/projects/testing/src/main.rs:2 println!("Hello, world"); +/home/you/projects/testing/src/main.rs:2 println!("Hello, world!") /home/you/projects/testing/src/main.rs:3 } running 0 tests @@ -2962,7 +2962,7 @@ You can run all of your tests with `cargo test`. 
This runs both your tests in ```{notrust,ignore} /home/you/projects/testing/src/main.rs:1:1: 3:2 warning: code is never used: `main`, #[warn(dead_code)] on by default /home/you/projects/testing/src/main.rs:1 fn main() { -/home/you/projects/testing/src/main.rs:2 println!("Hello, world"); +/home/you/projects/testing/src/main.rs:2 println!("Hello, world!") /home/you/projects/testing/src/main.rs:3 } ``` @@ -3051,7 +3051,7 @@ function. Change your `src/main.rs` to look like this: ```{rust} #[cfg(not(test))] fn main() { - println!("Hello, world"); + println!("Hello, world!") } ``` @@ -3133,7 +3133,7 @@ extern crate testing; #[cfg(not(test))] fn main() { - println!("Hello, world"); + println!("Hello, world!") } ``` From eb903b484389259cdfcae068776d306ee1293d66 Mon Sep 17 00:00:00 2001 From: Piotr Szotkowski Date: Sat, 25 Oct 2014 22:57:11 +0200 Subject: [PATCH 19/47] Guide: update Testing output and fix contents to match --- src/doc/guide.md | 115 +++++++++++++++++++++++++++-------------------- 1 file changed, 66 insertions(+), 49 deletions(-) diff --git a/src/doc/guide.md b/src/doc/guide.md index 274eaddf69805..cd7478938187a 100644 --- a/src/doc/guide.md +++ b/src/doc/guide.md @@ -2921,15 +2921,11 @@ it `false`, so this test should fail. Let's try it! ```{notrust,ignore} $ cargo test Compiling testing v0.0.1 (file:///home/you/projects/testing) -/home/you/projects/testing/src/main.rs:1:1: 3:2 warning: code is never used: `main`, #[warn(dead_code)] on by default +/home/you/projects/testing/src/main.rs:1:1: 3:2 warning: function is never used: `main`, #[warn(dead_code)] on by default /home/you/projects/testing/src/main.rs:1 fn main() { /home/you/projects/testing/src/main.rs:2 println!("Hello, world!") /home/you/projects/testing/src/main.rs:3 } - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - + Running target/lib-654ce120f310a3a5 running 1 test test foo ... FAILED @@ -2946,7 +2942,7 @@ failures: test result: FAILED. 0 passed; 1 failed; 0 ignored; 0 measured -task '
' failed at 'Some tests failed', /home/you/src/rust/src/libtest/lib.rs:242 +task '
' failed at 'Some tests failed', /home/you/src/rust/src/libtest/lib.rs:243 ``` Lots of output! Let's break this down: @@ -2960,7 +2956,7 @@ You can run all of your tests with `cargo test`. This runs both your tests in `tests`, as well as the tests you put inside of your crate. ```{notrust,ignore} -/home/you/projects/testing/src/main.rs:1:1: 3:2 warning: code is never used: `main`, #[warn(dead_code)] on by default +/home/you/projects/testing/src/main.rs:1:1: 3:2 warning: function is never used: `main`, #[warn(dead_code)] on by default /home/you/projects/testing/src/main.rs:1 fn main() { /home/you/projects/testing/src/main.rs:2 println!("Hello, world!") /home/you/projects/testing/src/main.rs:3 } @@ -2974,18 +2970,8 @@ We'll turn this lint off for just this function soon. For now, just ignore this output. ```{notrust,ignore} -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured -``` + Running target/lib-654ce120f310a3a5 -Wait a minute, zero tests? Didn't we define one? Yup. This output is from -attempting to run the tests in our crate, of which we don't have any. -You'll note that Rust reports on several kinds of tests: passed, failed, -ignored, and measured. The 'measured' tests refer to benchmark tests, which -we'll cover soon enough! - -```{notrust,ignore} running 1 test test foo ... FAILED ``` @@ -3008,7 +2994,7 @@ failures: test result: FAILED. 0 passed; 1 failed; 0 ignored; 0 measured -task '
' failed at 'Some tests failed', /home/you/src/rust/src/libtest/lib.rs:242 +task '
' failed at 'Some tests failed', /home/you/src/rust/src/libtest/lib.rs:243 ``` After all the tests run, Rust will show us any output from our failed tests. @@ -3029,24 +3015,25 @@ And then try to run our tests again: ```{notrust,ignore} $ cargo test Compiling testing v0.0.1 (file:///home/you/projects/testing) -/home/you/projects/testing/src/main.rs:1:1: 3:2 warning: code is never used: `main`, #[warn(dead_code)] on by default -/home/you/projects/testing/src/main.rs:1 fn main() { -/home/you/projects/testing/src/main.rs:2 println!("Hello, world"); -/home/you/projects/testing/src/main.rs:3 } - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - + Running target/lib-654ce120f310a3a5 running 1 test test foo ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured + + Running target/testing-6d7518593c7c3ee5 + +running 0 tests + +test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ``` -Nice! Our test passes, as we expected. Let's get rid of that warning for our `main` -function. Change your `src/main.rs` to look like this: +Nice! Our test passes, as we expected. Note how we didn't get the +`main` warning this time? This is because `src/main.rs` didn't +need recompiling, but we'll get that warning again if we +change (and recompile) that file. Let's get rid of that +warning; change your `src/main.rs` to look like this: ```{rust} #[cfg(not(test))] @@ -3062,21 +3049,24 @@ our tests, it sets things up so that `cfg(test)` is true. But we want to only include `main` when it's _not_ true. So we use `not` to negate things: `cfg(not(test))` will only compile our code when the `cfg(test)` is false. -With this attribute, we won't get the warning: +With this attribute we won't get the warning (even +though `src/main.rs` gets recompiled this time): ```{notrust,ignore} $ cargo test Compiling testing v0.0.1 (file:///home/you/projects/testing) - -running 0 tests - -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - + Running target/lib-654ce120f310a3a5 running 1 test test foo ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured + + Running target/testing-6d7518593c7c3ee5 + +running 0 tests + +test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ``` Nice. Okay, let's write a real test now. Change your `tests/lib.rs` @@ -3156,21 +3146,30 @@ Let's give it a run: ```{ignore,notrust} $ cargo test Compiling testing v0.0.1 (file:///home/you/projects/testing) + Running target/lib-654ce120f310a3a5 + +running 1 test +test math_checks_out ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured + + Running target/testing-6d7518593c7c3ee5 running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured + Running target/testing-8a94b31f7fd2e8fe running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured + Doc-tests testing -running 1 test -test math_checks_out ... ok +running 0 tests -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured +test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ``` Great! One test passed. We've got an integration test showing that our public @@ -3196,21 +3195,30 @@ If you run `cargo test`, you should get the same output: ```{ignore,notrust} $ cargo test Compiling testing v0.0.1 (file:///home/you/projects/testing) + Running target/lib-654ce120f310a3a5 + +running 1 test +test math_checks_out ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured + + Running target/testing-6d7518593c7c3ee5 running 0 tests test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured + Running target/testing-8a94b31f7fd2e8fe running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured + Doc-tests testing -running 1 test -test math_checks_out ... ok +running 0 tests -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured +test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ``` If we tried to write a test for these two new functions, it wouldn't @@ -3283,6 +3291,20 @@ Let's give it a shot: ```{ignore,notrust} $ cargo test Compiling testing v0.0.1 (file:///home/you/projects/testing) + Running target/lib-654ce120f310a3a5 + +running 1 test +test math_checks_out ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured + + Running target/testing-6d7518593c7c3ee5 + +running 0 tests + +test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured + + Running target/testing-8a94b31f7fd2e8fe running 2 tests test test::test_times_four ... ok @@ -3290,16 +3312,11 @@ test test::test_add_three ... ok test result: ok. 2 passed; 0 failed; 0 ignored; 0 measured + Doc-tests testing running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured - - -running 1 test -test math_checks_out ... ok - -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured ``` Cool! We now have two tests of our internal functions. You'll note that there From 2c744c7f32a737be1c01a4fd717f18bef278a93c Mon Sep 17 00:00:00 2001 From: Jakub Bukaj Date: Sun, 26 Oct 2014 00:07:41 +0200 Subject: [PATCH 20/47] Add test cases for E-needstest issues --- src/test/compile-fail/issue-12863.rs | 17 +++++++++++ src/test/compile-fail/issue-14721.rs | 15 ++++++++++ src/test/compile-fail/issue-16683.rs | 18 ++++++++++++ src/test/compile-fail/issue-17551.rs | 18 ++++++++++++ src/test/compile-fail/issue-18118.rs | 17 +++++++++++ src/test/compile-fail/issue-18252.rs | 19 ++++++++++++ src/test/compile-fail/issue-6991.rs | 15 ++++++++++ src/test/compile-fail/issue-7867.rs | 27 +++++++++++++++++ src/test/run-pass/closure-syntax.rs | 6 ++++ src/test/run-pass/issue-12028.rs | 44 ++++++++++++++++++++++++++++ src/test/run-pass/issue-14901.rs | 27 +++++++++++++++++ src/test/run-pass/issue-16560.rs | 25 ++++++++++++++++ src/test/run-pass/issue-16668.rs | 30 +++++++++++++++++++ 13 files changed, 278 insertions(+) create mode 100644 src/test/compile-fail/issue-12863.rs create mode 100644 src/test/compile-fail/issue-14721.rs create mode 100644 src/test/compile-fail/issue-16683.rs create mode 100644 src/test/compile-fail/issue-17551.rs create mode 100644 src/test/compile-fail/issue-18118.rs create mode 100644 src/test/compile-fail/issue-18252.rs create mode 100644 src/test/compile-fail/issue-6991.rs create mode 100644 src/test/compile-fail/issue-7867.rs create mode 100644 src/test/run-pass/issue-12028.rs create mode 100644 src/test/run-pass/issue-14901.rs create mode 100644 src/test/run-pass/issue-16560.rs create mode 100644 src/test/run-pass/issue-16668.rs diff --git a/src/test/compile-fail/issue-12863.rs b/src/test/compile-fail/issue-12863.rs new file mode 100644 index 0000000000000..07676679ef18f --- /dev/null +++ b/src/test/compile-fail/issue-12863.rs @@ -0,0 +1,17 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +mod foo { pub fn bar() {} } + +fn main() { + match () { + foo::bar => {} //~ ERROR `bar` is not an enum variant, struct or const + } +} diff --git a/src/test/compile-fail/issue-14721.rs b/src/test/compile-fail/issue-14721.rs new file mode 100644 index 0000000000000..92add18f9413c --- /dev/null +++ b/src/test/compile-fail/issue-14721.rs @@ -0,0 +1,15 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + let foo = "str"; + println!("{}", foo.desc); //~ ERROR attempted access of field `desc` on type `&str`, + // but no field with that name was found +} diff --git a/src/test/compile-fail/issue-16683.rs b/src/test/compile-fail/issue-16683.rs new file mode 100644 index 0000000000000..d9dfaac572066 --- /dev/null +++ b/src/test/compile-fail/issue-16683.rs @@ -0,0 +1,18 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +trait T<'a> { + fn a(&'a self) -> &'a bool; + fn b(&self) { + self.a(); //~ ERROR mismatched types: expected `&'a Self`, found `&Self` (lifetime mismatch) + } +} + +fn main() {} diff --git a/src/test/compile-fail/issue-17551.rs b/src/test/compile-fail/issue-17551.rs new file mode 100644 index 0000000000000..197319b6d4340 --- /dev/null +++ b/src/test/compile-fail/issue-17551.rs @@ -0,0 +1,18 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(unboxed_closures)] + +struct B; + +fn main() { + let foo = B; //~ ERROR unable to infer enough type information to locate the impl of the trait + let closure = |:| foo; +} diff --git a/src/test/compile-fail/issue-18118.rs b/src/test/compile-fail/issue-18118.rs new file mode 100644 index 0000000000000..4497c8088c31c --- /dev/null +++ b/src/test/compile-fail/issue-18118.rs @@ -0,0 +1,17 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +pub fn main() { + static z: &'static int = { + let p = 3; + &p +//~^ ERROR cannot borrow a local variable inside a static block, define a separate static instead + }; +} diff --git a/src/test/compile-fail/issue-18252.rs b/src/test/compile-fail/issue-18252.rs new file mode 100644 index 0000000000000..c884f02892f16 --- /dev/null +++ b/src/test/compile-fail/issue-18252.rs @@ -0,0 +1,19 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(struct_variant)] + +enum Foo { + Variant { x: uint } +} + +fn main() { + let f = Variant(42u); //~ ERROR expected function, found `Foo` +} diff --git a/src/test/compile-fail/issue-6991.rs b/src/test/compile-fail/issue-6991.rs new file mode 100644 index 0000000000000..a5d23c70bd5e8 --- /dev/null +++ b/src/test/compile-fail/issue-6991.rs @@ -0,0 +1,15 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +static x: &'static uint = &1; +static y: uint = *x; +//~^ ERROR cannot refer to other statics by value, +// use the address-of operator or a constant instead +fn main() {} diff --git a/src/test/compile-fail/issue-7867.rs b/src/test/compile-fail/issue-7867.rs new file mode 100644 index 0000000000000..0ab551642a0c4 --- /dev/null +++ b/src/test/compile-fail/issue-7867.rs @@ -0,0 +1,27 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +enum A { B, C } + +mod foo { pub fn bar() {} } + +fn main() { + match (true, false) { + B => (), //~ ERROR expected `(bool,bool)`, found `A` (expected tuple, found enum A) + _ => () + } + + match &Some(42i) { + Some(x) => (), //~ ERROR expected `&core::option::Option`, + // found `core::option::Option<>` + None => () //~ ERROR expected `&core::option::Option`, + // found `core::option::Option<>` + } +} diff --git a/src/test/run-pass/closure-syntax.rs b/src/test/run-pass/closure-syntax.rs index b5a94a02b346b..9d98a7ac12f9b 100644 --- a/src/test/run-pass/closure-syntax.rs +++ b/src/test/run-pass/closure-syntax.rs @@ -9,6 +9,9 @@ // except according to those terms. #![allow(dead_code)] +#![feature(unboxed_closures, unboxed_closure_sugar)] + +// compile-flags:-g fn foo() {} @@ -82,6 +85,9 @@ fn bar<'b>() { // issue #13490 let _ = || -> ! loop {}; let _ = proc() -> ! loop {}; + + // issue #17021 + let c = box |&:| {}; } struct B; diff --git a/src/test/run-pass/issue-12028.rs b/src/test/run-pass/issue-12028.rs new file mode 100644 index 0000000000000..4d64103e50224 --- /dev/null +++ b/src/test/run-pass/issue-12028.rs @@ -0,0 +1,44 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +trait Hash { + fn hash2(&self, hasher: &H) -> u64; +} + +trait Stream { + fn input(&mut self, bytes: &[u8]); + fn result(&self) -> u64; +} + +trait StreamHasher { + fn stream(&self) -> S; +} + +////////////////////////////////////////////////////////////////////////////// + +trait StreamHash>: Hash { + fn input_stream(&self, stream: &mut S); +} + +impl> Hash for u8 { + fn hash2(&self, hasher: &H) -> u64 { + let mut stream = hasher.stream(); + self.input_stream(&mut stream); + stream.result() + } +} + +impl> StreamHash for u8 { + fn input_stream(&self, stream: &mut S) { + stream.input([*self]); + } +} + +fn main() {} diff --git a/src/test/run-pass/issue-14901.rs b/src/test/run-pass/issue-14901.rs new file mode 100644 index 0000000000000..f93347f4366c6 --- /dev/null +++ b/src/test/run-pass/issue-14901.rs @@ -0,0 +1,27 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::io::Reader; + +enum Wrapper<'a> { + WrapReader(&'a Reader + 'a) +} + +trait Wrap<'a> { + fn wrap(self) -> Wrapper<'a>; +} + +impl<'a, R: Reader> Wrap<'a> for &'a mut R { + fn wrap(self) -> Wrapper<'a> { + WrapReader(self as &'a mut Reader) + } +} + +pub fn main() {} diff --git a/src/test/run-pass/issue-16560.rs b/src/test/run-pass/issue-16560.rs new file mode 100644 index 0000000000000..77eba0245b1c3 --- /dev/null +++ b/src/test/run-pass/issue-16560.rs @@ -0,0 +1,25 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(unboxed_closures)] + +use std::mem; + +fn main() { + let y = 0u8; + let closure = move |&: x| y + x; + + // Check that both closures are capturing by value + assert_eq!(1, mem::size_of_val(&closure)); + + spawn(proc() { + let ok = closure; + }) +} diff --git a/src/test/run-pass/issue-16668.rs b/src/test/run-pass/issue-16668.rs new file mode 100644 index 0000000000000..1bfa79b8a110d --- /dev/null +++ b/src/test/run-pass/issue-16668.rs @@ -0,0 +1,30 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![feature(unboxed_closures)] + +struct Parser<'a, I, O> { + parse: Box> + 'a> +} + +impl<'a, I, O: 'a> Parser<'a, I, O> { + fn compose(mut self, mut rhs: Parser) -> Parser<'a, I, K> { + Parser { + parse: box move |&mut: x: I| { + match self.parse.call_mut((x,)) { + Ok(r) => rhs.parse.call_mut((r,)), + Err(e) => Err(e) + } + } + } + } +} + +fn main() {} From 971d776aa5a678672eb3d37f2f507664aacd2440 Mon Sep 17 00:00:00 2001 From: Brendan Zabarauskas Date: Wed, 22 Oct 2014 16:37:20 +1100 Subject: [PATCH 21/47] Add Span and separate open/close delims to TTDelim This came up when working [on the gl-rs generator extension](https://github.com/bjz/gl-rs/blob/990383de801bd2e233159d5be07c9b5622827620/src/gl_generator/lib.rs#L135-L146). The new definition of `TTDelim` adds an associated `Span` that covers the whole token tree and enforces the invariant that a delimited sequence of token trees must have an opening and closing delimiter. A `get_span` method has also been added to `TokenTree` type to make it easier to implement better error messages for syntax extensions. --- src/libsyntax/ast.rs | 31 +++++++++++++++-- src/libsyntax/ext/log_syntax.rs | 8 ++--- src/libsyntax/ext/quote.rs | 17 +++++---- src/libsyntax/ext/tt/macro_rules.rs | 20 +++-------- src/libsyntax/ext/tt/transcribe.rs | 54 +++++++++++++++++------------ src/libsyntax/fold.rs | 12 ++++++- src/libsyntax/parse/mod.rs | 37 ++++++++++---------- src/libsyntax/parse/parser.rs | 36 +++++++++++-------- src/libsyntax/print/pprust.rs | 8 ++++- 9 files changed, 135 insertions(+), 88 deletions(-) diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 8eaee7282d197..be316ba9f4d89 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -592,6 +592,20 @@ pub enum CaptureClause { CaptureByRef, } +/// A token that delimits a sequence of token trees +#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +pub struct Delimiter { + pub span: Span, + pub token: ::parse::token::Token, +} + +impl Delimiter { + /// Convert the delimiter to a `TTTok` + pub fn to_tt(&self) -> TokenTree { + TTTok(self.span, self.token.clone()) + } +} + /// When the main rust parser encounters a syntax-extension invocation, it /// parses the arguments to the invocation as a token-tree. This is a very /// loose structure, such that all sorts of different AST-fragments can @@ -611,10 +625,9 @@ pub enum CaptureClause { pub enum TokenTree { /// A single token TTTok(Span, ::parse::token::Token), - /// A delimited sequence (the delimiters appear as the first - /// and last elements of the vector) + /// A delimited sequence of token trees // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST. - TTDelim(Rc>), + TTDelim(Span, Delimiter, Rc>, Delimiter), // These only make sense for right-hand-sides of MBE macros: @@ -628,6 +641,18 @@ pub enum TokenTree { TTNonterminal(Span, Ident) } +impl TokenTree { + /// Returns the `Span` corresponding to this token tree. 
+ pub fn get_span(&self) -> Span { + match *self { + TTTok(span, _) => span, + TTDelim(span, _, _, _) => span, + TTSeq(span, _, _, _) => span, + TTNonterminal(span, _) => span, + } + } +} + // Matchers are nodes defined-by and recognized-by the main rust parser and // language, but they're only ever found inside syntax-extension invocations; // indeed, the only thing that ever _activates_ the rules in the rust parser diff --git a/src/libsyntax/ext/log_syntax.rs b/src/libsyntax/ext/log_syntax.rs index 8df5746e412d4..30301e3b8cc92 100644 --- a/src/libsyntax/ext/log_syntax.rs +++ b/src/libsyntax/ext/log_syntax.rs @@ -13,16 +13,14 @@ use codemap; use ext::base; use print; -use std::rc::Rc; - pub fn expand_syntax_ext<'cx>(cx: &'cx mut base::ExtCtxt, sp: codemap::Span, - tt: &[ast::TokenTree]) + tts: &[ast::TokenTree]) -> Box { cx.print_backtrace(); - println!("{}", print::pprust::tt_to_string(&ast::TTDelim( - Rc::new(tt.iter().map(|x| (*x).clone()).collect())))); + + println!("{}", print::pprust::tts_to_string(tts)); // any so that `log_syntax` can be invoked as an expression and item. base::DummyResult::any(sp) diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 84775c12d641f..783c08a44436e 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -637,7 +637,7 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P { } -fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec> { +fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec> { match *tt { ast::TTTok(sp, ref tok) => { let e_sp = cx.expr_ident(sp, id_ext("_sp")); @@ -650,13 +650,16 @@ fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec> { id_ext("push"), vec!(e_tok)); vec!(cx.stmt_expr(e_push)) - } - - ast::TTDelim(ref tts) => mk_tts(cx, sp, tts.as_slice()), + }, + ast::TTDelim(sp, ref open, ref tts, ref close) => { + let mut stmts = vec![]; + stmts.extend(mk_tt(cx, sp, &open.to_tt()).into_iter()); + stmts.extend(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter())); + stmts.extend(mk_tt(cx, sp, &close.to_tt()).into_iter()); + stmts + }, ast::TTSeq(..) => fail!("TTSeq in quote!"), - ast::TTNonterminal(sp, ident) => { - // tt.extend($ident.to_tokens(ext_cx).into_iter()) let e_to_toks = @@ -674,7 +677,7 @@ fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec> { vec!(e_to_toks)); vec!(cx.stmt_expr(e_push)) - } + }, } } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 91db3a9d8df50..fbfe10d004e06 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -147,13 +147,9 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, rhses: &[Rc]) -> Box { if cx.trace_macros() { - println!("{}! {} {} {}", + println!("{}! {{ {} }}", token::get_ident(name), - "{", - print::pprust::tt_to_string(&TTDelim(Rc::new(arg.iter() - .map(|x| (*x).clone()) - .collect()))), - "}"); + print::pprust::tts_to_string(arg)); } // Which arm's failure should we report? (the one furthest along) @@ -175,15 +171,9 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, // okay, what's your transcriber? 
MatchedNonterminal(NtTT(ref tt)) => { match **tt { - // cut off delimiters; don't parse 'em - TTDelim(ref tts) => { - (*tts).slice(1u,(*tts).len()-1u) - .iter() - .map(|x| (*x).clone()) - .collect() - } - _ => cx.span_fatal( - sp, "macro rhs must be delimited") + // ignore delimiters + TTDelim(_, _, ref tts, _) => (**tts).clone(), + _ => cx.span_fatal(sp, "macro rhs must be delimited"), } }, _ => cx.span_bug(sp, "bad thing in rhs") diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 35ec37d842af1..472b24be81b93 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -18,6 +18,7 @@ use parse::token; use parse::lexer::TokenAndSpan; use std::rc::Rc; +use std::ops::Add; use std::collections::HashMap; ///an unzipping of `TokenTree`s @@ -104,37 +105,41 @@ enum LockstepIterSize { LisContradiction(String), } -fn lis_merge(lhs: LockstepIterSize, rhs: LockstepIterSize) -> LockstepIterSize { - match lhs { - LisUnconstrained => rhs.clone(), - LisContradiction(_) => lhs.clone(), - LisConstraint(l_len, l_id) => match rhs { - LisUnconstrained => lhs.clone(), - LisContradiction(_) => rhs.clone(), - LisConstraint(r_len, _) if l_len == r_len => lhs.clone(), - LisConstraint(r_len, r_id) => { - let l_n = token::get_ident(l_id); - let r_n = token::get_ident(r_id); - LisContradiction(format!("inconsistent lockstep iteration: \ - '{}' has {} items, but '{}' has {}", - l_n, l_len, r_n, r_len).to_string()) - } +impl Add for LockstepIterSize { + fn add(&self, other: &LockstepIterSize) -> LockstepIterSize { + match *self { + LisUnconstrained => other.clone(), + LisContradiction(_) => self.clone(), + LisConstraint(l_len, l_id) => match *other { + LisUnconstrained => self.clone(), + LisContradiction(_) => other.clone(), + LisConstraint(r_len, _) if l_len == r_len => self.clone(), + LisConstraint(r_len, r_id) => { + let l_n = token::get_ident(l_id); + let r_n = token::get_ident(r_id); + LisContradiction(format!("inconsistent lockstep iteration: \ + '{}' has {} items, but '{}' has {}", + l_n, l_len, r_n, r_len).to_string()) + } + }, } } } fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize { match *t { - TTDelim(ref tts) | TTSeq(_, ref tts, _, _) => { - tts.iter().fold(LisUnconstrained, |lis, tt| { - lis_merge(lis, lockstep_iter_size(tt, r)) + // The opening and closing delimiters are both tokens, so they are + // treated as `LisUnconstrained`. + TTDelim(_, _, ref tts, _) | TTSeq(_, ref tts, _, _) => { + tts.iter().fold(LisUnconstrained, |size, tt| { + size + lockstep_iter_size(tt, r) }) - } + }, TTTok(..) 
=> LisUnconstrained, TTNonterminal(_, name) => match *lookup_cur_matched(r, name) { MatchedNonterminal(_) => LisUnconstrained, MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name) - } + }, } } @@ -197,9 +202,14 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { (*frame.forest)[frame.idx].clone() }; match t { - TTDelim(tts) => { + TTDelim(_, open, delimed_tts, close) => { + let mut tts = vec![]; + tts.push(open.to_tt()); + tts.extend(delimed_tts.iter().map(|x| (*x).clone())); + tts.push(close.to_tt()); + r.stack.push(TtFrame { - forest: tts, + forest: Rc::new(tts), idx: 0, dotdotdoted: false, sep: None diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index ceef190f5d453..ddb2ab49f8b52 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -571,7 +571,17 @@ pub fn noop_fold_tt(tt: &TokenTree, fld: &mut T) -> TokenTree { match *tt { TTTok(span, ref tok) => TTTok(span, fld.fold_token(tok.clone())), - TTDelim(ref tts) => TTDelim(Rc::new(fld.fold_tts(tts.as_slice()))), + TTDelim(span, ref open, ref tts, ref close) => + TTDelim(span, + Delimiter { + span: open.span, + token: fld.fold_token(open.token.clone()) + }, + Rc::new(fld.fold_tts(tts.as_slice())), + Delimiter { + span: close.span, + token: fld.fold_token(close.token.clone()) + }), TTSeq(span, ref pattern, ref sep, is_optional) => TTSeq(span, Rc::new(fld.fold_tts(pattern.as_slice())), diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 2d7d32cd9eacc..1c99b608f7aa3 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -788,35 +788,34 @@ mod test { } // check the token-tree-ization of macros - #[test] fn string_to_tts_macro () { + #[test] + fn string_to_tts_macro () { let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string()); let tts: &[ast::TokenTree] = tts.as_slice(); match tts { - [ast::TTTok(_,_), - ast::TTTok(_,token::NOT), - ast::TTTok(_,_), - ast::TTDelim(ref delim_elts)] => { + [ast::TTTok(_, _), + ast::TTTok(_, token::NOT), + ast::TTTok(_, _), + ast::TTDelim(_, ast::TTTok(_, token::LPAREN), + ref delim_elts, + ast::TTTok(_, token::RPAREN))] => { let delim_elts: &[ast::TokenTree] = delim_elts.as_slice(); match delim_elts { - [ast::TTTok(_,token::LPAREN), - ast::TTDelim(ref first_set), - ast::TTTok(_,token::FAT_ARROW), - ast::TTDelim(ref second_set), - ast::TTTok(_,token::RPAREN)] => { + [ast::TTDelim(_, ast::TTTok(_, token::LPAREN), + ref first_set, + ast::TTTok(_, token::RPAREN)), + ast::TTTok(_, token::FAT_ARROW), + ast::TTDelim(_, ast::TTTok(_, token::LPAREN), + ref second_set, + ast::TTTok(_, token::RPAREN))] => { let first_set: &[ast::TokenTree] = first_set.as_slice(); match first_set { - [ast::TTTok(_,token::LPAREN), - ast::TTTok(_,token::DOLLAR), - ast::TTTok(_,_), - ast::TTTok(_,token::RPAREN)] => { + [ast::TTTok(_, token::DOLLAR), ast::TTTok(_, _)] => { let second_set: &[ast::TokenTree] = second_set.as_slice(); match second_set { - [ast::TTTok(_,token::LPAREN), - ast::TTTok(_,token::DOLLAR), - ast::TTTok(_,_), - ast::TTTok(_,token::RPAREN)] => { + [ast::TTTok(_, token::DOLLAR), ast::TTTok(_, _)] => { assert_eq!("correct","correct") } _ => assert_eq!("wrong 4","correct") @@ -837,7 +836,7 @@ mod test { _ => { error!("failing value: {}",tts); assert_eq!("wrong 1","correct"); - } + }, } } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 5abf79836f5b2..005ed2e7ed374 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -48,7 +48,7 @@ use ast::{StmtExpr, StmtSemi, StmtMac, StructDef, 
StructField}; use ast::{StructVariantKind, BiSub}; use ast::StrStyle; use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue}; -use ast::{TokenTree, TraitItem, TraitRef, TTDelim, TTSeq, TTTok}; +use ast::{Delimiter, TokenTree, TraitItem, TraitRef, TTDelim, TTSeq, TTTok}; use ast::{TTNonterminal, TupleVariantKind, Ty, Ty_, TyBot}; use ast::{TypeField, TyFixedLengthVec, TyClosure, TyProc, TyBareFn}; use ast::{TyTypeof, TyInfer, TypeMethod}; @@ -2574,16 +2574,11 @@ impl<'a> Parser<'a> { } } _ => { - parse_any_tt_tok(p) + TTTok(p.span, p.bump_and_get()) } } } - // turn the next token into a TTTok: - fn parse_any_tt_tok(p: &mut Parser) -> TokenTree { - TTTok(p.span, p.bump_and_get()) - } - match (&self.token, token::close_delimiter_for(&self.token)) { (&token::EOF, _) => { let open_braces = self.open_braces.clone(); @@ -2595,21 +2590,32 @@ impl<'a> Parser<'a> { self.fatal("this file contains an un-closed delimiter "); } (_, Some(close_delim)) => { + // The span for beginning of the delimited section + let pre_span = self.span; + // Parse the open delimiter. self.open_braces.push(self.span); - let mut result = vec!(parse_any_tt_tok(self)); + let open = Delimiter { + span: self.span, + token: self.bump_and_get(), + }; - let trees = - self.parse_seq_to_before_end(&close_delim, - seq_sep_none(), - |p| p.parse_token_tree()); - result.extend(trees.into_iter()); + // Parse the token trees within the delimeters + let tts = self.parse_seq_to_before_end( + &close_delim, seq_sep_none(), |p| p.parse_token_tree() + ); // Parse the close delimiter. - result.push(parse_any_tt_tok(self)); + let close = Delimiter { + span: self.span, + token: self.bump_and_get(), + }; self.open_braces.pop().unwrap(); - TTDelim(Rc::new(result)) + // Expand to cover the entire delimited token tree + let span = Span { hi: self.span.hi, ..pre_span }; + + TTDelim(span, open, Rc::new(tts), close) } _ => parse_non_delim_tt_tok(self) } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index b63f9b0120b9e..4f4b153d3a964 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -1020,7 +1020,13 @@ impl<'a> State<'a> { /// expression arguments as expressions). It can be done! I think. pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> { match *tt { - ast::TTDelim(ref tts) => self.print_tts(tts.as_slice()), + ast::TTDelim(_, ref open, ref tts, ref close) => { + try!(word(&mut self.s, parse::token::to_string(&open.token).as_slice())); + try!(space(&mut self.s)); + try!(self.print_tts(tts.as_slice())); + try!(space(&mut self.s)); + word(&mut self.s, parse::token::to_string(&close.token).as_slice()) + }, ast::TTTok(_, ref tk) => { try!(word(&mut self.s, parse::token::to_string(tk).as_slice())); match *tk { From ec3f0201e76b5cf689f3e8e6418435c3e6d9271c Mon Sep 17 00:00:00 2001 From: Brendan Zabarauskas Date: Wed, 22 Oct 2014 23:35:32 +1100 Subject: [PATCH 22/47] Rename TokenTree variants for clarity This should be clearer, and fits in better with the `TTNonterminal` variant. 
Renames: - `TTTok` -> `TTToken` - `TTDelim` -> `TTDelimited` - `TTSeq` -> `TTSequence` --- src/doc/guide-plugin.md | 4 +-- src/libsyntax/ast.rs | 27 ++++++++------- src/libsyntax/diagnostics/plugin.rs | 12 +++---- src/libsyntax/ext/base.rs | 4 +-- src/libsyntax/ext/concat_idents.rs | 4 +-- src/libsyntax/ext/quote.rs | 8 ++--- src/libsyntax/ext/trace_macros.rs | 4 +-- src/libsyntax/ext/tt/macro_rules.rs | 4 +-- src/libsyntax/ext/tt/transcribe.rs | 20 +++++------ src/libsyntax/fold.rs | 36 ++++++++++---------- src/libsyntax/parse/mod.rs | 50 ++++++++++++++-------------- src/libsyntax/parse/parser.rs | 10 +++--- src/libsyntax/print/pprust.rs | 6 ++-- src/test/auxiliary/roman_numerals.rs | 4 +-- 14 files changed, 98 insertions(+), 95 deletions(-) diff --git a/src/doc/guide-plugin.md b/src/doc/guide-plugin.md index 3830a2126e172..9bf1d29569ca4 100644 --- a/src/doc/guide-plugin.md +++ b/src/doc/guide-plugin.md @@ -56,7 +56,7 @@ extern crate rustc; use syntax::codemap::Span; use syntax::parse::token::{IDENT, get_ident}; -use syntax::ast::{TokenTree, TTTok}; +use syntax::ast::{TokenTree, TTToken}; use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr}; use syntax::ext::build::AstBuilder; // trait for expr_uint use rustc::plugin::Registry; @@ -71,7 +71,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree]) ("I", 1)]; let text = match args { - [TTTok(_, IDENT(s, _))] => get_ident(s).to_string(), + [TTToken(_, IDENT(s, _))] => get_ident(s).to_string(), _ => { cx.span_err(sp, "argument should be a single identifier"); return DummyResult::any(sp); diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index be316ba9f4d89..36373638099d2 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -24,6 +24,9 @@ use std::fmt::Show; use std::rc::Rc; use serialize::{Encodable, Decodable, Encoder, Decoder}; +#[cfg(stage0)] +pub use self::TTToken as TTTok; + // FIXME #6993: in librustc, uses of "ident" should be replaced // by just "Name". @@ -600,9 +603,9 @@ pub struct Delimiter { } impl Delimiter { - /// Convert the delimiter to a `TTTok` + /// Convert the delimiter to a `TTToken` pub fn to_tt(&self) -> TokenTree { - TTTok(self.span, self.token.clone()) + TTToken(self.span, self.token.clone()) } } @@ -614,9 +617,9 @@ impl Delimiter { /// If the syntax extension is an MBE macro, it will attempt to match its /// LHS "matchers" against the provided token tree, and if it finds a /// match, will transcribe the RHS token tree, splicing in any captured -/// macro_parser::matched_nonterminals into the TTNonterminals it finds. +/// `macro_parser::matched_nonterminals` into the `TTNonterminal`s it finds. /// -/// The RHS of an MBE macro is the only place a TTNonterminal or TTSeq +/// The RHS of an MBE macro is the only place a `TTNonterminal` or `TTSequence` /// makes any real sense. You could write them elsewhere but nothing /// else knows what to do with them, so you'll probably get a syntax /// error. @@ -624,18 +627,18 @@ impl Delimiter { #[doc="For macro invocations; parsing is delegated to the macro"] pub enum TokenTree { /// A single token - TTTok(Span, ::parse::token::Token), + TTToken(Span, ::parse::token::Token), /// A delimited sequence of token trees // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST. 
- TTDelim(Span, Delimiter, Rc>, Delimiter), + TTDelimited(Span, Delimiter, Rc>, Delimiter), // These only make sense for right-hand-sides of MBE macros: - /// A kleene-style repetition sequence with a span, a TTForest, + /// A kleene-style repetition sequence with a span, a `TTForest`, /// an optional separator, and a boolean where true indicates /// zero or more (..), and false indicates one or more (+). // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST. - TTSeq(Span, Rc>, Option<::parse::token::Token>, bool), + TTSequence(Span, Rc>, Option<::parse::token::Token>, bool), /// A syntactic variable that will be filled in by macro expansion. TTNonterminal(Span, Ident) @@ -645,10 +648,10 @@ impl TokenTree { /// Returns the `Span` corresponding to this token tree. pub fn get_span(&self) -> Span { match *self { - TTTok(span, _) => span, - TTDelim(span, _, _, _) => span, - TTSeq(span, _, _, _) => span, - TTNonterminal(span, _) => span, + TTToken(span, _) => span, + TTDelimited(span, _, _, _) => span, + TTSequence(span, _, _, _) => span, + TTNonterminal(span, _) => span, } } } diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index d3c39284f5582..8ea08c58d065c 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -50,7 +50,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, token_tree: &[TokenTree]) -> Box { let code = match token_tree { - [ast::TTTok(_, token::IDENT(code, _))] => code, + [ast::TTToken(_, token::IDENT(code, _))] => code, _ => unreachable!() }; with_registered_diagnostics(|diagnostics| { @@ -82,12 +82,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, token_tree: &[TokenTree]) -> Box { let (code, description) = match token_tree { - [ast::TTTok(_, token::IDENT(ref code, _))] => { + [ast::TTToken(_, token::IDENT(ref code, _))] => { (code, None) }, - [ast::TTTok(_, token::IDENT(ref code, _)), - ast::TTTok(_, token::COMMA), - ast::TTTok(_, token::LIT_STR_RAW(description, _))] => { + [ast::TTToken(_, token::IDENT(ref code, _)), + ast::TTToken(_, token::COMMA), + ast::TTToken(_, token::LIT_STR_RAW(description, _))] => { (code, Some(description)) } _ => unreachable!() @@ -110,7 +110,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, token_tree: &[TokenTree]) -> Box { let name = match token_tree { - [ast::TTTok(_, token::IDENT(ref name, _))] => name, + [ast::TTToken(_, token::IDENT(ref name, _))] => name, _ => unreachable!() }; diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 5cc2fe03618c4..b5cc2d95890bb 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -684,8 +684,8 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt, cx.span_err(sp, format!("{} takes 1 argument.", name).as_slice()); } else { match tts[0] { - ast::TTTok(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())), - ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => { + ast::TTToken(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())), + ast::TTToken(_, token::LIT_STR_RAW(ident, _)) => { return Some(parse::raw_str_lit(ident.as_str())) } _ => { diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index 145412caa0bfe..e6befdd2aac92 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -23,7 +23,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] for (i, e) in tts.iter().enumerate() { if i & 1 == 1 { match *e { - ast::TTTok(_, 
token::COMMA) => (), + ast::TTToken(_, token::COMMA) => (), _ => { cx.span_err(sp, "concat_idents! expecting comma."); return DummyResult::expr(sp); @@ -31,7 +31,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] } } else { match *e { - ast::TTTok(_, token::IDENT(ident,_)) => { + ast::TTToken(_, token::IDENT(ident,_)) => { res_str.push_str(token::get_ident(ident).get()) } _ => { diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 783c08a44436e..93bd66d6eeba6 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -639,10 +639,10 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P { fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec> { match *tt { - ast::TTTok(sp, ref tok) => { + ast::TTToken(sp, ref tok) => { let e_sp = cx.expr_ident(sp, id_ext("_sp")); let e_tok = cx.expr_call(sp, - mk_ast_path(cx, sp, "TTTok"), + mk_ast_path(cx, sp, "TTToken"), vec!(e_sp, mk_token(cx, sp, tok))); let e_push = cx.expr_method_call(sp, @@ -651,14 +651,14 @@ fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec> { vec!(e_tok)); vec!(cx.stmt_expr(e_push)) }, - ast::TTDelim(sp, ref open, ref tts, ref close) => { + ast::TTDelimited(sp, ref open, ref tts, ref close) => { let mut stmts = vec![]; stmts.extend(mk_tt(cx, sp, &open.to_tt()).into_iter()); stmts.extend(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter())); stmts.extend(mk_tt(cx, sp, &close.to_tt()).into_iter()); stmts }, - ast::TTSeq(..) => fail!("TTSeq in quote!"), + ast::TTSequence(..) => fail!("TTSequence in quote!"), ast::TTNonterminal(sp, ident) => { // tt.extend($ident.to_tokens(ext_cx).into_iter()) diff --git a/src/libsyntax/ext/trace_macros.rs b/src/libsyntax/ext/trace_macros.rs index 1f50eb933bb4e..4c3846731f432 100644 --- a/src/libsyntax/ext/trace_macros.rs +++ b/src/libsyntax/ext/trace_macros.rs @@ -20,10 +20,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt, tt: &[ast::TokenTree]) -> Box { match tt { - [ast::TTTok(_, ref tok)] if is_keyword(keywords::True, tok) => { + [ast::TTToken(_, ref tok)] if is_keyword(keywords::True, tok) => { cx.set_trace_macros(true); } - [ast::TTTok(_, ref tok)] if is_keyword(keywords::False, tok) => { + [ast::TTToken(_, ref tok)] if is_keyword(keywords::False, tok) => { cx.set_trace_macros(false); } _ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"), diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index fbfe10d004e06..4a3828a8043fb 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TTDelim}; +use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TTDelimited}; use ast; use codemap::{Span, Spanned, DUMMY_SP}; use ext::base::{ExtCtxt, MacResult, MacroDef}; @@ -172,7 +172,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, MatchedNonterminal(NtTT(ref tt)) => { match **tt { // ignore delimiters - TTDelim(_, _, ref tts, _) => (**tts).clone(), + TTDelimited(_, _, ref tts, _) => (**tts).clone(), _ => cx.span_fatal(sp, "macro rhs must be delimited"), } }, diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 472b24be81b93..e705c4d8b33c7 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -9,7 +9,7 @@ // except according to those terms. 
use ast; -use ast::{TokenTree, TTDelim, TTTok, TTSeq, TTNonterminal, Ident}; +use ast::{TokenTree, TTDelimited, TTToken, TTSequence, TTNonterminal, Ident}; use codemap::{Span, DUMMY_SP}; use diagnostic::SpanHandler; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; @@ -45,7 +45,7 @@ pub struct TtReader<'a> { } /// This can do Macro-By-Example transcription. On the other hand, if -/// `src` contains no `TTSeq`s and `TTNonterminal`s, `interp` can (and +/// `src` contains no `TTSequence`s and `TTNonterminal`s, `interp` can (and /// should) be none. pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler, interp: Option>>, @@ -130,12 +130,12 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize { match *t { // The opening and closing delimiters are both tokens, so they are // treated as `LisUnconstrained`. - TTDelim(_, _, ref tts, _) | TTSeq(_, ref tts, _, _) => { + TTDelimited(_, _, ref tts, _) | TTSequence(_, ref tts, _, _) => { tts.iter().fold(LisUnconstrained, |size, tt| { size + lockstep_iter_size(tt, r) }) }, - TTTok(..) => LisUnconstrained, + TTToken(..) => LisUnconstrained, TTNonterminal(_, name) => match *lookup_cur_matched(r, name) { MatchedNonterminal(_) => LisUnconstrained, MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name) @@ -194,15 +194,15 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } } } - loop { /* because it's easiest, this handles `TTDelim` not starting - with a `TTTok`, even though it won't happen */ + loop { /* because it's easiest, this handles `TTDelimited` not starting + with a `TTToken`, even though it won't happen */ let t = { let frame = r.stack.last().unwrap(); // FIXME(pcwalton): Bad copy. (*frame.forest)[frame.idx].clone() }; match t { - TTDelim(_, open, delimed_tts, close) => { + TTDelimited(_, open, delimed_tts, close) => { let mut tts = vec![]; tts.push(open.to_tt()); tts.extend(delimed_tts.iter().map(|x| (*x).clone())); @@ -216,15 +216,15 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { }); // if this could be 0-length, we'd need to potentially recur here } - TTTok(sp, tok) => { + TTToken(sp, tok) => { r.cur_span = sp; r.cur_tok = tok; r.stack.last_mut().unwrap().idx += 1; return ret_val; } - TTSeq(sp, tts, sep, zerok) => { + TTSequence(sp, tts, sep, zerok) => { // FIXME(pcwalton): Bad copy. 
- match lockstep_iter_size(&TTSeq(sp, tts.clone(), sep.clone(), zerok), r) { + match lockstep_iter_size(&TTSequence(sp, tts.clone(), sep.clone(), zerok), r) { LisUnconstrained => { r.sp_diag.span_fatal( sp.clone(), /* blame macro writer */ diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index ddb2ab49f8b52..9cffce74a095a 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -569,24 +569,24 @@ pub fn noop_fold_arg(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg { pub fn noop_fold_tt(tt: &TokenTree, fld: &mut T) -> TokenTree { match *tt { - TTTok(span, ref tok) => - TTTok(span, fld.fold_token(tok.clone())), - TTDelim(span, ref open, ref tts, ref close) => - TTDelim(span, - Delimiter { - span: open.span, - token: fld.fold_token(open.token.clone()) - }, - Rc::new(fld.fold_tts(tts.as_slice())), - Delimiter { - span: close.span, - token: fld.fold_token(close.token.clone()) - }), - TTSeq(span, ref pattern, ref sep, is_optional) => - TTSeq(span, - Rc::new(fld.fold_tts(pattern.as_slice())), - sep.clone().map(|tok| fld.fold_token(tok)), - is_optional), + TTToken(span, ref tok) => + TTToken(span, fld.fold_token(tok.clone())), + TTDelimited(span, ref open, ref tts, ref close) => + TTDelimited(span, + Delimiter { + span: open.span, + token: fld.fold_token(open.token.clone()) + }, + Rc::new(fld.fold_tts(tts.as_slice())), + Delimiter { + span: close.span, + token: fld.fold_token(close.token.clone()) + }), + TTSequence(span, ref pattern, ref sep, is_optional) => + TTSequence(span, + Rc::new(fld.fold_tts(pattern.as_slice())), + sep.clone().map(|tok| fld.fold_token(tok)), + is_optional), TTNonterminal(sp,ref ident) => TTNonterminal(sp,fld.fold_ident(*ident)) } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 1c99b608f7aa3..a2e4028232100 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -793,29 +793,29 @@ mod test { let tts = string_to_tts("macro_rules! 
zip (($a)=>($a))".to_string()); let tts: &[ast::TokenTree] = tts.as_slice(); match tts { - [ast::TTTok(_, _), - ast::TTTok(_, token::NOT), - ast::TTTok(_, _), - ast::TTDelim(_, ast::TTTok(_, token::LPAREN), + [ast::TTToken(_, _), + ast::TTToken(_, token::NOT), + ast::TTToken(_, _), + ast::TTDelimited(_, ast::TTToken(_, token::LPAREN), ref delim_elts, - ast::TTTok(_, token::RPAREN))] => { + ast::TTToken(_, token::RPAREN))] => { let delim_elts: &[ast::TokenTree] = delim_elts.as_slice(); match delim_elts { - [ast::TTDelim(_, ast::TTTok(_, token::LPAREN), + [ast::TTDelimited(_, ast::TTToken(_, token::LPAREN), ref first_set, - ast::TTTok(_, token::RPAREN)), - ast::TTTok(_, token::FAT_ARROW), - ast::TTDelim(_, ast::TTTok(_, token::LPAREN), + ast::TTToken(_, token::RPAREN)), + ast::TTToken(_, token::FAT_ARROW), + ast::TTDelimited(_, ast::TTToken(_, token::LPAREN), ref second_set, - ast::TTTok(_, token::RPAREN))] => { + ast::TTToken(_, token::RPAREN))] => { let first_set: &[ast::TokenTree] = first_set.as_slice(); match first_set { - [ast::TTTok(_, token::DOLLAR), ast::TTTok(_, _)] => { + [ast::TTToken(_, token::DOLLAR), ast::TTToken(_, _)] => { let second_set: &[ast::TokenTree] = second_set.as_slice(); match second_set { - [ast::TTTok(_, token::DOLLAR), ast::TTTok(_, _)] => { + [ast::TTToken(_, token::DOLLAR), ast::TTToken(_, _)] => { assert_eq!("correct","correct") } _ => assert_eq!("wrong 4","correct") @@ -845,7 +845,7 @@ mod test { assert_eq!(json::encode(&tts), "[\ {\ - \"variant\":\"TTTok\",\ + \"variant\":\"TTToken\",\ \"fields\":[\ null,\ {\ @@ -858,7 +858,7 @@ mod test { ]\ },\ {\ - \"variant\":\"TTTok\",\ + \"variant\":\"TTToken\",\ \"fields\":[\ null,\ {\ @@ -871,18 +871,18 @@ mod test { ]\ },\ {\ - \"variant\":\"TTDelim\",\ + \"variant\":\"TTDelimited\",\ \"fields\":[\ [\ {\ - \"variant\":\"TTTok\",\ + \"variant\":\"TTToken\",\ \"fields\":[\ null,\ \"LPAREN\"\ ]\ },\ {\ - \"variant\":\"TTTok\",\ + \"variant\":\"TTToken\",\ \"fields\":[\ null,\ {\ @@ -895,14 +895,14 @@ mod test { ]\ },\ {\ - \"variant\":\"TTTok\",\ + \"variant\":\"TTToken\",\ \"fields\":[\ null,\ \"COLON\"\ ]\ },\ {\ - \"variant\":\"TTTok\",\ + \"variant\":\"TTToken\",\ \"fields\":[\ null,\ {\ @@ -915,7 +915,7 @@ mod test { ]\ },\ {\ - \"variant\":\"TTTok\",\ + \"variant\":\"TTToken\",\ \"fields\":[\ null,\ \"RPAREN\"\ @@ -925,18 +925,18 @@ mod test { ]\ },\ {\ - \"variant\":\"TTDelim\",\ + \"variant\":\"TTDelimited\",\ \"fields\":[\ [\ {\ - \"variant\":\"TTTok\",\ + \"variant\":\"TTToken\",\ \"fields\":[\ null,\ \"LBRACE\"\ ]\ },\ {\ - \"variant\":\"TTTok\",\ + \"variant\":\"TTToken\",\ \"fields\":[\ null,\ {\ @@ -949,14 +949,14 @@ mod test { ]\ },\ {\ - \"variant\":\"TTTok\",\ + \"variant\":\"TTToken\",\ \"fields\":[\ null,\ \"SEMI\"\ ]\ },\ {\ - \"variant\":\"TTTok\",\ + \"variant\":\"TTToken\",\ \"fields\":[\ null,\ \"RBRACE\"\ diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 005ed2e7ed374..1ed7baa13b42d 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -48,7 +48,7 @@ use ast::{StmtExpr, StmtSemi, StmtMac, StructDef, StructField}; use ast::{StructVariantKind, BiSub}; use ast::StrStyle; use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue}; -use ast::{Delimiter, TokenTree, TraitItem, TraitRef, TTDelim, TTSeq, TTTok}; +use ast::{Delimiter, TokenTree, TraitItem, TraitRef, TTDelimited, TTSequence, TTToken}; use ast::{TTNonterminal, TupleVariantKind, Ty, Ty_, TyBot}; use ast::{TypeField, TyFixedLengthVec, TyClosure, TyProc, TyBareFn}; use ast::{TyTypeof, 
TyInfer, TypeMethod}; @@ -2526,7 +2526,7 @@ impl<'a> Parser<'a> { /// parse a single token tree from the input. pub fn parse_token_tree(&mut self) -> TokenTree { // FIXME #6994: currently, this is too eager. It - // parses token trees but also identifies TTSeq's + // parses token trees but also identifies TTSequence's // and TTNonterminal's; it's too early to know yet // whether something will be a nonterminal or a seq // yet. @@ -2568,13 +2568,13 @@ impl<'a> Parser<'a> { let seq = match seq { Spanned { node, .. } => node, }; - TTSeq(mk_sp(sp.lo, p.span.hi), Rc::new(seq), s, z) + TTSequence(mk_sp(sp.lo, p.span.hi), Rc::new(seq), s, z) } else { TTNonterminal(sp, p.parse_ident()) } } _ => { - TTTok(p.span, p.bump_and_get()) + TTToken(p.span, p.bump_and_get()) } } } @@ -2615,7 +2615,7 @@ impl<'a> Parser<'a> { // Expand to cover the entire delimited token tree let span = Span { hi: self.span.hi, ..pre_span }; - TTDelim(span, open, Rc::new(tts), close) + TTDelimited(span, open, Rc::new(tts), close) } _ => parse_non_delim_tt_tok(self) } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 4f4b153d3a964..9a102d229718f 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -1020,14 +1020,14 @@ impl<'a> State<'a> { /// expression arguments as expressions). It can be done! I think. pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> { match *tt { - ast::TTDelim(_, ref open, ref tts, ref close) => { + ast::TTDelimited(_, ref open, ref tts, ref close) => { try!(word(&mut self.s, parse::token::to_string(&open.token).as_slice())); try!(space(&mut self.s)); try!(self.print_tts(tts.as_slice())); try!(space(&mut self.s)); word(&mut self.s, parse::token::to_string(&close.token).as_slice()) }, - ast::TTTok(_, ref tk) => { + ast::TTToken(_, ref tk) => { try!(word(&mut self.s, parse::token::to_string(tk).as_slice())); match *tk { parse::token::DOC_COMMENT(..) 
=> { @@ -1036,7 +1036,7 @@ impl<'a> State<'a> { _ => Ok(()) } } - ast::TTSeq(_, ref tts, ref sep, zerok) => { + ast::TTSequence(_, ref tts, ref sep, zerok) => { try!(word(&mut self.s, "$(")); for tt_elt in (*tts).iter() { try!(self.print_tt(tt_elt)); diff --git a/src/test/auxiliary/roman_numerals.rs b/src/test/auxiliary/roman_numerals.rs index 43842fae70f80..0d5abb8fb5dd9 100644 --- a/src/test/auxiliary/roman_numerals.rs +++ b/src/test/auxiliary/roman_numerals.rs @@ -18,7 +18,7 @@ extern crate rustc; use syntax::codemap::Span; use syntax::parse::token::{IDENT, get_ident}; -use syntax::ast::{TokenTree, TTTok}; +use syntax::ast::{TokenTree, TTToken}; use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr}; use syntax::ext::build::AstBuilder; // trait for expr_uint use rustc::plugin::Registry; @@ -39,7 +39,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree]) ("I", 1)]; let text = match args { - [TTTok(_, IDENT(s, _))] => get_ident(s).to_string(), + [TTToken(_, IDENT(s, _))] => get_ident(s).to_string(), _ => { cx.span_err(sp, "argument should be a single identifier"); return DummyResult::any(sp); From 6a50b4d018b0e44b9e12560030ca7fb240107a68 Mon Sep 17 00:00:00 2001 From: Brendan Zabarauskas Date: Thu, 23 Oct 2014 01:42:47 +1100 Subject: [PATCH 23/47] Prevent some vector reallocations --- src/libsyntax/ext/quote.rs | 9 ++++----- src/libsyntax/ext/tt/transcribe.rs | 12 ++++++------ 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 93bd66d6eeba6..baba38d8cbb32 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -652,11 +652,10 @@ fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec> { vec!(cx.stmt_expr(e_push)) }, ast::TTDelimited(sp, ref open, ref tts, ref close) => { - let mut stmts = vec![]; - stmts.extend(mk_tt(cx, sp, &open.to_tt()).into_iter()); - stmts.extend(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter())); - stmts.extend(mk_tt(cx, sp, &close.to_tt()).into_iter()); - stmts + mk_tt(cx, sp, &open.to_tt()).into_iter() + .chain(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter())) + .chain(mk_tt(cx, sp, &close.to_tt()).into_iter()) + .collect() }, ast::TTSequence(..) 
=> fail!("TTSequence in quote!"), ast::TTNonterminal(sp, ident) => { diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index e705c4d8b33c7..c0b66851dfe3f 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -202,14 +202,14 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { (*frame.forest)[frame.idx].clone() }; match t { - TTDelimited(_, open, delimed_tts, close) => { - let mut tts = vec![]; - tts.push(open.to_tt()); - tts.extend(delimed_tts.iter().map(|x| (*x).clone())); - tts.push(close.to_tt()); + TTDelimited(_, open, tts, close) => { + let mut forest = Vec::with_capacity(1 + tts.len() + 1); + forest.push(open.to_tt()); + forest.extend(tts.iter().map(|x| (*x).clone())); + forest.push(close.to_tt()); r.stack.push(TtFrame { - forest: Rc::new(tts), + forest: Rc::new(forest), idx: 0, dotdotdoted: false, sep: None From dfb4163f8380e9a1aaf64a7474de30634bca4034 Mon Sep 17 00:00:00 2001 From: Brendan Zabarauskas Date: Thu, 23 Oct 2014 04:39:58 +1100 Subject: [PATCH 24/47] Use standard capitalisation for TokenTree variants --- src/doc/guide-plugin.md | 4 +-- src/libsyntax/ast.rs | 26 +++++++-------- src/libsyntax/diagnostics/plugin.rs | 12 +++---- src/libsyntax/ext/base.rs | 4 +-- src/libsyntax/ext/concat_idents.rs | 4 +-- src/libsyntax/ext/quote.rs | 14 ++++---- src/libsyntax/ext/trace_macros.rs | 4 +-- src/libsyntax/ext/tt/macro_rules.rs | 4 +-- src/libsyntax/ext/tt/transcribe.rs | 24 ++++++------- src/libsyntax/fold.rs | 16 ++++----- src/libsyntax/parse/mod.rs | 50 ++++++++++++++-------------- src/libsyntax/parse/parser.rs | 16 ++++----- src/libsyntax/print/pprust.rs | 8 ++--- src/test/auxiliary/roman_numerals.rs | 4 +-- 14 files changed, 95 insertions(+), 95 deletions(-) diff --git a/src/doc/guide-plugin.md b/src/doc/guide-plugin.md index 9bf1d29569ca4..83a5697f75a69 100644 --- a/src/doc/guide-plugin.md +++ b/src/doc/guide-plugin.md @@ -56,7 +56,7 @@ extern crate rustc; use syntax::codemap::Span; use syntax::parse::token::{IDENT, get_ident}; -use syntax::ast::{TokenTree, TTToken}; +use syntax::ast::{TokenTree, TtToken}; use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr}; use syntax::ext::build::AstBuilder; // trait for expr_uint use rustc::plugin::Registry; @@ -71,7 +71,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree]) ("I", 1)]; let text = match args { - [TTToken(_, IDENT(s, _))] => get_ident(s).to_string(), + [TtToken(_, IDENT(s, _))] => get_ident(s).to_string(), _ => { cx.span_err(sp, "argument should be a single identifier"); return DummyResult::any(sp); diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 36373638099d2..f87c7cf021547 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -25,7 +25,7 @@ use std::rc::Rc; use serialize::{Encodable, Decodable, Encoder, Decoder}; #[cfg(stage0)] -pub use self::TTToken as TTTok; +pub use self::TtToken as TTTok; // FIXME #6993: in librustc, uses of "ident" should be replaced // by just "Name". 
@@ -603,9 +603,9 @@ pub struct Delimiter { } impl Delimiter { - /// Convert the delimiter to a `TTToken` + /// Convert the delimiter to a `TtToken` pub fn to_tt(&self) -> TokenTree { - TTToken(self.span, self.token.clone()) + TtToken(self.span, self.token.clone()) } } @@ -617,9 +617,9 @@ impl Delimiter { /// If the syntax extension is an MBE macro, it will attempt to match its /// LHS "matchers" against the provided token tree, and if it finds a /// match, will transcribe the RHS token tree, splicing in any captured -/// `macro_parser::matched_nonterminals` into the `TTNonterminal`s it finds. +/// `macro_parser::matched_nonterminals` into the `TtNonterminal`s it finds. /// -/// The RHS of an MBE macro is the only place a `TTNonterminal` or `TTSequence` +/// The RHS of an MBE macro is the only place a `TtNonterminal` or `TtSequence` /// makes any real sense. You could write them elsewhere but nothing /// else knows what to do with them, so you'll probably get a syntax /// error. @@ -627,10 +627,10 @@ impl Delimiter { #[doc="For macro invocations; parsing is delegated to the macro"] pub enum TokenTree { /// A single token - TTToken(Span, ::parse::token::Token), + TtToken(Span, ::parse::token::Token), /// A delimited sequence of token trees // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST. - TTDelimited(Span, Delimiter, Rc>, Delimiter), + TtDelimited(Span, Delimiter, Rc>, Delimiter), // These only make sense for right-hand-sides of MBE macros: @@ -638,20 +638,20 @@ pub enum TokenTree { /// an optional separator, and a boolean where true indicates /// zero or more (..), and false indicates one or more (+). // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST. - TTSequence(Span, Rc>, Option<::parse::token::Token>, bool), + TtSequence(Span, Rc>, Option<::parse::token::Token>, bool), /// A syntactic variable that will be filled in by macro expansion. - TTNonterminal(Span, Ident) + TtNonterminal(Span, Ident) } impl TokenTree { /// Returns the `Span` corresponding to this token tree. 
pub fn get_span(&self) -> Span { match *self { - TTToken(span, _) => span, - TTDelimited(span, _, _, _) => span, - TTSequence(span, _, _, _) => span, - TTNonterminal(span, _) => span, + TtToken(span, _) => span, + TtDelimited(span, _, _, _) => span, + TtSequence(span, _, _, _) => span, + TtNonterminal(span, _) => span, } } } diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 8ea08c58d065c..b8795ad5be80f 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -50,7 +50,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, token_tree: &[TokenTree]) -> Box { let code = match token_tree { - [ast::TTToken(_, token::IDENT(code, _))] => code, + [ast::TtToken(_, token::IDENT(code, _))] => code, _ => unreachable!() }; with_registered_diagnostics(|diagnostics| { @@ -82,12 +82,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, token_tree: &[TokenTree]) -> Box { let (code, description) = match token_tree { - [ast::TTToken(_, token::IDENT(ref code, _))] => { + [ast::TtToken(_, token::IDENT(ref code, _))] => { (code, None) }, - [ast::TTToken(_, token::IDENT(ref code, _)), - ast::TTToken(_, token::COMMA), - ast::TTToken(_, token::LIT_STR_RAW(description, _))] => { + [ast::TtToken(_, token::IDENT(ref code, _)), + ast::TtToken(_, token::COMMA), + ast::TtToken(_, token::LIT_STR_RAW(description, _))] => { (code, Some(description)) } _ => unreachable!() @@ -110,7 +110,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, token_tree: &[TokenTree]) -> Box { let name = match token_tree { - [ast::TTToken(_, token::IDENT(ref name, _))] => name, + [ast::TtToken(_, token::IDENT(ref name, _))] => name, _ => unreachable!() }; diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index b5cc2d95890bb..64c8068607aa0 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -684,8 +684,8 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt, cx.span_err(sp, format!("{} takes 1 argument.", name).as_slice()); } else { match tts[0] { - ast::TTToken(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())), - ast::TTToken(_, token::LIT_STR_RAW(ident, _)) => { + ast::TtToken(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())), + ast::TtToken(_, token::LIT_STR_RAW(ident, _)) => { return Some(parse::raw_str_lit(ident.as_str())) } _ => { diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index e6befdd2aac92..e12f9ee133a32 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -23,7 +23,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] for (i, e) in tts.iter().enumerate() { if i & 1 == 1 { match *e { - ast::TTToken(_, token::COMMA) => (), + ast::TtToken(_, token::COMMA) => (), _ => { cx.span_err(sp, "concat_idents! expecting comma."); return DummyResult::expr(sp); @@ -31,7 +31,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] } } else { match *e { - ast::TTToken(_, token::IDENT(ident,_)) => { + ast::TtToken(_, token::IDENT(ident,_)) => { res_str.push_str(token::get_ident(ident).get()) } _ => { diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index baba38d8cbb32..5c4290d217bfd 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -23,7 +23,7 @@ use ptr::P; * * This is registered as a set of expression syntax extension called quote! 
* that lifts its argument token-tree to an AST representing the -* construction of the same token tree, with ast::TTNonterminal nodes +* construction of the same token tree, with ast::TtNonterminal nodes * interpreted as antiquotes (splices). * */ @@ -639,10 +639,10 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P { fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec> { match *tt { - ast::TTToken(sp, ref tok) => { + ast::TtToken(sp, ref tok) => { let e_sp = cx.expr_ident(sp, id_ext("_sp")); let e_tok = cx.expr_call(sp, - mk_ast_path(cx, sp, "TTToken"), + mk_ast_path(cx, sp, "TtToken"), vec!(e_sp, mk_token(cx, sp, tok))); let e_push = cx.expr_method_call(sp, @@ -651,14 +651,14 @@ fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec> { vec!(e_tok)); vec!(cx.stmt_expr(e_push)) }, - ast::TTDelimited(sp, ref open, ref tts, ref close) => { + ast::TtDelimited(sp, ref open, ref tts, ref close) => { mk_tt(cx, sp, &open.to_tt()).into_iter() .chain(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter())) .chain(mk_tt(cx, sp, &close.to_tt()).into_iter()) .collect() }, - ast::TTSequence(..) => fail!("TTSequence in quote!"), - ast::TTNonterminal(sp, ident) => { + ast::TtSequence(..) => fail!("TtSequence in quote!"), + ast::TtNonterminal(sp, ident) => { // tt.extend($ident.to_tokens(ext_cx).into_iter()) let e_to_toks = @@ -692,7 +692,7 @@ fn mk_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> (P, P) { // NB: It appears that the main parser loses its mind if we consider - // $foo as a TTNonterminal during the main parse, so we have to re-parse + // $foo as a TtNonterminal during the main parse, so we have to re-parse // under quote_depth > 0. This is silly and should go away; the _guess_ is // it has to do with transition away from supporting old-style macros, so // try removing it when enough of them are gone. diff --git a/src/libsyntax/ext/trace_macros.rs b/src/libsyntax/ext/trace_macros.rs index 4c3846731f432..abf798ddacb3a 100644 --- a/src/libsyntax/ext/trace_macros.rs +++ b/src/libsyntax/ext/trace_macros.rs @@ -20,10 +20,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt, tt: &[ast::TokenTree]) -> Box { match tt { - [ast::TTToken(_, ref tok)] if is_keyword(keywords::True, tok) => { + [ast::TtToken(_, ref tok)] if is_keyword(keywords::True, tok) => { cx.set_trace_macros(true); } - [ast::TTToken(_, ref tok)] if is_keyword(keywords::False, tok) => { + [ast::TtToken(_, ref tok)] if is_keyword(keywords::False, tok) => { cx.set_trace_macros(false); } _ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"), diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 4a3828a8043fb..75ad2e0fde884 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TTDelimited}; +use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TtDelimited}; use ast; use codemap::{Span, Spanned, DUMMY_SP}; use ext::base::{ExtCtxt, MacResult, MacroDef}; @@ -172,7 +172,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, MatchedNonterminal(NtTT(ref tt)) => { match **tt { // ignore delimiters - TTDelimited(_, _, ref tts, _) => (**tts).clone(), + TtDelimited(_, _, ref tts, _) => (**tts).clone(), _ => cx.span_fatal(sp, "macro rhs must be delimited"), } }, diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index c0b66851dfe3f..59b87afe0ee08 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -9,7 +9,7 @@ // except according to those terms. use ast; -use ast::{TokenTree, TTDelimited, TTToken, TTSequence, TTNonterminal, Ident}; +use ast::{TokenTree, TtDelimited, TtToken, TtSequence, TtNonterminal, Ident}; use codemap::{Span, DUMMY_SP}; use diagnostic::SpanHandler; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; @@ -45,7 +45,7 @@ pub struct TtReader<'a> { } /// This can do Macro-By-Example transcription. On the other hand, if -/// `src` contains no `TTSequence`s and `TTNonterminal`s, `interp` can (and +/// `src` contains no `TtSequence`s and `TtNonterminal`s, `interp` can (and /// should) be none. pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler, interp: Option>>, @@ -130,13 +130,13 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize { match *t { // The opening and closing delimiters are both tokens, so they are // treated as `LisUnconstrained`. - TTDelimited(_, _, ref tts, _) | TTSequence(_, ref tts, _, _) => { + TtDelimited(_, _, ref tts, _) | TtSequence(_, ref tts, _, _) => { tts.iter().fold(LisUnconstrained, |size, tt| { size + lockstep_iter_size(tt, r) }) }, - TTToken(..) => LisUnconstrained, - TTNonterminal(_, name) => match *lookup_cur_matched(r, name) { + TtToken(..) => LisUnconstrained, + TtNonterminal(_, name) => match *lookup_cur_matched(r, name) { MatchedNonterminal(_) => LisUnconstrained, MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name) }, @@ -194,15 +194,15 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } } } - loop { /* because it's easiest, this handles `TTDelimited` not starting - with a `TTToken`, even though it won't happen */ + loop { /* because it's easiest, this handles `TtDelimited` not starting + with a `TtToken`, even though it won't happen */ let t = { let frame = r.stack.last().unwrap(); // FIXME(pcwalton): Bad copy. (*frame.forest)[frame.idx].clone() }; match t { - TTDelimited(_, open, tts, close) => { + TtDelimited(_, open, tts, close) => { let mut forest = Vec::with_capacity(1 + tts.len() + 1); forest.push(open.to_tt()); forest.extend(tts.iter().map(|x| (*x).clone())); @@ -216,15 +216,15 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { }); // if this could be 0-length, we'd need to potentially recur here } - TTToken(sp, tok) => { + TtToken(sp, tok) => { r.cur_span = sp; r.cur_tok = tok; r.stack.last_mut().unwrap().idx += 1; return ret_val; } - TTSequence(sp, tts, sep, zerok) => { + TtSequence(sp, tts, sep, zerok) => { // FIXME(pcwalton): Bad copy. 
- match lockstep_iter_size(&TTSequence(sp, tts.clone(), sep.clone(), zerok), r) { + match lockstep_iter_size(&TtSequence(sp, tts.clone(), sep.clone(), zerok), r) { LisUnconstrained => { r.sp_diag.span_fatal( sp.clone(), /* blame macro writer */ @@ -259,7 +259,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } } // FIXME #2887: think about span stuff here - TTNonterminal(sp, ident) => { + TtNonterminal(sp, ident) => { r.stack.last_mut().unwrap().idx += 1; match *lookup_cur_matched(r, ident) { /* sidestep the interpolation tricks for ident because diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 9cffce74a095a..2dfa69b1f3820 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -569,10 +569,10 @@ pub fn noop_fold_arg(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg { pub fn noop_fold_tt(tt: &TokenTree, fld: &mut T) -> TokenTree { match *tt { - TTToken(span, ref tok) => - TTToken(span, fld.fold_token(tok.clone())), - TTDelimited(span, ref open, ref tts, ref close) => - TTDelimited(span, + TtToken(span, ref tok) => + TtToken(span, fld.fold_token(tok.clone())), + TtDelimited(span, ref open, ref tts, ref close) => + TtDelimited(span, Delimiter { span: open.span, token: fld.fold_token(open.token.clone()) @@ -582,13 +582,13 @@ pub fn noop_fold_tt(tt: &TokenTree, fld: &mut T) -> TokenTree { span: close.span, token: fld.fold_token(close.token.clone()) }), - TTSequence(span, ref pattern, ref sep, is_optional) => - TTSequence(span, + TtSequence(span, ref pattern, ref sep, is_optional) => + TtSequence(span, Rc::new(fld.fold_tts(pattern.as_slice())), sep.clone().map(|tok| fld.fold_token(tok)), is_optional), - TTNonterminal(sp,ref ident) => - TTNonterminal(sp,fld.fold_ident(*ident)) + TtNonterminal(sp,ref ident) => + TtNonterminal(sp,fld.fold_ident(*ident)) } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index a2e4028232100..d7438f11a9468 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -793,29 +793,29 @@ mod test { let tts = string_to_tts("macro_rules! 
zip (($a)=>($a))".to_string()); let tts: &[ast::TokenTree] = tts.as_slice(); match tts { - [ast::TTToken(_, _), - ast::TTToken(_, token::NOT), - ast::TTToken(_, _), - ast::TTDelimited(_, ast::TTToken(_, token::LPAREN), + [ast::TtToken(_, _), + ast::TtToken(_, token::NOT), + ast::TtToken(_, _), + ast::TtDelimited(_, ast::TtToken(_, token::LPAREN), ref delim_elts, - ast::TTToken(_, token::RPAREN))] => { + ast::TtToken(_, token::RPAREN))] => { let delim_elts: &[ast::TokenTree] = delim_elts.as_slice(); match delim_elts { - [ast::TTDelimited(_, ast::TTToken(_, token::LPAREN), + [ast::TtDelimited(_, ast::TtToken(_, token::LPAREN), ref first_set, - ast::TTToken(_, token::RPAREN)), - ast::TTToken(_, token::FAT_ARROW), - ast::TTDelimited(_, ast::TTToken(_, token::LPAREN), + ast::TtToken(_, token::RPAREN)), + ast::TtToken(_, token::FAT_ARROW), + ast::TtDelimited(_, ast::TtToken(_, token::LPAREN), ref second_set, - ast::TTToken(_, token::RPAREN))] => { + ast::TtToken(_, token::RPAREN))] => { let first_set: &[ast::TokenTree] = first_set.as_slice(); match first_set { - [ast::TTToken(_, token::DOLLAR), ast::TTToken(_, _)] => { + [ast::TtToken(_, token::DOLLAR), ast::TtToken(_, _)] => { let second_set: &[ast::TokenTree] = second_set.as_slice(); match second_set { - [ast::TTToken(_, token::DOLLAR), ast::TTToken(_, _)] => { + [ast::TtToken(_, token::DOLLAR), ast::TtToken(_, _)] => { assert_eq!("correct","correct") } _ => assert_eq!("wrong 4","correct") @@ -845,7 +845,7 @@ mod test { assert_eq!(json::encode(&tts), "[\ {\ - \"variant\":\"TTToken\",\ + \"variant\":\"TtToken\",\ \"fields\":[\ null,\ {\ @@ -858,7 +858,7 @@ mod test { ]\ },\ {\ - \"variant\":\"TTToken\",\ + \"variant\":\"TtToken\",\ \"fields\":[\ null,\ {\ @@ -871,18 +871,18 @@ mod test { ]\ },\ {\ - \"variant\":\"TTDelimited\",\ + \"variant\":\"TtDelimited\",\ \"fields\":[\ [\ {\ - \"variant\":\"TTToken\",\ + \"variant\":\"TtToken\",\ \"fields\":[\ null,\ \"LPAREN\"\ ]\ },\ {\ - \"variant\":\"TTToken\",\ + \"variant\":\"TtToken\",\ \"fields\":[\ null,\ {\ @@ -895,14 +895,14 @@ mod test { ]\ },\ {\ - \"variant\":\"TTToken\",\ + \"variant\":\"TtToken\",\ \"fields\":[\ null,\ \"COLON\"\ ]\ },\ {\ - \"variant\":\"TTToken\",\ + \"variant\":\"TtToken\",\ \"fields\":[\ null,\ {\ @@ -915,7 +915,7 @@ mod test { ]\ },\ {\ - \"variant\":\"TTToken\",\ + \"variant\":\"TtToken\",\ \"fields\":[\ null,\ \"RPAREN\"\ @@ -925,18 +925,18 @@ mod test { ]\ },\ {\ - \"variant\":\"TTDelimited\",\ + \"variant\":\"TtDelimited\",\ \"fields\":[\ [\ {\ - \"variant\":\"TTToken\",\ + \"variant\":\"TtToken\",\ \"fields\":[\ null,\ \"LBRACE\"\ ]\ },\ {\ - \"variant\":\"TTToken\",\ + \"variant\":\"TtToken\",\ \"fields\":[\ null,\ {\ @@ -949,14 +949,14 @@ mod test { ]\ },\ {\ - \"variant\":\"TTToken\",\ + \"variant\":\"TtToken\",\ \"fields\":[\ null,\ \"SEMI\"\ ]\ },\ {\ - \"variant\":\"TTToken\",\ + \"variant\":\"TtToken\",\ \"fields\":[\ null,\ \"RBRACE\"\ diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 1ed7baa13b42d..ebca362b9d857 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -48,8 +48,8 @@ use ast::{StmtExpr, StmtSemi, StmtMac, StructDef, StructField}; use ast::{StructVariantKind, BiSub}; use ast::StrStyle; use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue}; -use ast::{Delimiter, TokenTree, TraitItem, TraitRef, TTDelimited, TTSequence, TTToken}; -use ast::{TTNonterminal, TupleVariantKind, Ty, Ty_, TyBot}; +use ast::{Delimiter, TokenTree, TraitItem, TraitRef, TtDelimited, TtSequence, TtToken}; +use 
ast::{TtNonterminal, TupleVariantKind, Ty, Ty_, TyBot}; use ast::{TypeField, TyFixedLengthVec, TyClosure, TyProc, TyBareFn}; use ast::{TyTypeof, TyInfer, TypeMethod}; use ast::{TyNil, TyParam, TyParamBound, TyParen, TyPath, TyPtr, TyQPath}; @@ -2526,8 +2526,8 @@ impl<'a> Parser<'a> { /// parse a single token tree from the input. pub fn parse_token_tree(&mut self) -> TokenTree { // FIXME #6994: currently, this is too eager. It - // parses token trees but also identifies TTSequence's - // and TTNonterminal's; it's too early to know yet + // parses token trees but also identifies TtSequence's + // and TtNonterminal's; it's too early to know yet // whether something will be a nonterminal or a seq // yet. maybe_whole!(deref self, NtTT); @@ -2568,13 +2568,13 @@ impl<'a> Parser<'a> { let seq = match seq { Spanned { node, .. } => node, }; - TTSequence(mk_sp(sp.lo, p.span.hi), Rc::new(seq), s, z) + TtSequence(mk_sp(sp.lo, p.span.hi), Rc::new(seq), s, z) } else { - TTNonterminal(sp, p.parse_ident()) + TtNonterminal(sp, p.parse_ident()) } } _ => { - TTToken(p.span, p.bump_and_get()) + TtToken(p.span, p.bump_and_get()) } } } @@ -2615,7 +2615,7 @@ impl<'a> Parser<'a> { // Expand to cover the entire delimited token tree let span = Span { hi: self.span.hi, ..pre_span }; - TTDelimited(span, open, Rc::new(tts), close) + TtDelimited(span, open, Rc::new(tts), close) } _ => parse_non_delim_tt_tok(self) } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 9a102d229718f..e3b7a16410886 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -1020,14 +1020,14 @@ impl<'a> State<'a> { /// expression arguments as expressions). It can be done! I think. pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> { match *tt { - ast::TTDelimited(_, ref open, ref tts, ref close) => { + ast::TtDelimited(_, ref open, ref tts, ref close) => { try!(word(&mut self.s, parse::token::to_string(&open.token).as_slice())); try!(space(&mut self.s)); try!(self.print_tts(tts.as_slice())); try!(space(&mut self.s)); word(&mut self.s, parse::token::to_string(&close.token).as_slice()) }, - ast::TTToken(_, ref tk) => { + ast::TtToken(_, ref tk) => { try!(word(&mut self.s, parse::token::to_string(tk).as_slice())); match *tk { parse::token::DOC_COMMENT(..) 
=> { @@ -1036,7 +1036,7 @@ impl<'a> State<'a> { _ => Ok(()) } } - ast::TTSequence(_, ref tts, ref sep, zerok) => { + ast::TtSequence(_, ref tts, ref sep, zerok) => { try!(word(&mut self.s, "$(")); for tt_elt in (*tts).iter() { try!(self.print_tt(tt_elt)); @@ -1051,7 +1051,7 @@ impl<'a> State<'a> { } word(&mut self.s, if zerok { "*" } else { "+" }) } - ast::TTNonterminal(_, name) => { + ast::TtNonterminal(_, name) => { try!(word(&mut self.s, "$")); self.print_ident(name) } diff --git a/src/test/auxiliary/roman_numerals.rs b/src/test/auxiliary/roman_numerals.rs index 0d5abb8fb5dd9..40ed3a35ddf13 100644 --- a/src/test/auxiliary/roman_numerals.rs +++ b/src/test/auxiliary/roman_numerals.rs @@ -18,7 +18,7 @@ extern crate rustc; use syntax::codemap::Span; use syntax::parse::token::{IDENT, get_ident}; -use syntax::ast::{TokenTree, TTToken}; +use syntax::ast::{TokenTree, TtToken}; use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr}; use syntax::ext::build::AstBuilder; // trait for expr_uint use rustc::plugin::Registry; @@ -39,7 +39,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree]) ("I", 1)]; let text = match args { - [TTToken(_, IDENT(s, _))] => get_ident(s).to_string(), + [TtToken(_, IDENT(s, _))] => get_ident(s).to_string(), _ => { cx.span_err(sp, "argument should be a single identifier"); return DummyResult::any(sp); From 34dacb80cea4071233fb74b479e1f8c148a0be03 Mon Sep 17 00:00:00 2001 From: Brendan Zabarauskas Date: Thu, 23 Oct 2014 04:58:48 +1100 Subject: [PATCH 25/47] Reduce the size of the TokenTree --- src/libsyntax/ast.rs | 5 ++--- src/libsyntax/ext/quote.rs | 3 ++- src/libsyntax/ext/tt/macro_rules.rs | 5 ++++- src/libsyntax/ext/tt/transcribe.rs | 13 +++++++++---- src/libsyntax/fold.rs | 25 ++++++++++++++----------- src/libsyntax/parse/parser.rs | 2 +- src/libsyntax/print/pprust.rs | 3 ++- 7 files changed, 34 insertions(+), 22 deletions(-) diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index f87c7cf021547..a6156bfa496b3 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -629,8 +629,7 @@ pub enum TokenTree { /// A single token TtToken(Span, ::parse::token::Token), /// A delimited sequence of token trees - // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST. 
- TtDelimited(Span, Delimiter, Rc>, Delimiter), + TtDelimited(Span, Rc<(Delimiter, Vec, Delimiter)>), // These only make sense for right-hand-sides of MBE macros: @@ -649,7 +648,7 @@ impl TokenTree { pub fn get_span(&self) -> Span { match *self { TtToken(span, _) => span, - TtDelimited(span, _, _, _) => span, + TtDelimited(span, _) => span, TtSequence(span, _, _, _) => span, TtNonterminal(span, _) => span, } diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 5c4290d217bfd..6f1fd90adfa4b 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -651,7 +651,8 @@ fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec> { vec!(e_tok)); vec!(cx.stmt_expr(e_push)) }, - ast::TtDelimited(sp, ref open, ref tts, ref close) => { + ast::TtDelimited(sp, ref delimed) => { + let (ref open, ref tts, ref close) = **delimed; mk_tt(cx, sp, &open.to_tt()).into_iter() .chain(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter())) .chain(mk_tt(cx, sp, &close.to_tt()).into_iter()) diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 75ad2e0fde884..8b45cf34e8048 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -172,7 +172,10 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, MatchedNonterminal(NtTT(ref tt)) => { match **tt { // ignore delimiters - TtDelimited(_, _, ref tts, _) => (**tts).clone(), + TtDelimited(_, ref delimed) => { + let (_, ref tts, _) = **delimed; + tts.clone() + }, _ => cx.span_fatal(sp, "macro rhs must be delimited"), } }, diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 59b87afe0ee08..fde950e49997c 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -128,9 +128,13 @@ impl Add for LockstepIterSize { fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize { match *t { - // The opening and closing delimiters are both tokens, so they are - // treated as `LisUnconstrained`. 
- TtDelimited(_, _, ref tts, _) | TtSequence(_, ref tts, _, _) => { + TtDelimited(_, ref delimed) => { + let (_, ref tts, _) = **delimed; + tts.iter().fold(LisUnconstrained, |size, tt| { + size + lockstep_iter_size(tt, r) + }) + }, + TtSequence(_, ref tts, _, _) => { tts.iter().fold(LisUnconstrained, |size, tt| { size + lockstep_iter_size(tt, r) }) @@ -202,7 +206,8 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { (*frame.forest)[frame.idx].clone() }; match t { - TtDelimited(_, open, tts, close) => { + TtDelimited(_, ref delimed) => { + let (ref open, ref tts, ref close) = **delimed; let mut forest = Vec::with_capacity(1 + tts.len() + 1); forest.push(open.to_tt()); forest.extend(tts.iter().map(|x| (*x).clone())); diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 2dfa69b1f3820..0f9ab5c6b261e 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -571,17 +571,20 @@ pub fn noop_fold_tt(tt: &TokenTree, fld: &mut T) -> TokenTree { match *tt { TtToken(span, ref tok) => TtToken(span, fld.fold_token(tok.clone())), - TtDelimited(span, ref open, ref tts, ref close) => - TtDelimited(span, - Delimiter { - span: open.span, - token: fld.fold_token(open.token.clone()) - }, - Rc::new(fld.fold_tts(tts.as_slice())), - Delimiter { - span: close.span, - token: fld.fold_token(close.token.clone()) - }), + TtDelimited(span, ref delimed) => { + let (ref open, ref tts, ref close) = **delimed; + TtDelimited(span, Rc::new(( + Delimiter { + span: open.span, + token: fld.fold_token(open.token.clone()) + }, + fld.fold_tts(tts.as_slice()), + Delimiter { + span: close.span, + token: fld.fold_token(close.token.clone()) + }, + ))) + }, TtSequence(span, ref pattern, ref sep, is_optional) => TtSequence(span, Rc::new(fld.fold_tts(pattern.as_slice())), diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index ebca362b9d857..f8fa053b7aeab 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -2615,7 +2615,7 @@ impl<'a> Parser<'a> { // Expand to cover the entire delimited token tree let span = Span { hi: self.span.hi, ..pre_span }; - TtDelimited(span, open, Rc::new(tts), close) + TtDelimited(span, Rc::new((open, tts, close))) } _ => parse_non_delim_tt_tok(self) } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index e3b7a16410886..97c177b696c01 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -1020,7 +1020,8 @@ impl<'a> State<'a> { /// expression arguments as expressions). It can be done! I think. 
pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> { match *tt { - ast::TtDelimited(_, ref open, ref tts, ref close) => { + ast::TtDelimited(_, ref delimed) => { + let (ref open, ref tts, ref close) = **delimed; try!(word(&mut self.s, parse::token::to_string(&open.token).as_slice())); try!(space(&mut self.s)); try!(self.print_tts(tts.as_slice())); From 94d6eee3357e24913d1331b1fe0bd4e4524bdab6 Mon Sep 17 00:00:00 2001 From: Brendan Zabarauskas Date: Thu, 23 Oct 2014 11:24:20 +1100 Subject: [PATCH 26/47] Add a KleeneOp enum for clarity --- src/libsyntax/ast.rs | 19 +++++++++++------- src/libsyntax/ext/tt/macro_parser.rs | 4 ++-- src/libsyntax/ext/tt/macro_rules.rs | 5 +++-- src/libsyntax/ext/tt/transcribe.rs | 6 +++--- src/libsyntax/parse/parser.rs | 29 +++++++++++++++------------- src/libsyntax/print/pprust.rs | 9 ++++++--- 6 files changed, 42 insertions(+), 30 deletions(-) diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index a6156bfa496b3..580b93eb4c6f3 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -609,6 +609,14 @@ impl Delimiter { } } +/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star) +/// for token sequences. +#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] +pub enum KleeneOp { + ZeroOrMore, + OneOrMore, +} + /// When the main rust parser encounters a syntax-extension invocation, it /// parses the arguments to the invocation as a token-tree. This is a very /// loose structure, such that all sorts of different AST-fragments can @@ -633,12 +641,9 @@ pub enum TokenTree { // These only make sense for right-hand-sides of MBE macros: - /// A kleene-style repetition sequence with a span, a `TTForest`, - /// an optional separator, and a boolean where true indicates - /// zero or more (..), and false indicates one or more (+). + /// A Kleene-style repetition sequence with an optional separator. // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST. - TtSequence(Span, Rc>, Option<::parse::token::Token>, bool), - + TtSequence(Span, Rc>, Option<::parse::token::Token>, KleeneOp), /// A syntactic variable that will be filled in by macro expansion. TtNonterminal(Span, Ident) } @@ -711,9 +716,9 @@ pub type Matcher = Spanned; pub enum Matcher_ { /// Match one token MatchTok(::parse::token::Token), - /// Match repetitions of a sequence: body, separator, zero ok?, + /// Match repetitions of a sequence: body, separator, Kleene operator, /// lo, hi position-in-match-array used: - MatchSeq(Vec , Option<::parse::token::Token>, bool, uint, uint), + MatchSeq(Vec , Option<::parse::token::Token>, KleeneOp, uint, uint), /// Parse a Rust NT: name to bind, name of NT, position in match array: MatchNonterminal(Ident, Ident, uint) } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index f2081674fb7c8..cea8cab52654d 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -323,9 +323,9 @@ pub fn parse(sess: &ParseSess, } else { match ei.elts[idx].node.clone() { /* need to descend into sequence */ - MatchSeq(ref matchers, ref sep, zero_ok, + MatchSeq(ref matchers, ref sep, kleene_op, match_idx_lo, match_idx_hi) => { - if zero_ok { + if kleene_op == ast::ZeroOrMore { let mut new_ei = ei.clone(); new_ei.idx += 1u; //we specifically matched zero repeats. 
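An aside on the change in this patch: replacing the bare `bool` that distinguished `$(...)*` from `$(...)+` with the `KleeneOp` enum names the intent at every match site instead of encoding it in a flag. The following is a minimal, self-contained sketch of that pattern, not part of the patch itself: it uses present-day Rust syntax rather than the 2014 dialect in these diffs, and the helper `repetition_suffix` is a hypothetical stand-in for the pretty-printer logic.

```rust
/// Sketch only: mirrors the `KleeneOp` enum added to `ast.rs` in this patch.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum KleeneOp {
    /// Corresponds to `$(...)*`: the sequence may repeat zero or more times.
    ZeroOrMore,
    /// Corresponds to `$(...)+`: the sequence must repeat at least once.
    OneOrMore,
}

/// Hypothetical helper standing in for the printing code. Compare with the
/// old `if zerok { "*" } else { "+" }`, where the reader had to remember
/// which way the boolean pointed.
fn repetition_suffix(op: KleeneOp) -> &'static str {
    match op {
        KleeneOp::ZeroOrMore => "*",
        KleeneOp::OneOrMore => "+",
    }
}

fn main() {
    assert_eq!(repetition_suffix(KleeneOp::ZeroOrMore), "*");
    assert_eq!(repetition_suffix(KleeneOp::OneOrMore), "+");
}
```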
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 8b45cf34e8048..3b51fb380b816 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -232,10 +232,11 @@ pub fn add_new_extension<'cx>(cx: &'cx mut ExtCtxt, ms(MatchSeq(vec!( ms(MatchNonterminal(lhs_nm, special_idents::matchers, 0u)), ms(MatchTok(FAT_ARROW)), - ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u))), Some(SEMI), false, 0u, 2u)), + ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u))), Some(SEMI), + ast::OneOrMore, 0u, 2u)), //to phase into semicolon-termination instead of //semicolon-separation - ms(MatchSeq(vec!(ms(MatchTok(SEMI))), None, true, 2u, 2u))); + ms(MatchSeq(vec!(ms(MatchTok(SEMI))), None, ast::ZeroOrMore, 2u, 2u))); // Parse the macro_rules! invocation (`none` is for no interpolations): diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index fde950e49997c..1bb519f66cd55 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -227,9 +227,9 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { r.stack.last_mut().unwrap().idx += 1; return ret_val; } - TtSequence(sp, tts, sep, zerok) => { + TtSequence(sp, tts, sep, kleene_op) => { // FIXME(pcwalton): Bad copy. - match lockstep_iter_size(&TtSequence(sp, tts.clone(), sep.clone(), zerok), r) { + match lockstep_iter_size(&TtSequence(sp, tts.clone(), sep.clone(), kleene_op), r) { LisUnconstrained => { r.sp_diag.span_fatal( sp.clone(), /* blame macro writer */ @@ -243,7 +243,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } LisConstraint(len, _) => { if len == 0 { - if !zerok { + if kleene_op == ast::OneOrMore { // FIXME #2887 blame invoker r.sp_diag.span_fatal(sp.clone(), "this must repeat at least once"); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index f8fa053b7aeab..7bf751c2d5ebf 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -2497,27 +2497,30 @@ impl<'a> Parser<'a> { return e; } - /// Parse an optional separator followed by a kleene-style + /// Parse an optional separator followed by a Kleene-style /// repetition token (+ or *). - pub fn parse_sep_and_zerok(&mut self) -> (Option, bool) { - fn parse_zerok(parser: &mut Parser) -> Option { + pub fn parse_sep_and_kleene_op(&mut self) -> (Option, ast::KleeneOp) { + fn parse_kleene_op(parser: &mut Parser) -> Option { match parser.token { - token::BINOP(token::STAR) | token::BINOP(token::PLUS) => { - let zerok = parser.token == token::BINOP(token::STAR); + token::BINOP(token::STAR) => { parser.bump(); - Some(zerok) + Some(ast::ZeroOrMore) + }, + token::BINOP(token::PLUS) => { + parser.bump(); + Some(ast::OneOrMore) }, _ => None } }; - match parse_zerok(self) { - Some(zerok) => return (None, zerok), + match parse_kleene_op(self) { + Some(kleene_op) => return (None, kleene_op), None => {} } let separator = self.bump_and_get(); - match parse_zerok(self) { + match parse_kleene_op(self) { Some(zerok) => (Some(separator), zerok), None => self.fatal("expected `*` or `+`") } @@ -2564,11 +2567,11 @@ impl<'a> Parser<'a> { seq_sep_none(), |p| p.parse_token_tree() ); - let (s, z) = p.parse_sep_and_zerok(); + let (sep, repeat) = p.parse_sep_and_kleene_op(); let seq = match seq { Spanned { node, .. 
} => node, }; - TtSequence(mk_sp(sp.lo, p.span.hi), Rc::new(seq), s, z) + TtSequence(mk_sp(sp.lo, p.span.hi), Rc::new(seq), sep, repeat) } else { TtNonterminal(sp, p.parse_ident()) } @@ -2679,8 +2682,8 @@ impl<'a> Parser<'a> { if ms.len() == 0u { self.fatal("repetition body must be nonempty"); } - let (sep, zerok) = self.parse_sep_and_zerok(); - MatchSeq(ms, sep, zerok, name_idx_lo, *name_idx) + let (sep, kleene_op) = self.parse_sep_and_kleene_op(); + MatchSeq(ms, sep, kleene_op, name_idx_lo, *name_idx) } else { let bound_to = self.parse_ident(); self.expect(&token::COLON); diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 97c177b696c01..0a77343547bf8 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -1037,20 +1037,23 @@ impl<'a> State<'a> { _ => Ok(()) } } - ast::TtSequence(_, ref tts, ref sep, zerok) => { + ast::TtSequence(_, ref tts, ref separator, kleene_op) => { try!(word(&mut self.s, "$(")); for tt_elt in (*tts).iter() { try!(self.print_tt(tt_elt)); } try!(word(&mut self.s, ")")); - match *sep { + match *separator { Some(ref tk) => { try!(word(&mut self.s, parse::token::to_string(tk).as_slice())); } None => () } - word(&mut self.s, if zerok { "*" } else { "+" }) + match kleene_op { + ast::ZeroOrMore => word(&mut self.s, "*"), + ast::OneOrMore => word(&mut self.s, "+"), + } } ast::TtNonterminal(_, name) => { try!(word(&mut self.s, "$")); From 9acce10fe7f3103dc1e65168e956fe0e53bca117 Mon Sep 17 00:00:00 2001 From: Steven Fackler Date: Sat, 25 Oct 2014 16:43:14 -0700 Subject: [PATCH 27/47] Finish cfg syntax transition --- src/libsyntax/config.rs | 22 +++++----------------- 1 file changed, 5 insertions(+), 17 deletions(-) diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index 8824a937038a8..72c62a173fc30 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -250,30 +250,18 @@ fn impl_item_in_cfg(cx: &mut Context, impl_item: &ast::ImplItem) -> bool { // Determine if an item should be translated in the current crate // configuration based on the item's attributes fn in_cfg(diagnostic: &SpanHandler, cfg: &[P], attrs: &[ast::Attribute]) -> bool { - let mut in_cfg = false; - let mut seen_cfg = false; - for attr in attrs.iter() { + attrs.iter().all(|attr| { let mis = match attr.node.value.node { ast::MetaList(_, ref mis) if attr.check_name("cfg") => mis, - _ => continue + _ => return true }; if mis.len() != 1 { diagnostic.span_err(attr.span, "expected 1 cfg-pattern"); - return false; + return true; } - if seen_cfg { - diagnostic.span_err(attr.span, "The semantics of multiple `#[cfg(..)]` attributes on \ - same item are changing from the union of the cfgs to \ - the intersection of the cfgs. Change `#[cfg(a)] \ - #[cfg(b)]` to `#[cfg(any(a, b))]`."); - return false; - } - - seen_cfg = true; - in_cfg |= attr::cfg_matches(diagnostic, cfg, &*mis[0]); - } - in_cfg | !seen_cfg + attr::cfg_matches(diagnostic, cfg, &*mis[0]) + }) } From 6598d33bd0edf22adb24423851bf2761cae0ada0 Mon Sep 17 00:00:00 2001 From: Brendan Zabarauskas Date: Sun, 26 Oct 2014 10:51:41 +1100 Subject: [PATCH 28/47] Update parse::test::string_to_tts_1 test --- src/libsyntax/parse/mod.rs | 213 +++++++++++++++++-------------------- 1 file changed, 100 insertions(+), 113 deletions(-) diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index d7438f11a9468..2965094f23662 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -793,54 +793,47 @@ mod test { let tts = string_to_tts("macro_rules! 
zip (($a)=>($a))".to_string()); let tts: &[ast::TokenTree] = tts.as_slice(); match tts { - [ast::TtToken(_, _), + [ast::TtToken(_, token::IDENT(name_macro_rules, false)), ast::TtToken(_, token::NOT), - ast::TtToken(_, _), - ast::TtDelimited(_, ast::TtToken(_, token::LPAREN), - ref delim_elts, - ast::TtToken(_, token::RPAREN))] => { - let delim_elts: &[ast::TokenTree] = delim_elts.as_slice(); - match delim_elts { - [ast::TtDelimited(_, ast::TtToken(_, token::LPAREN), - ref first_set, - ast::TtToken(_, token::RPAREN)), - ast::TtToken(_, token::FAT_ARROW), - ast::TtDelimited(_, ast::TtToken(_, token::LPAREN), - ref second_set, - ast::TtToken(_, token::RPAREN))] => { - let first_set: &[ast::TokenTree] = - first_set.as_slice(); - match first_set { - [ast::TtToken(_, token::DOLLAR), ast::TtToken(_, _)] => { - let second_set: &[ast::TokenTree] = - second_set.as_slice(); - match second_set { - [ast::TtToken(_, token::DOLLAR), ast::TtToken(_, _)] => { - assert_eq!("correct","correct") - } - _ => assert_eq!("wrong 4","correct") - } - }, - _ => { - error!("failing value 3: {}",first_set); - assert_eq!("wrong 3","correct") - } + ast::TtToken(_, token::IDENT(name_zip, false)), + ast::TtDelimited(_, ref macro_delimed)] + if name_macro_rules.as_str() == "macro_rules" + && name_zip.as_str() == "zip" => { + let (ref macro_open, ref macro_tts, ref macro_close) = **macro_delimed; + match (macro_open, macro_tts.as_slice(), macro_close) { + (&ast::Delimiter { token: token::LPAREN, .. }, + [ast::TtDelimited(_, ref first_delimed), + ast::TtToken(_, token::FAT_ARROW), + ast::TtDelimited(_, ref second_delimed)], + &ast::Delimiter { token: token::RPAREN, .. }) => { + let (ref first_open, ref first_tts, ref first_close) = **first_delimed; + match (first_open, first_tts.as_slice(), first_close) { + (&ast::Delimiter { token: token::LPAREN, .. }, + [ast::TtToken(_, token::DOLLAR), + ast::TtToken(_, token::IDENT(name, false))], + &ast::Delimiter { token: token::RPAREN, .. }) + if name.as_str() == "a" => {}, + _ => fail!("value 3: {}", **first_delimed), + } + let (ref second_open, ref second_tts, ref second_close) = **second_delimed; + match (second_open, second_tts.as_slice(), second_close) { + (&ast::Delimiter { token: token::LPAREN, .. }, + [ast::TtToken(_, token::DOLLAR), + ast::TtToken(_, token::IDENT(name, false))], + &ast::Delimiter { token: token::RPAREN, .. 
}) + if name.as_str() == "a" => {}, + _ => fail!("value 4: {}", **second_delimed), } }, - _ => { - error!("failing value 2: {}",delim_elts); - assert_eq!("wrong","correct"); - } + _ => fail!("value 2: {}", **macro_delimed), } }, - _ => { - error!("failing value: {}",tts); - assert_eq!("wrong 1","correct"); - }, + _ => fail!("value: {}",tts), } } - #[test] fn string_to_tts_1 () { + #[test] + fn string_to_tts_1 () { let tts = string_to_tts("fn a (b : int) { b; }".to_string()); assert_eq!(json::encode(&tts), "[\ @@ -873,53 +866,50 @@ mod test { {\ \"variant\":\"TtDelimited\",\ \"fields\":[\ + null,\ [\ {\ - \"variant\":\"TtToken\",\ - \"fields\":[\ - null,\ - \"LPAREN\"\ - ]\ - },\ - {\ - \"variant\":\"TtToken\",\ - \"fields\":[\ - null,\ - {\ - \"variant\":\"IDENT\",\ - \"fields\":[\ - \"b\",\ - false\ - ]\ - }\ - ]\ - },\ - {\ - \"variant\":\"TtToken\",\ - \"fields\":[\ - null,\ - \"COLON\"\ - ]\ - },\ - {\ - \"variant\":\"TtToken\",\ - \"fields\":[\ - null,\ - {\ - \"variant\":\"IDENT\",\ - \"fields\":[\ - \"int\",\ - false\ - ]\ - }\ - ]\ + \"span\":null,\ + \"token\":\"LPAREN\"\ },\ + [\ + {\ + \"variant\":\"TtToken\",\ + \"fields\":[\ + null,\ + {\ + \"variant\":\"IDENT\",\ + \"fields\":[\ + \"b\",\ + false\ + ]\ + }\ + ]\ + },\ + {\ + \"variant\":\"TtToken\",\ + \"fields\":[\ + null,\ + \"COLON\"\ + ]\ + },\ + {\ + \"variant\":\"TtToken\",\ + \"fields\":[\ + null,\ + {\ + \"variant\":\"IDENT\",\ + \"fields\":[\ + \"int\",\ + false\ + ]\ + }\ + ]\ + }\ + ],\ {\ - \"variant\":\"TtToken\",\ - \"fields\":[\ - null,\ - \"RPAREN\"\ - ]\ + \"span\":null,\ + \"token\":\"RPAREN\"\ }\ ]\ ]\ @@ -927,40 +917,37 @@ mod test { {\ \"variant\":\"TtDelimited\",\ \"fields\":[\ + null,\ [\ {\ - \"variant\":\"TtToken\",\ - \"fields\":[\ - null,\ - \"LBRACE\"\ - ]\ - },\ - {\ - \"variant\":\"TtToken\",\ - \"fields\":[\ - null,\ - {\ - \"variant\":\"IDENT\",\ - \"fields\":[\ - \"b\",\ - false\ - ]\ - }\ - ]\ - },\ - {\ - \"variant\":\"TtToken\",\ - \"fields\":[\ - null,\ - \"SEMI\"\ - ]\ + \"span\":null,\ + \"token\":\"LBRACE\"\ },\ + [\ + {\ + \"variant\":\"TtToken\",\ + \"fields\":[\ + null,\ + {\ + \"variant\":\"IDENT\",\ + \"fields\":[\ + \"b\",\ + false\ + ]\ + }\ + ]\ + },\ + {\ + \"variant\":\"TtToken\",\ + \"fields\":[\ + null,\ + \"SEMI\"\ + ]\ + }\ + ],\ {\ - \"variant\":\"TtToken\",\ - \"fields\":[\ - null,\ - \"RBRACE\"\ - ]\ + \"span\":null,\ + \"token\":\"RBRACE\"\ }\ ]\ ]\ From f29535d2356fe138239f01899fc47555f3b7cf0f Mon Sep 17 00:00:00 2001 From: Vadim Chugunov Date: Sat, 25 Oct 2014 17:37:41 -0700 Subject: [PATCH 29/47] Fix bug #17982. 
--- src/libterm/win.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libterm/win.rs b/src/libterm/win.rs index 0aae85503d07d..d4f06403c1a6e 100644 --- a/src/libterm/win.rs +++ b/src/libterm/win.rs @@ -71,7 +71,8 @@ fn color_to_bits(color: color::Color) -> u16 { } fn bits_to_color(bits: u16) -> color::Color { - let color = match bits & 0x7 { + let bits = bits & 0x7; + let color = match bits { 0 => color::BLACK, 0x1 => color::BLUE, 0x2 => color::GREEN, From bed5a7d92a90f067e933fb7e4f2d656330f88f11 Mon Sep 17 00:00:00 2001 From: Steven Fackler Date: Sat, 25 Oct 2014 18:33:56 -0700 Subject: [PATCH 30/47] Add MemWriter::from_vec --- src/libstd/io/mem.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/libstd/io/mem.rs b/src/libstd/io/mem.rs index f86ae05d623ca..dd4a3e0593594 100644 --- a/src/libstd/io/mem.rs +++ b/src/libstd/io/mem.rs @@ -22,7 +22,7 @@ use slice; use slice::AsSlice; use vec::Vec; -static BUF_CAPACITY: uint = 128; +const BUF_CAPACITY: uint = 128; fn combine(seek: SeekStyle, cur: uint, end: uint, offset: i64) -> IoResult { // compute offset as signed and clamp to prevent overflow @@ -71,7 +71,12 @@ impl MemWriter { /// the internal buffer. #[inline] pub fn with_capacity(n: uint) -> MemWriter { - MemWriter { buf: Vec::with_capacity(n) } + MemWriter::from_vec(Vec::with_capacity(n)) + } + /// Create a new `MemWriter` that will append to an existing `Vec`. + #[inline] + pub fn from_vec(buf: Vec) -> MemWriter { + MemWriter { buf: buf } } /// Acquires an immutable reference to the underlying buffer of this From 30403204d695b687cc264c875eae829ae9368937 Mon Sep 17 00:00:00 2001 From: Joseph Crail Date: Sat, 25 Oct 2014 23:10:16 -0400 Subject: [PATCH 31/47] Fix spelling mistakes in comments. --- src/etc/emacs/rust-mode-tests.el | 2 +- src/libcollections/btree/map.rs | 4 ++-- src/libcollections/btree/node.rs | 18 +++++++++--------- src/librand/chacha.rs | 2 +- src/librustc/middle/traits/select.rs | 2 +- src/librustc/middle/trans/expr.rs | 4 ++-- src/librustc/middle/ty.rs | 4 ++-- src/librustc/middle/typeck/check/regionck.rs | 2 +- src/libstd/collections/hashmap/set.rs | 2 +- src/libsyntax/ast.rs | 2 +- src/test/run-pass/dst-coercions.rs | 2 +- src/test/run-pass/realloc-16687.rs | 2 +- src/test/run-pass/vec-dst.rs | 2 +- 13 files changed, 24 insertions(+), 24 deletions(-) diff --git a/src/etc/emacs/rust-mode-tests.el b/src/etc/emacs/rust-mode-tests.el index 1b6794e77f9f4..f255dbf15071b 100644 --- a/src/etc/emacs/rust-mode-tests.el +++ b/src/etc/emacs/rust-mode-tests.el @@ -376,7 +376,7 @@ fn bar( a:int, -> int { } -fn baz( a:int, // shoudl work with a comment here +fn baz( a:int, // should work with a comment here b:char) -> int { } diff --git a/src/libcollections/btree/map.rs b/src/libcollections/btree/map.rs index dbbff61b8dd50..77fb6d4a1203b 100644 --- a/src/libcollections/btree/map.rs +++ b/src/libcollections/btree/map.rs @@ -41,10 +41,10 @@ use ringbuf::RingBuf; /// the BST strategy. /// /// A B-Tree instead makes each node contain B-1 to 2B-1 elements in a contiguous array. By doing -/// this, we reduce the number of allocations by a factor of B, and improve cache effeciency in +/// this, we reduce the number of allocations by a factor of B, and improve cache efficiency in /// searches. However, this does mean that searches will have to do *more* comparisons on average. /// The precise number of comparisons depends on the node search strategy used. For optimal cache -/// effeciency, one could search the nodes linearly. 
For optimal comparisons, one could search +/// efficiency, one could search the nodes linearly. For optimal comparisons, one could search /// the node using binary search. As a compromise, one could also perform a linear search /// that initially only checks every ith element for some choice of i. /// diff --git a/src/libcollections/btree/node.rs b/src/libcollections/btree/node.rs index e30b29f8767d3..4da362952b67c 100644 --- a/src/libcollections/btree/node.rs +++ b/src/libcollections/btree/node.rs @@ -53,7 +53,7 @@ pub struct Node { // hard. For now, we accept this cost in the name of correctness and simplicity. // // As a compromise, keys and vals could be merged into one Vec<(K, V)>, which would shave - // off 3 words, but possibly hurt our cache effeciency during search, which only cares about + // off 3 words, but possibly hurt our cache efficiency during search, which only cares about // keys. This would also avoid the Zip we use in our iterator implementations. This is // probably worth investigating. // @@ -72,7 +72,7 @@ impl Node { /// `GoDown` will be yielded with the index of the subtree the key must lie in. pub fn search(&self, key: &K) -> SearchResult { // FIXME(Gankro): Tune when to search linear or binary based on B (and maybe K/V). - // For the B configured as of this writing (B = 6), binary search was *singnificantly* + // For the B configured as of this writing (B = 6), binary search was *significantly* // worse for uints. self.search_linear(key) } @@ -375,7 +375,7 @@ impl Node { } } - /// Steal! Stealing is roughly analagous to a binary tree rotation. + /// Steal! Stealing is roughly analogous to a binary tree rotation. /// In this case, we're "rotating" right. unsafe fn steal_to_left(&mut self, underflowed_child_index: uint) { // Take the biggest stuff off left @@ -387,7 +387,7 @@ impl Node { } }; - // Swap the parent's seperating key-value pair with left's + // Swap the parent's separating key-value pair with left's self.unsafe_swap(underflowed_child_index - 1, &mut key, &mut val); // Put them at the start of right @@ -402,7 +402,7 @@ impl Node { } } - /// Steal! Stealing is roughly analagous to a binary tree rotation. + /// Steal! Stealing is roughly analogous to a binary tree rotation. /// In this case, we're "rotating" left. unsafe fn steal_to_right(&mut self, underflowed_child_index: uint) { // Take the smallest stuff off right @@ -414,7 +414,7 @@ impl Node { } }; - // Swap the parent's seperating key-value pair with right's + // Swap the parent's separating key-value pair with right's self.unsafe_swap(underflowed_child_index, &mut key, &mut val); // Put them at the end of left @@ -430,9 +430,9 @@ impl Node { } /// Merge! Left and right will be smooshed into one node, along with the key-value - /// pair that seperated them in their parent. + /// pair that separated them in their parent. 
unsafe fn merge_children(&mut self, left_index: uint) { - // Permanently remove right's index, and the key-value pair that seperates + // Permanently remove right's index, and the key-value pair that separates // left and right let (key, val, right) = { match (self.keys.remove(left_index), @@ -448,7 +448,7 @@ impl Node { left.absorb(key, val, right); } - /// Take all the values from right, seperated by the given key and value + /// Take all the values from right, separated by the given key and value fn absorb(&mut self, key: K, val: V, right: Node) { // Just as a sanity check, make sure we can fit this guy in debug_assert!(self.len() + right.len() <= self.capacity()) diff --git a/src/librand/chacha.rs b/src/librand/chacha.rs index 83d03bb265e95..97e68bcbb2c6c 100644 --- a/src/librand/chacha.rs +++ b/src/librand/chacha.rs @@ -173,7 +173,7 @@ impl<'a> SeedableRng<&'a [u32]> for ChaChaRng { fn reseed(&mut self, seed: &'a [u32]) { // reset state self.init(&[0u32, ..KEY_WORDS]); - // set key inplace + // set key in place let key = self.state.slice_mut(4, 4+KEY_WORDS); for (k, s) in key.iter_mut().zip(seed.iter()) { *k = *s; diff --git a/src/librustc/middle/traits/select.rs b/src/librustc/middle/traits/select.rs index 23257912b8264..998a9164647fe 100644 --- a/src/librustc/middle/traits/select.rs +++ b/src/librustc/middle/traits/select.rs @@ -211,7 +211,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // can be applied to particular types. It skips the "confirmation" // step and hence completely ignores output type parameters. // - // The result is "true" if the obliation *may* hold and "false" if + // The result is "true" if the obligation *may* hold and "false" if // we can be sure it does not. pub fn evaluate_obligation_intercrate(&mut self, diff --git a/src/librustc/middle/trans/expr.rs b/src/librustc/middle/trans/expr.rs index 834441d4430b0..cec96b13fbe41 100644 --- a/src/librustc/middle/trans/expr.rs +++ b/src/librustc/middle/trans/expr.rs @@ -2117,7 +2117,7 @@ fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, deref_owned_pointer(bcx, expr, datum, content_ty) } else { // A fat pointer and an opened DST value have the same - // represenation just different types. Since there is no + // representation just different types. Since there is no // temporary for `*e` here (because it is unsized), we cannot // emulate the sized object code path for running drop glue and // free. Instead, we schedule cleanup for `e`, turning it into @@ -2142,7 +2142,7 @@ fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // owner (or, in the case of *T, by the user). DatumBlock::new(bcx, Datum::new(ptr, content_ty, LvalueExpr)) } else { - // A fat pointer and an opened DST value have the same represenation + // A fat pointer and an opened DST value have the same representation // just different types. DatumBlock::new(bcx, Datum::new(datum.val, ty::mk_open(bcx.tcx(), content_ty), diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index 8c602548f33f6..1ce567e63293b 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -3605,7 +3605,7 @@ pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind { // Special case: A unit like struct's constructor must be called without () at the // end (like `UnitStruct`) which means this is an ExprPath to a DefFn. But in case - // of unit structs this is should not be interpretet as function pointer but as + // of unit structs this is should not be interpreted as function pointer but as // call to the constructor. 
def::DefFn(_, _, true) => RvalueDpsExpr, @@ -5409,7 +5409,7 @@ impl BorrowKind { MutBorrow => ast::MutMutable, ImmBorrow => ast::MutImmutable, - // We have no type correponding to a unique imm borrow, so + // We have no type corresponding to a unique imm borrow, so // use `&mut`. It gives all the capabilities of an `&uniq` // and hence is a safe "over approximation". UniqueImmBorrow => ast::MutMutable, diff --git a/src/librustc/middle/typeck/check/regionck.rs b/src/librustc/middle/typeck/check/regionck.rs index 31fe30fc9f857..c49246b5c5423 100644 --- a/src/librustc/middle/typeck/check/regionck.rs +++ b/src/librustc/middle/typeck/check/regionck.rs @@ -1675,7 +1675,7 @@ fn link_reborrowed_region(rcx: &Rcx, // // If mutability was inferred from an upvar, we may be // forced to revisit this decision later if processing - // another borrow or nested closure ends up coverting the + // another borrow or nested closure ends up converting the // upvar borrow kind to mutable/unique. Record the // information needed to perform the recursive link in the // maybe link map. diff --git a/src/libstd/collections/hashmap/set.rs b/src/libstd/collections/hashmap/set.rs index dde1f27c9a322..ca954679c1c9d 100644 --- a/src/libstd/collections/hashmap/set.rs +++ b/src/libstd/collections/hashmap/set.rs @@ -186,7 +186,7 @@ impl, S, H: Hasher> HashSet { /// # Example /// /// This is a slightly silly example where we define the number's - /// parity as the equivilance class. It is important that the + /// parity as the equivalance class. It is important that the /// values hash the same, which is why we implement `Hash`. /// /// ``` diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 8eaee7282d197..c06feae6872b8 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -436,7 +436,7 @@ pub enum Stmt_ { /// Expr with trailing semi-colon (may have any type): StmtSemi(P, NodeId), - /// bool: is there a trailing sem-colon? + /// bool: is there a trailing semi-colon? StmtMac(Mac, bool), } diff --git a/src/test/run-pass/dst-coercions.rs b/src/test/run-pass/dst-coercions.rs index 1c9d5cd3afe4d..dbad546ce1ae3 100644 --- a/src/test/run-pass/dst-coercions.rs +++ b/src/test/run-pass/dst-coercions.rs @@ -28,7 +28,7 @@ pub fn main() { let x: *mut S = &mut S; - // Test we can chnage the mutability from mut to const. + // Test we can change the mutability from mut to const. let x: &T = &mut S; let x: *const T = &mut S; } diff --git a/src/test/run-pass/realloc-16687.rs b/src/test/run-pass/realloc-16687.rs index d8f48c7e6623b..966e34dfe49c6 100644 --- a/src/test/run-pass/realloc-16687.rs +++ b/src/test/run-pass/realloc-16687.rs @@ -30,7 +30,7 @@ unsafe fn test_triangle() -> bool { let ascend = ascend.as_mut_slice(); static ALIGN : uint = 1; - // Checks that `ascend` forms triangle of acending size formed + // Checks that `ascend` forms triangle of ascending size formed // from pairs of rows (where each pair of rows is equally sized), // and the elements of the triangle match their row-pair index. 
unsafe fn sanity_check(ascend: &[*mut u8]) { diff --git a/src/test/run-pass/vec-dst.rs b/src/test/run-pass/vec-dst.rs index 2fe8f4bdf011a..11b58948e0535 100644 --- a/src/test/run-pass/vec-dst.rs +++ b/src/test/run-pass/vec-dst.rs @@ -10,7 +10,7 @@ fn sub_expr() { // Test for a &[T] => &&[T] coercion in sub-expression position - // (surpisingly, this can cause errors which are not caused by either of: + // (surprisingly, this can cause errors which are not caused by either of: // `let x = vec.slice_mut(0, 2);` // `foo(vec.slice_mut(0, 2));` ). let mut vec: Vec = vec!(1, 2, 3, 4); From 2877e47ea7409ff4ea820b0a6668111baf441377 Mon Sep 17 00:00:00 2001 From: Brian Koropoff Date: Sat, 25 Oct 2014 21:39:34 -0700 Subject: [PATCH 32/47] Ensure unboxed closure upvars are marked as used mutably Closes #18336 --- src/librustc/middle/borrowck/check_loans.rs | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/librustc/middle/borrowck/check_loans.rs b/src/librustc/middle/borrowck/check_loans.rs index 5f09cafb5e26e..5d984168a31cb 100644 --- a/src/librustc/middle/borrowck/check_loans.rs +++ b/src/librustc/middle/borrowck/check_loans.rs @@ -825,12 +825,20 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { mc::cat_rvalue(..) | mc::cat_static_item | mc::cat_deref(_, _, mc::UnsafePtr(..)) | - mc::cat_deref(_, _, mc::BorrowedPtr(..)) | mc::cat_deref(_, _, mc::Implicit(..)) => { assert_eq!(cmt.mutbl, mc::McDeclared); return; } + mc::cat_deref(_, _, mc::BorrowedPtr(..)) => { + assert_eq!(cmt.mutbl, mc::McDeclared); + // We need to drill down to upvar if applicable + match cmt.upvar() { + Some(b) => cmt = b, + None => return + } + } + mc::cat_discr(b, _) | mc::cat_deref(b, _, mc::OwnedPtr) => { assert_eq!(cmt.mutbl, mc::McInherited); From 1062955b46950114ed3a0a71c043e9644afdce48 Mon Sep 17 00:00:00 2001 From: Brian Koropoff Date: Sat, 25 Oct 2014 21:40:25 -0700 Subject: [PATCH 33/47] Tweak mem categorization of upvar mutability - Correctly categorize env pointer deref for `FnMut` as declared rather than inherited. This fixes an assert in borrowck. Closes #18238 - Categorize env pointer deref as mutable only if the closure is `FnMut` *and* the original variable is declared mutable. This disallows capture-by-value `FnMut` closures from mutating captured variables that aren't declared mutable. This is a difference from the equivalent desugared code which would permit it, but it is consistent with the behavior of procs. Closes #18335 - Avoid computing info about the env pointer if there isn't one. --- src/librustc/middle/mem_categorization.rs | 131 ++++++++++++---------- 1 file changed, 69 insertions(+), 62 deletions(-) diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index ebca401ecf4eb..1dc5ce274c86b 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -656,51 +656,54 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { // FnOnce | copied | upvar -> &'up bk // old stack | N/A | upvar -> &'env mut -> &'up bk // old proc/once | copied | N/A + let var_ty = if_ok!(self.node_ty(var_id)); + let upvar_id = ty::UpvarId { var_id: var_id, closure_expr_id: fn_node_id }; - // Do we need to deref through an env reference? 
- let has_env_deref = kind != ty::FnOnceUnboxedClosureKind; - // Mutability of original variable itself let var_mutbl = MutabilityCategory::from_local(self.tcx(), var_id); - // Mutability of environment dereference - let env_mutbl = match kind { - ty::FnOnceUnboxedClosureKind => var_mutbl, - ty::FnMutUnboxedClosureKind => McInherited, - ty::FnUnboxedClosureKind => McImmutable + // Construct information about env pointer dereference, if any + let mutbl = match kind { + ty::FnOnceUnboxedClosureKind => None, // None, env is by-value + ty::FnMutUnboxedClosureKind => match mode { // Depends on capture type + ast::CaptureByValue => Some(var_mutbl), // Mutable if the original var is + ast::CaptureByRef => Some(McDeclared) // Mutable regardless + }, + ty::FnUnboxedClosureKind => Some(McImmutable) // Never mutable }; + let env_info = mutbl.map(|env_mutbl| { + // Look up the node ID of the closure body so we can construct + // a free region within it + let fn_body_id = { + let fn_expr = match self.tcx().map.find(fn_node_id) { + Some(ast_map::NodeExpr(e)) => e, + _ => unreachable!() + }; - // Look up the node ID of the closure body so we can construct - // a free region within it - let fn_body_id = { - let fn_expr = match self.tcx().map.find(fn_node_id) { - Some(ast_map::NodeExpr(e)) => e, - _ => unreachable!() + match fn_expr.node { + ast::ExprFnBlock(_, _, ref body) | + ast::ExprProc(_, ref body) | + ast::ExprUnboxedFn(_, _, _, ref body) => body.id, + _ => unreachable!() + } }; - match fn_expr.node { - ast::ExprFnBlock(_, _, ref body) | - ast::ExprProc(_, ref body) | - ast::ExprUnboxedFn(_, _, _, ref body) => body.id, - _ => unreachable!() - } - }; + // Region of environment pointer + let env_region = ty::ReFree(ty::FreeRegion { + scope_id: fn_body_id, + bound_region: ty::BrEnv + }); - // Region of environment pointer - let env_region = ty::ReFree(ty::FreeRegion { - scope_id: fn_body_id, - bound_region: ty::BrEnv - }); - - let env_ptr = BorrowedPtr(if env_mutbl.is_mutable() { - ty::MutBorrow - } else { - ty::ImmBorrow - }, env_region); + let env_ptr = BorrowedPtr(if env_mutbl.is_mutable() { + ty::MutBorrow + } else { + ty::ImmBorrow + }, env_region); - let var_ty = if_ok!(self.node_ty(var_id)); + (env_mutbl, env_ptr) + }); // First, switch by capture mode Ok(match mode { @@ -718,25 +721,27 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { note: NoteNone }; - if has_env_deref { - // We need to add the env deref. This means that - // the above is actually immutable and has a ref - // type. However, nothing should actually look at - // the type, so we can get away with stuffing a - // `ty_err` in there instead of bothering to - // construct a proper one. - base.mutbl = McImmutable; - base.ty = ty::mk_err(); - Rc::new(cmt_ { - id: id, - span: span, - cat: cat_deref(Rc::new(base), 0, env_ptr), - mutbl: env_mutbl, - ty: var_ty, - note: NoteClosureEnv(upvar_id) - }) - } else { - Rc::new(base) + match env_info { + Some((env_mutbl, env_ptr)) => { + // We need to add the env deref. This means + // that the above is actually immutable and + // has a ref type. However, nothing should + // actually look at the type, so we can get + // away with stuffing a `ty_err` in there + // instead of bothering to construct a proper + // one. 
+ base.mutbl = McImmutable; + base.ty = ty::mk_err(); + Rc::new(cmt_ { + id: id, + span: span, + cat: cat_deref(Rc::new(base), 0, env_ptr), + mutbl: env_mutbl, + ty: var_ty, + note: NoteClosureEnv(upvar_id) + }) + } + None => Rc::new(base) } }, ast::CaptureByRef => { @@ -756,16 +761,18 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { note: NoteNone }; - // As in the by-value case, add env deref if needed - if has_env_deref { - base = cmt_ { - id: id, - span: span, - cat: cat_deref(Rc::new(base), 0, env_ptr), - mutbl: env_mutbl, - ty: ty::mk_err(), - note: NoteClosureEnv(upvar_id) - }; + match env_info { + Some((env_mutbl, env_ptr)) => { + base = cmt_ { + id: id, + span: span, + cat: cat_deref(Rc::new(base), 0, env_ptr), + mutbl: env_mutbl, + ty: ty::mk_err(), + note: NoteClosureEnv(upvar_id) + }; + } + None => {} } // Look up upvar borrow so we can get its region From 7129f172aee6d5143ca36d81defd47dd0c113ce3 Mon Sep 17 00:00:00 2001 From: Brian Koropoff Date: Sat, 25 Oct 2014 21:46:24 -0700 Subject: [PATCH 34/47] Improve diagnostics that result from the fix for #18335 --- src/librustc/middle/borrowck/check_loans.rs | 29 ++++++++++++++++----- src/librustc/middle/borrowck/mod.rs | 21 ++++++++++----- 2 files changed, 37 insertions(+), 13 deletions(-) diff --git a/src/librustc/middle/borrowck/check_loans.rs b/src/librustc/middle/borrowck/check_loans.rs index 5d984168a31cb..bdcf01b21abd9 100644 --- a/src/librustc/middle/borrowck/check_loans.rs +++ b/src/librustc/middle/borrowck/check_loans.rs @@ -777,13 +777,28 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { // Otherwise, just a plain error. match assignee_cmt.note { mc::NoteClosureEnv(upvar_id) => { - self.bccx.span_err( - assignment_span, - format!("cannot assign to {}", - self.bccx.cmt_to_string(&*assignee_cmt)).as_slice()); - self.bccx.span_note( - self.tcx().map.span(upvar_id.closure_expr_id), - "consider changing this closure to take self by mutable reference"); + // If this is an `Fn` closure, it simply can't mutate upvars. + // If it's an `FnMut` closure, the original variable was declared immutable. + // We need to determine which is the case here. + let kind = match assignee_cmt.upvar().unwrap().cat { + mc::cat_upvar(mc::Upvar { kind, .. }) => kind, + _ => unreachable!() + }; + if kind == ty::FnUnboxedClosureKind { + self.bccx.span_err( + assignment_span, + format!("cannot assign to {}", + self.bccx.cmt_to_string(&*assignee_cmt)).as_slice()); + self.bccx.span_note( + self.tcx().map.span(upvar_id.closure_expr_id), + "consider changing this closure to take self by mutable reference"); + } else { + self.bccx.span_err( + assignment_span, + format!("cannot assign to {} {}", + assignee_cmt.mutbl.to_user_str(), + self.bccx.cmt_to_string(&*assignee_cmt)).as_slice()); + } } _ => match opt_loan_path(&assignee_cmt) { Some(lp) => { diff --git a/src/librustc/middle/borrowck/mod.rs b/src/librustc/middle/borrowck/mod.rs index 850c6008706c8..06249b956b67d 100644 --- a/src/librustc/middle/borrowck/mod.rs +++ b/src/librustc/middle/borrowck/mod.rs @@ -626,7 +626,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { match err.code { err_mutbl => { let descr = match err.cmt.note { - mc::NoteClosureEnv(_) => { + mc::NoteClosureEnv(_) | mc::NoteUpvarRef(_) => { self.cmt_to_string(&*err.cmt) } _ => match opt_loan_path(&err.cmt) { @@ -762,11 +762,20 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { match code { err_mutbl(..) 
=> { match err.cmt.note { - mc::NoteClosureEnv(upvar_id) => { - self.tcx.sess.span_note( - self.tcx.map.span(upvar_id.closure_expr_id), - "consider changing this closure to take \ - self by mutable reference"); + mc::NoteClosureEnv(upvar_id) | mc::NoteUpvarRef(upvar_id) => { + // If this is an `Fn` closure, it simply can't mutate upvars. + // If it's an `FnMut` closure, the original variable was declared immutable. + // We need to determine which is the case here. + let kind = match err.cmt.upvar().unwrap().cat { + mc::cat_upvar(mc::Upvar { kind, .. }) => kind, + _ => unreachable!() + }; + if kind == ty::FnUnboxedClosureKind { + self.tcx.sess.span_note( + self.tcx.map.span(upvar_id.closure_expr_id), + "consider changing this closure to take \ + self by mutable reference"); + } } _ => {} } From 6be48ea92e0b3fabb782c5837c134664753c88d2 Mon Sep 17 00:00:00 2001 From: Brian Koropoff Date: Sat, 25 Oct 2014 21:27:19 -0700 Subject: [PATCH 35/47] Add regression test for #18238 and #18336 --- .../run-pass/unboxed-closures-move-mutable.rs | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 src/test/run-pass/unboxed-closures-move-mutable.rs diff --git a/src/test/run-pass/unboxed-closures-move-mutable.rs b/src/test/run-pass/unboxed-closures-move-mutable.rs new file mode 100644 index 0000000000000..f7e1e46e54d65 --- /dev/null +++ b/src/test/run-pass/unboxed-closures-move-mutable.rs @@ -0,0 +1,28 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(unboxed_closures)] +#![deny(unused_mut)] + +// Test that mutating a mutable upvar in a capture-by-value unboxed +// closure does not ice (issue #18238) and marks the upvar as used +// mutably so we do not get a spurious warning about it not needing to +// be declared mutable (issue #18336). + +fn main() { + { + let mut x = 0u; + move |&mut:| x += 1; + } + { + let mut x = 0u; + move |:| x += 1; + } +} From 5662bbad0726811be241f88086781208875b05e1 Mon Sep 17 00:00:00 2001 From: Brian Koropoff Date: Sat, 25 Oct 2014 21:27:54 -0700 Subject: [PATCH 36/47] Add regression test for #18335 --- .../unboxed-closure-immutable-capture.rs | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 src/test/compile-fail/unboxed-closure-immutable-capture.rs diff --git a/src/test/compile-fail/unboxed-closure-immutable-capture.rs b/src/test/compile-fail/unboxed-closure-immutable-capture.rs new file mode 100644 index 0000000000000..e28abaf2b1fd6 --- /dev/null +++ b/src/test/compile-fail/unboxed-closure-immutable-capture.rs @@ -0,0 +1,31 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![feature(unboxed_closures)] + +// Test that even unboxed closures that are capable of mutating their +// environment cannot mutate captured variables that have not been +// declared mutable (#18335) + +fn set(x: &mut uint) { *x = 0; } + +fn main() { + let x = 0u; + move |&mut:| x = 1; //~ ERROR cannot assign + move |&mut:| set(&mut x); //~ ERROR cannot borrow + move |:| x = 1; //~ ERROR cannot assign + move |:| set(&mut x); //~ ERROR cannot borrow + |&mut:| x = 1; //~ ERROR cannot assign + // FIXME: this should be `cannot borrow` (issue #18330) + |&mut:| set(&mut x); //~ ERROR cannot assign + |:| x = 1; //~ ERROR cannot assign + // FIXME: this should be `cannot borrow` (issue #18330) + |:| set(&mut x); //~ ERROR cannot assign +} From 79e05e999505a3ed4f3e4e1ed5e57deb39ebe485 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adolfo=20Ochagav=C3=ADa?= Date: Sun, 26 Oct 2014 12:58:04 +0100 Subject: [PATCH 37/47] Implement Show for `Arc` Fixes https://github.com/rust-lang/rust/issues/18299 --- src/liballoc/arc.rs | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs index c447cb46c532a..0e62fbb01441d 100644 --- a/src/liballoc/arc.rs +++ b/src/liballoc/arc.rs @@ -15,6 +15,7 @@ use core::atomic; use core::clone::Clone; +use core::fmt::{mod, Show}; use core::kinds::{Sync, Send}; use core::mem::{min_align_of, size_of, drop}; use core::mem; @@ -147,6 +148,12 @@ impl Deref for Arc { } } +impl Show for Arc { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + (**self).fmt(f) + } +} + impl Arc { /// Acquires a mutable pointer to the inner contents by guaranteeing that /// the reference count is one (no sharing is possible). @@ -280,6 +287,7 @@ mod tests { use std::mem::drop; use std::ops::Drop; use std::option::{Option, Some, None}; + use std::str::Str; use std::sync::atomic; use std::task; use std::vec::Vec; @@ -426,4 +434,10 @@ mod tests { assert!(canary.load(atomic::Acquire) == 1); drop(arc_weak); } + + #[test] + fn show_arc() { + let a = Arc::new(5u32); + assert!(format!("{}", a).as_slice() == "5") + } } From 622ae41e90e7caad31c71b3299e13b362cf345cd Mon Sep 17 00:00:00 2001 From: Daniel Hofstetter Date: Sun, 26 Oct 2014 16:58:17 +0100 Subject: [PATCH 38/47] Guide: Add link to FFI explanation --- src/doc/guide.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/doc/guide.md b/src/doc/guide.md index c7b8e42b28cde..12df2c548d249 100644 --- a/src/doc/guide.md +++ b/src/doc/guide.md @@ -5288,9 +5288,9 @@ There are two circumstances where Rust's safety provisions don't work well. The first is when interfacing with C code, and the second is when building certain kinds of abstractions. -Rust has support for FFI (which you can read about in the [FFI -Guide](guide-ffi.html)), but can't guarantee that the C code will be safe. -Therefore, Rust marks such functions with the `unsafe` +Rust has support for [FFI](http://en.wikipedia.org/wiki/Foreign_function_interface) +(which you can read about in the [FFI Guide](guide-ffi.html)), but can't guarantee +that the C code will be safe. Therefore, Rust marks such functions with the `unsafe` keyword, which indicates that the function may not behave properly. 
Second, if you'd like to create some sort of shared-memory data structure, Rust From f16744ce521537346188f984fdc50ea6c7c6eda3 Mon Sep 17 00:00:00 2001 From: Jakub Bukaj Date: Sun, 26 Oct 2014 22:35:26 +0100 Subject: [PATCH 39/47] Fix a typecheck regression with constant borrowed pointers in patterns Change the eqtype relationship to be a suptype relationship instead. Fixes #18350. Fixes #18352. --- src/librustc/middle/typeck/check/_match.rs | 2 +- src/test/run-pass/issue-18352.rs | 22 ++++++++++++++++++++++ 2 files changed, 23 insertions(+), 1 deletion(-) create mode 100644 src/test/run-pass/issue-18352.rs diff --git a/src/librustc/middle/typeck/check/_match.rs b/src/librustc/middle/typeck/check/_match.rs index 14725c581c8a8..1dcd8c76f4b84 100644 --- a/src/librustc/middle/typeck/check/_match.rs +++ b/src/librustc/middle/typeck/check/_match.rs @@ -74,7 +74,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) { let const_did = tcx.def_map.borrow().get_copy(&pat.id).def_id(); let const_pty = ty::lookup_item_type(tcx, const_did); fcx.write_ty(pat.id, const_pty.ty); - demand::eqtype(fcx, pat.span, expected, const_pty.ty); + demand::suptype(fcx, pat.span, expected, const_pty.ty); } ast::PatIdent(bm, ref path, ref sub) if pat_is_binding(&tcx.def_map, pat) => { let typ = fcx.local_ty(pat.span, pat.id); diff --git a/src/test/run-pass/issue-18352.rs b/src/test/run-pass/issue-18352.rs new file mode 100644 index 0000000000000..7878d698e52ae --- /dev/null +++ b/src/test/run-pass/issue-18352.rs @@ -0,0 +1,22 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +const X: &'static str = "12345"; + +fn test(s: String) -> bool { + match s.as_slice() { + X => true, + _ => false + } +} + +fn main() { + assert!(test("12345".to_string())); +} From d257b376084ce18ae351e1f89989e36433515d24 Mon Sep 17 00:00:00 2001 From: John Kleint Date: Tue, 21 Oct 2014 23:30:09 -0400 Subject: [PATCH 40/47] Guide: motivate Box and Rc pointers with need, uses, benefits, and examples. Explain that Rust has different pointer types because there is a tradeoff between flexibility and efficiency. Motivate boxes as fixed-size containers of variable-sized objects. Clarify that Box and Rc are pointer types that you deref with * just like references. Stick to explaining the semantics and avoid implementation details. Scope isn't the most accurate framework to think about deallocation (since you return boxes and otherwise move values out of scopes); it's more "when the value is done being used," i.e., lifetime. Provide a connection between Rust's pointer types by locating them on a flexibiltiy / performance scale. Explain the compiler can't statically analyze lifetimes with multiple owners; hence the need for (runtime) reference counting. --- src/doc/guide.md | 122 ++++++++++++++++++++++++++++++----------------- 1 file changed, 78 insertions(+), 44 deletions(-) diff --git a/src/doc/guide.md b/src/doc/guide.md index c7b8e42b28cde..2a74eba0c3326 100644 --- a/src/doc/guide.md +++ b/src/doc/guide.md @@ -3637,40 +3637,72 @@ pub fn as_maybe_owned(&self) -> MaybeOwned<'a> { ... } ## Boxes -All of our references so far have been to variables we've created on the stack. 
-In Rust, the simplest way to allocate heap variables is using a *box*. To -create a box, use the `box` keyword: +Most of the types we've seen so far have a fixed size or number of components. +The compiler needs this fact to lay out values in memory. However, some data +structures, such as a linked list, do not have a fixed size. You might think to +implement a linked list with an enum that's either a `Node` or the end of the +list (`Nil`), like this: + +```{rust,ignore} +enum List { // error: illegal recursive enum type + Node(u32, List), + Nil +} +``` + +But the compiler complains that the type is recursive, that is, it could be +arbitrarily large. To remedy this, Rust provides a fixed-size container called +a **box** that can hold any type. You can box up any value with the `box` +keyword. Our boxed List gets the type `Box` (more on the notation when we +get to generics): ```{rust} -let x = box 5i; +enum List { + Node(u32, Box), + Nil +} + +fn main() { + let list = Node(0, box Node(1, box Nil)); +} ``` -This allocates an integer `5` on the heap, and creates a binding `x` that -refers to it. The great thing about boxed pointers is that we don't have to -manually free this allocation! If we write +A box dynamically allocates memory to hold its contents. The great thing about +Rust is that that memory is *automatically*, *efficiently*, and *predictably* +deallocated when you're done with the box. + +A box is a pointer type, and you access what's inside using the `*` operator, +just like regular references. This (rather silly) example dynamically allocates +an integer `5` and makes `x` a pointer to it: ```{rust} { let x = box 5i; - // do stuff + println!("{}", *x); // Prints 5 } ``` -then Rust will automatically free `x` at the end of the block. This isn't -because Rust has a garbage collector -- it doesn't. Instead, when `x` goes out -of scope, Rust `free`s `x`. This Rust code will do the same thing as the -following C code: +The great thing about boxes is that we don't have to manually free this +allocation! Instead, when `x` reaches the end of its lifetime -- in this case, +when it goes out of scope at the end of the block -- Rust `free`s `x`. This +isn't because Rust has a garbage collector (it doesn't). Instead, by tracking +the ownership and lifetime of a variable (with a little help from you, the +programmer), the compiler knows precisely when it is no longer used. + +The Rust code above will do the same thing as the following C code: ```{c,ignore} { int *x = (int *)malloc(sizeof(int)); - // do stuff + if (!x) abort(); + *x = 5; + printf("%d\n", *x); free(x); } ``` -This means we get the benefits of manual memory management, but the compiler -ensures that we don't do something wrong. We can't forget to `free` our memory. +We get the benefits of manual memory management, while ensuring we don't +introduce any bugs. We can't forget to `free` our memory. Boxes are the sole owner of their contents, so you cannot take a mutable reference to them and then use the original box: @@ -3706,48 +3738,50 @@ let mut x = box 5i; *x; ``` -## Rc and Arc - -Sometimes, you need to allocate something on the heap, but give out multiple -references to the memory. Rust's `Rc` (pronounced 'arr cee tee') and -`Arc` types (again, the `T` is for generics, we'll learn more later) provide -you with this ability. **Rc** stands for 'reference counted,' and **Arc** for -'atomically reference counted.' 
This is how Rust keeps track of the multiple -owners: every time we make a new reference to the `Rc`, we add one to its -internal 'reference count.' Every time a reference goes out of scope, we -subtract one from the count. When the count is zero, the `Rc` can be safely -deallocated. `Arc` is almost identical to `Rc`, except for one thing: The -'atomically' in 'Arc' means that increasing and decreasing the count uses a -thread-safe mechanism to do so. Why two types? `Rc` is faster, so if you're -not in a multi-threaded scenario, you can have that advantage. Since we haven't -talked about threading yet in Rust, we'll show you `Rc` for the rest of this -section. +Boxes are simple and efficient pointers to dynamically allocated values with a +single owner. They are useful for tree-like structures where the lifetime of a +child depends solely on the lifetime of its (single) parent. If you need a +value that must persist as long as any of several referrers, read on. -To create an `Rc`, use `Rc::new()`: +## Rc and Arc -```{rust} -use std::rc::Rc; +Sometimes, you need a variable that is referenced from multiple places +(immutably!), lasting as long as any of those places, and disappearing when it +is no longer referenced. For instance, in a graph-like data structure, a node +might be referenced from all of its neighbors. In this case, it is not possible +for the compiler to determine ahead of time when the value can be freed -- it +needs a little run-time support. -let x = Rc::new(5i); -``` +Rust's **Rc** type provides shared ownership of a dynamically allocated value +that is automatically freed at the end of its last owner's lifetime. (`Rc` +stands for 'reference counted,' referring to the way these library types are +implemented.) This provides more flexibility than single-owner boxes, but has +some runtime overhead. -To create a second reference, use the `.clone()` method: +To create an `Rc` value, use `Rc::new()`. To create a second owner, use the +`.clone()` method: ```{rust} use std::rc::Rc; let x = Rc::new(5i); let y = x.clone(); + +println!("{} {}", *x, *y); // Prints 5 5 ``` -The `Rc` will live as long as any of its references are alive. After they -all go out of scope, the memory will be `free`d. +The `Rc` will live as long as any of its owners are alive. After that, the +memory will be `free`d. + +**Arc** is an 'atomically reference counted' value, identical to `Rc` except +that ownership can be safely shared among multiple threads. Why two types? +`Arc` has more overhead, so if you're not in a multi-threaded scenario, you +don't have to pay the price. -If you use `Rc` or `Arc`, you have to be careful about introducing -cycles. If you have two `Rc`s that point to each other, the reference counts -will never drop to zero, and you'll have a memory leak. To learn more, check -out [the section on `Rc` and `Arc` in the pointers -guide](guide-pointers.html#rc-and-arc). +If you use `Rc` or `Arc`, you have to be careful about introducing cycles. If +you have two `Rc`s that point to each other, they will happily keep each other +alive forever, creating a memory leak. To learn more, check out [the section on +`Rc` and `Arc` in the pointers guide](guide-pointers.html#rc-and-arc). # Patterns From 8a4bd8427cb4d4334e649772b2b8fc8857fd0289 Mon Sep 17 00:00:00 2001 From: Colin Sherratt Date: Sat, 25 Oct 2014 23:38:27 -0400 Subject: [PATCH 41/47] Added Encodable and Decodable for Arc. 
--- src/libserialize/serialize.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/libserialize/serialize.rs b/src/libserialize/serialize.rs index 3bed4e4040b3a..0a040fff40d6a 100644 --- a/src/libserialize/serialize.rs +++ b/src/libserialize/serialize.rs @@ -17,6 +17,7 @@ Core encoding and decoding interfaces. use std::path; use std::rc::Rc; use std::cell::{Cell, RefCell}; +use std::sync::Arc; pub trait Encoder { // Primitive types: @@ -556,6 +557,18 @@ impl, T: Decodable> Decodable for RefCell { } } +impl, T:Encodable+Send+Sync> Encodable for Arc { + fn encode(&self, s: &mut S) -> Result<(), E> { + (**self).encode(s) + } +} + +impl,T:Decodable+Send+Sync> Decodable for Arc { + fn decode(d: &mut D) -> Result, E> { + Ok(Arc::new(try!(Decodable::decode(d)))) + } +} + // ___________________________________________________________________________ // Helper routines From e5f709079a2f9c5227e10f5f4cd0371a2fd76ae3 Mon Sep 17 00:00:00 2001 From: Kevin Mehall Date: Mon, 27 Oct 2014 00:11:26 -0700 Subject: [PATCH 42/47] Preserve struct field pattern shorthand in the prettyprinter. Use the `is_shorthand` field introduced by #17813 (ead6c4b) to make the prettyprinter output the shorthand form. Fixes a few places that set `is_shorthand: true` when the pattern is not a PatIdent with the same name as the field. --- src/librustc/middle/check_match.rs | 2 +- src/librustc/middle/const_eval.rs | 2 +- src/libsyntax/ext/deriving/generic/mod.rs | 2 +- src/libsyntax/print/pprust.rs | 6 ++++-- src/test/pretty/struct-pattern.rs | 15 +++++++++++++++ 5 files changed, 22 insertions(+), 5 deletions(-) create mode 100644 src/test/pretty/struct-pattern.rs diff --git a/src/librustc/middle/check_match.rs b/src/librustc/middle/check_match.rs index 315266dbc8406..fe38669ea6c29 100644 --- a/src/librustc/middle/check_match.rs +++ b/src/librustc/middle/check_match.rs @@ -421,7 +421,7 @@ fn construct_witness(cx: &MatchCheckCtxt, ctor: &Constructor, node: FieldPat { ident: Ident::new(field.name), pat: pat, - is_shorthand: true, + is_shorthand: false, } }).collect(); let has_more_fields = field_pats.len() < pats_len; diff --git a/src/librustc/middle/const_eval.rs b/src/librustc/middle/const_eval.rs index 9e2f78edb77f0..3d6b319ac0d62 100644 --- a/src/librustc/middle/const_eval.rs +++ b/src/librustc/middle/const_eval.rs @@ -341,7 +341,7 @@ pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: &Expr) -> P { node: FieldPat { ident: field.ident.node, pat: const_expr_to_pat(tcx, &*field.expr), - is_shorthand: true, + is_shorthand: false, }, }).collect(); PatStruct(path.clone(), field_pats, false) diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs index 533a28998bd1d..7c32b84550893 100644 --- a/src/libsyntax/ext/deriving/generic/mod.rs +++ b/src/libsyntax/ext/deriving/generic/mod.rs @@ -1250,7 +1250,7 @@ impl<'a> TraitDef<'a> { // id is guaranteed to be Some codemap::Spanned { span: pat.span, - node: ast::FieldPat { ident: id.unwrap(), pat: pat, is_shorthand: true }, + node: ast::FieldPat { ident: id.unwrap(), pat: pat, is_shorthand: false }, } }).collect(); cx.pat_struct(self.span, matching_path, field_pats) diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index b63f9b0120b9e..ed4a16da0138a 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -1983,8 +1983,10 @@ impl<'a> State<'a> { Consistent, fields.as_slice(), |s, f| { try!(s.cbox(indent_unit)); - try!(s.print_ident(f.node.ident)); - try!(s.word_nbsp(":")); + if 
!f.node.is_shorthand { + try!(s.print_ident(f.node.ident)); + try!(s.word_nbsp(":")); + } try!(s.print_pat(&*f.node.pat)); s.end() }, diff --git a/src/test/pretty/struct-pattern.rs b/src/test/pretty/struct-pattern.rs new file mode 100644 index 0000000000000..b0795bb08f361 --- /dev/null +++ b/src/test/pretty/struct-pattern.rs @@ -0,0 +1,15 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// pp-exact +// pretty-compare-only +// Testing that shorthand struct patterns are preserved + +fn main() { let Foo { a, ref b, mut c, x: y, z: z } = foo; } From 25650e0eeb92e1e495b7c628319e65cf09cd1d6f Mon Sep 17 00:00:00 2001 From: Daniel Hofstetter Date: Mon, 27 Oct 2014 15:41:24 +0100 Subject: [PATCH 43/47] Guide: Add missing "a" --- src/doc/guide-pointers.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/doc/guide-pointers.md b/src/doc/guide-pointers.md index dd9c687172247..87eb91d3ec7ff 100644 --- a/src/doc/guide-pointers.md +++ b/src/doc/guide-pointers.md @@ -416,7 +416,7 @@ great detail, so if you want the full details, check that out. In general, prefer stack allocation over heap allocation. Using references to stack allocated information is preferred whenever possible. Therefore, -references are the default pointer type you should use, unless you have +references are the default pointer type you should use, unless you have a specific reason to use a different type. The other types of pointers cover when they're appropriate to use in their own best practices sections. 
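That guidance is easy to see in a small sketch (not part of any patch in the series; the `sum`
function and the literals are invented for illustration, written in the pre-1.0 dialect the
guide itself uses). Borrowing a slice keeps ownership with the caller and involves no extra
allocation, which is exactly why references are the default:

```
fn sum(v: &[int]) -> int {
    let mut total = 0;
    for x in v.iter() {
        total += *x;
    }
    total
}

fn main() {
    let nums = vec![1i, 2, 3];

    // Lend `nums` out by reference: no Box, no Rc, and we still own it afterwards.
    println!("{}", sum(nums.as_slice()));
    println!("{} elements, still usable", nums.len());
}
```

Only when a borrow cannot express the lifetime you need, such as the shared graph nodes the
guide describes earlier, do the other pointer types earn their keep.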
From b8c4eb3a4e3bf53b220578d69d76a5a2e7f414f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adolfo=20Ochagav=C3=ADa?= Date: Mon, 27 Oct 2014 16:04:55 +0100 Subject: [PATCH 44/47] Fix undefined behavior in std::ascii Closes https://github.com/rust-lang/rust/issues/18314 --- src/libstd/ascii.rs | 23 +++++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/src/libstd/ascii.rs b/src/libstd/ascii.rs index 07be15486fdbc..f7a84fc3478b9 100644 --- a/src/libstd/ascii.rs +++ b/src/libstd/ascii.rs @@ -247,8 +247,7 @@ impl OwnedAsciiCast for String { #[inline] unsafe fn into_ascii_nocheck(self) -> Vec { - let v: Vec = mem::transmute(self); - v.into_ascii_nocheck() + self.into_bytes().into_ascii_nocheck() } } @@ -260,7 +259,14 @@ impl OwnedAsciiCast for Vec { #[inline] unsafe fn into_ascii_nocheck(self) -> Vec { - mem::transmute(self) + let v = Vec::from_raw_parts(self.len(), + self.capacity(), + mem::transmute(self.as_ptr())); + + // We forget `self` to avoid freeing it at the end of the scope + // Otherwise, the returned `Vec` would point to freed memory + mem::forget(self); + v } } @@ -338,7 +344,16 @@ pub trait IntoBytes { impl IntoBytes for Vec { fn into_bytes(self) -> Vec { - unsafe { mem::transmute(self) } + unsafe { + let v = Vec::from_raw_parts(self.len(), + self.capacity(), + mem::transmute(self.as_ptr())); + + // We forget `self` to avoid freeing it at the end of the scope + // Otherwise, the returned `Vec` would point to freed memory + mem::forget(self); + v + } } } From 9dab88712f68d79ec81d87deb433094a3b9d0013 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adolfo=20Ochagav=C3=ADa?= Date: Sun, 26 Oct 2014 16:29:27 +0100 Subject: [PATCH 45/47] Show a note when closure field is called as method Closes https://github.com/rust-lang/rust/issues/18343 --- src/librustc/middle/typeck/check/method.rs | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/src/librustc/middle/typeck/check/method.rs b/src/librustc/middle/typeck/check/method.rs index cb2f1e010ac6e..53a301cc94bc3 100644 --- a/src/librustc/middle/typeck/check/method.rs +++ b/src/librustc/middle/typeck/check/method.rs @@ -223,17 +223,37 @@ pub fn report_error(fcx: &FnCtxt, { match error { NoMatch(static_sources) => { + let cx = fcx.tcx(); + let method_ustring = method_name.user_string(cx); + + // True if the type is a struct and contains a field with + // the same name as the not-found method + let is_field = match ty::get(rcvr_ty).sty { + ty_struct(did, _) => + ty::lookup_struct_fields(cx, did) + .iter() + .any(|f| f.name.user_string(cx) == method_ustring), + _ => false + }; + fcx.type_error_message( span, |actual| { format!("type `{}` does not implement any \ method in scope named `{}`", actual, - method_name.user_string(fcx.tcx())) + method_ustring) }, rcvr_ty, None); + // If the method has the name of a field, give a help note + if is_field { + cx.sess.span_note(span, + format!("use `(s.{0})(...)` if you meant to call the \ + function stored in the `{0}` field", method_ustring).as_slice()); + } + if static_sources.len() > 0 { fcx.tcx().sess.fileline_note( span, From 2ce77b33b04c64d57bef696d88605d08dff72775 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adolfo=20Ochagav=C3=ADa?= Date: Mon, 27 Oct 2014 12:15:03 +0100 Subject: [PATCH 46/47] Add test for issue 18343 --- src/test/compile-fail/issue-18343.rs | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 src/test/compile-fail/issue-18343.rs diff --git a/src/test/compile-fail/issue-18343.rs 
b/src/test/compile-fail/issue-18343.rs new file mode 100644 index 0000000000000..1608d2137fc32 --- /dev/null +++ b/src/test/compile-fail/issue-18343.rs @@ -0,0 +1,19 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +struct Obj<'a> { + closure: ||: 'a -> u32 +} + +fn main() { + let o = Obj { closure: || 42 }; + o.closure(); //~ ERROR type `Obj<'_>` does not implement any method in scope named `closure` + //~^ NOTE use `(s.closure)(...)` if you meant to call the function stored in the `closure` field +} From a33d7617c5b5d18ff495fe6aa9108aa13939a114 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Mon, 27 Oct 2014 09:11:07 -0700 Subject: [PATCH 47/47] Test fixes and rebase conflicts from rollup --- src/libcollections/vec.rs | 2 +- src/libstd/ascii.rs | 12 ++++++------ src/test/{run-pass => compile-fail}/issue-17458.rs | 1 + src/test/run-pass/issue-16668.rs | 2 ++ 4 files changed, 10 insertions(+), 7 deletions(-) rename src/test/{run-pass => compile-fail}/issue-17458.rs (88%) diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs index c57a465df3780..765c9827cb79f 100644 --- a/src/libcollections/vec.rs +++ b/src/libcollections/vec.rs @@ -231,7 +231,7 @@ impl Vec { /// } /// /// // Put everything back together into a Vec - /// let rebuilt = Vec::from_raw_parts(len, cap, p); + /// let rebuilt = Vec::from_raw_parts(p, len, cap); /// assert_eq!(rebuilt, vec![4i, 5i, 6i]); /// } /// } diff --git a/src/libstd/ascii.rs b/src/libstd/ascii.rs index f7a84fc3478b9..c2e88bfdbcfb9 100644 --- a/src/libstd/ascii.rs +++ b/src/libstd/ascii.rs @@ -259,9 +259,9 @@ impl OwnedAsciiCast for Vec { #[inline] unsafe fn into_ascii_nocheck(self) -> Vec { - let v = Vec::from_raw_parts(self.len(), - self.capacity(), - mem::transmute(self.as_ptr())); + let v = Vec::from_raw_parts(self.as_ptr() as *mut Ascii, + self.len(), + self.capacity()); // We forget `self` to avoid freeing it at the end of the scope // Otherwise, the returned `Vec` would point to freed memory @@ -345,9 +345,9 @@ pub trait IntoBytes { impl IntoBytes for Vec { fn into_bytes(self) -> Vec { unsafe { - let v = Vec::from_raw_parts(self.len(), - self.capacity(), - mem::transmute(self.as_ptr())); + let v = Vec::from_raw_parts(self.as_ptr() as *mut u8, + self.len(), + self.capacity()); // We forget `self` to avoid freeing it at the end of the scope // Otherwise, the returned `Vec` would point to freed memory diff --git a/src/test/run-pass/issue-17458.rs b/src/test/compile-fail/issue-17458.rs similarity index 88% rename from src/test/run-pass/issue-17458.rs rename to src/test/compile-fail/issue-17458.rs index a32a31e97e3e0..b1fbe6f5549e4 100644 --- a/src/test/run-pass/issue-17458.rs +++ b/src/test/compile-fail/issue-17458.rs @@ -9,6 +9,7 @@ // except according to those terms. static X: uint = 0 as *const uint as uint; +//~^ ERROR: can not cast a pointer to an integer in a constant expression fn main() { assert_eq!(X, 0); diff --git a/src/test/run-pass/issue-16668.rs b/src/test/run-pass/issue-16668.rs index 1bfa79b8a110d..b66fb4306d029 100644 --- a/src/test/run-pass/issue-16668.rs +++ b/src/test/run-pass/issue-16668.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
+// ignore-pretty + #![feature(unboxed_closures)] struct Parser<'a, I, O> {
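One recurring fix in this final rollup patch is the argument order of `Vec::from_raw_parts`:
the pointer comes first, then the length, then the capacity. A self-contained sketch of the
corrected order (illustrative only, loosely adapted from the `vec.rs` doc comment touched
above, in the same pre-1.0 dialect):

```
use std::mem;

fn main() {
    let mut v = vec![1i, 2, 3];

    // Pull the vector apart into its raw pieces...
    let p = v.as_mut_ptr();
    let len = v.len();
    let cap = v.capacity();

    unsafe {
        // ...make sure the original destructor never runs...
        mem::forget(v);

        // ...and reassemble it: pointer first, then length, then capacity.
        let rebuilt = Vec::from_raw_parts(p, len, cap);
        assert_eq!(rebuilt, vec![1i, 2, 3]);
    }
}
```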