diff --git a/doc/rust.md b/doc/rust.md
index 503d1a1072b91..7a9d5d26b57af 100644
--- a/doc/rust.md
+++ b/doc/rust.md
@@ -3079,7 +3079,7 @@ A value of type `str` is a Unicode string,
 represented as a vector of 8-bit unsigned bytes holding a sequence of UTF-8 codepoints.
 Since `str` is of unknown size, it is not a _first class_ type,
 but can only be instantiated through a pointer type,
-such as `&str`, `@str` or `~str`.
+such as `&str` or `~str`.
 
 ### Tuple types
diff --git a/src/libextra/serialize.rs b/src/libextra/serialize.rs
index 020404057fb1f..fa2737ce75f87 100644
--- a/src/libextra/serialize.rs
+++ b/src/libextra/serialize.rs
@@ -310,18 +310,6 @@ impl Decodable for ~str {
     }
 }
 
-impl Encodable for @str {
-    fn encode(&self, s: &mut S) {
-        s.emit_str(*self)
-    }
-}
-
-impl Decodable for @str {
-    fn decode(d: &mut D) -> @str {
-        d.read_str().to_managed()
-    }
-}
-
 impl Encodable for f32 {
     fn encode(&self, s: &mut S) {
         s.emit_f32(*self)
diff --git a/src/librustc/back/link.rs b/src/librustc/back/link.rs
index a81302035ee1c..fc38fa25a2146 100644
--- a/src/librustc/back/link.rs
+++ b/src/librustc/back/link.rs
@@ -473,10 +473,10 @@ pub fn build_link_meta(sess: Session,
                        symbol_hasher: &mut Sha256) -> LinkMeta {
 
     // This calculates CMH as defined above
-    fn crate_hash(symbol_hasher: &mut Sha256, crateid: &CrateId) -> @str {
+    fn crate_hash(symbol_hasher: &mut Sha256, crateid: &CrateId) -> ~str {
         symbol_hasher.reset();
         symbol_hasher.input_str(crateid.to_str());
-        truncated_hash_result(symbol_hasher).to_managed()
+        truncated_hash_result(symbol_hasher)
     }
 
     let crateid = match attr::find_crateid(attrs) {
@@ -510,7 +510,8 @@ fn truncated_hash_result(symbol_hasher: &mut Sha256) -> ~str {
 pub fn symbol_hash(tcx: ty::ctxt,
                    symbol_hasher: &mut Sha256,
                    t: ty::t,
-                   link_meta: &LinkMeta) -> @str {
+                   link_meta: &LinkMeta)
+                   -> ~str {
     // NB: do *not* use abbrevs here as we want the symbol names
     // to be independent of one another in the crate.
@@ -523,15 +524,14 @@ pub fn symbol_hash(tcx: ty::ctxt,
     let mut hash = truncated_hash_result(symbol_hasher);
     // Prefix with 'h' so that it never blends into adjacent digits
     hash.unshift_char('h');
-    // tjc: allocation is unfortunate; need to change std::hash
-    hash.to_managed()
+    hash
 }
 
-pub fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> @str {
+pub fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> ~str {
     {
         let type_hashcodes = ccx.type_hashcodes.borrow();
         match type_hashcodes.get().find(&t) {
-            Some(&h) => return h,
+            Some(h) => return h.to_str(),
             None => {}
         }
     }
@@ -539,7 +539,7 @@ pub fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> @str {
     let mut type_hashcodes = ccx.type_hashcodes.borrow_mut();
     let mut symbol_hasher = ccx.symbol_hasher.borrow_mut();
     let hash = symbol_hash(ccx.tcx, symbol_hasher.get(), t, &ccx.link_meta);
-    type_hashcodes.get().insert(t, hash);
+    type_hashcodes.get().insert(t, hash.clone());
     hash
 }
@@ -963,7 +963,7 @@ fn link_staticlib(sess: Session, obj_filename: &Path, out_filename: &Path) {
     let crates = sess.cstore.get_used_crates(cstore::RequireStatic);
     for &(cnum, ref path) in crates.iter() {
-        let name = sess.cstore.get_crate_data(cnum).name;
+        let name = sess.cstore.get_crate_data(cnum).name.clone();
         let p = match *path {
             Some(ref p) => p.clone(),
             None => {
                 sess.err(format!("could not find rlib for: `{}`", name));
@@ -1221,7 +1221,7 @@ fn add_upstream_rust_crates(args: &mut ~[~str], sess: Session,
         // If we're not doing LTO, then our job is simply to just link
         // against the archive.
         if sess.lto() {
-            let name = sess.cstore.get_crate_data(cnum).name;
+            let name = sess.cstore.get_crate_data(cnum).name.clone();
             time(sess.time_passes(), format!("altering {}.rlib", name), (), |()| {
                 let dst = tmpdir.join(cratepath.filename().unwrap());
diff --git a/src/librustc/back/lto.rs b/src/librustc/back/lto.rs
index ced8fa68f59cd..3fbcd377b8b1c 100644
--- a/src/librustc/back/lto.rs
+++ b/src/librustc/back/lto.rs
@@ -42,7 +42,7 @@ pub fn run(sess: session::Session, llmod: ModuleRef,
     // module that we've got.
     let crates = sess.cstore.get_used_crates(cstore::RequireStatic);
     for (cnum, path) in crates.move_iter() {
-        let name = sess.cstore.get_crate_data(cnum).name;
+        let name = sess.cstore.get_crate_data(cnum).name.clone();
         let path = match path {
             Some(p) => p,
             None => {
diff --git a/src/librustc/driver/driver.rs b/src/librustc/driver/driver.rs
index 211d60f7e2d53..00b878a7bf742 100644
--- a/src/librustc/driver/driver.rs
+++ b/src/librustc/driver/driver.rs
@@ -44,6 +44,7 @@ use syntax::codemap;
 use syntax::diagnostic;
 use syntax::ext::base::CrateLoader;
 use syntax::parse;
+use syntax::parse::token::InternedString;
 use syntax::parse::token;
 use syntax::print::{pp, pprust};
 use syntax;
@@ -60,12 +61,14 @@ pub enum PpMode {
  * The name used for source code that doesn't originate in a file
  * (e.g. source from stdin or a string)
  */
-pub fn anon_src() -> @str { @"" }
+pub fn anon_src() -> ~str {
+    "".to_str()
+}
 
-pub fn source_name(input: &Input) -> @str {
+pub fn source_name(input: &Input) -> ~str {
     match *input {
         // FIXME (#9639): This needs to handle non-utf8 paths
-        FileInput(ref ifile) => ifile.as_str().unwrap().to_managed(),
+        FileInput(ref ifile) => ifile.as_str().unwrap().to_str(),
         StrInput(_) => anon_src()
     }
 }
@@ -73,39 +76,41 @@ pub fn source_name(input: &Input) -> @str {
 
 pub fn default_configuration(sess: Session) -> ast::CrateConfig {
     let tos = match sess.targ_cfg.os {
-        abi::OsWin32 => @"win32",
-        abi::OsMacos => @"macos",
-        abi::OsLinux => @"linux",
-        abi::OsAndroid => @"android",
-        abi::OsFreebsd => @"freebsd"
+        abi::OsWin32 => InternedString::new("win32"),
+        abi::OsMacos => InternedString::new("macos"),
+        abi::OsLinux => InternedString::new("linux"),
+        abi::OsAndroid => InternedString::new("android"),
+        abi::OsFreebsd => InternedString::new("freebsd"),
     };
 
     // ARM is bi-endian, however using NDK seems to default
     // to little-endian unless a flag is provided.
     let (end,arch,wordsz) = match sess.targ_cfg.arch {
-        abi::X86 => (@"little", @"x86", @"32"),
-        abi::X86_64 => (@"little", @"x86_64", @"64"),
-        abi::Arm => (@"little", @"arm", @"32"),
-        abi::Mips => (@"big", @"mips", @"32")
+        abi::X86 => ("little", "x86", "32"),
+        abi::X86_64 => ("little", "x86_64", "64"),
+        abi::Arm => ("little", "arm", "32"),
+        abi::Mips => ("big", "mips", "32")
     };
 
     let fam = match sess.targ_cfg.os {
-        abi::OsWin32 => @"windows",
-        _ => @"unix"
+        abi::OsWin32 => InternedString::new("windows"),
+        _ => InternedString::new("unix")
     };
 
     let mk = attr::mk_name_value_item_str;
     return ~[ // Target bindings.
- attr::mk_word_item(fam), - mk(@"target_os", tos), - mk(@"target_family", fam), - mk(@"target_arch", arch), - mk(@"target_endian", end), - mk(@"target_word_size", wordsz), + attr::mk_word_item(fam.clone()), + mk(InternedString::new("target_os"), tos), + mk(InternedString::new("target_family"), fam), + mk(InternedString::new("target_arch"), InternedString::new(arch)), + mk(InternedString::new("target_endian"), InternedString::new(end)), + mk(InternedString::new("target_word_size"), + InternedString::new(wordsz)), ]; } -pub fn append_configuration(cfg: &mut ast::CrateConfig, name: @str) { +pub fn append_configuration(cfg: &mut ast::CrateConfig, + name: InternedString) { if !cfg.iter().any(|mi| mi.name() == name) { cfg.push(attr::mk_word_item(name)) } @@ -118,9 +123,15 @@ pub fn build_configuration(sess: Session) -> let default_cfg = default_configuration(sess); let mut user_cfg = sess.opts.cfg.clone(); // If the user wants a test runner, then add the test cfg - if sess.opts.test { append_configuration(&mut user_cfg, @"test") } + if sess.opts.test { + append_configuration(&mut user_cfg, InternedString::new("test")) + } // If the user requested GC, then add the GC cfg - append_configuration(&mut user_cfg, if sess.opts.gc { @"gc" } else { @"nogc" }); + append_configuration(&mut user_cfg, if sess.opts.gc { + InternedString::new("gc") + } else { + InternedString::new("nogc") + }); return vec::append(user_cfg, default_cfg); } @@ -129,7 +140,7 @@ fn parse_cfgspecs(cfgspecs: ~[~str], demitter: @diagnostic::Emitter) -> ast::CrateConfig { cfgspecs.move_iter().map(|s| { let sess = parse::new_parse_sess(Some(demitter)); - parse::parse_meta_from_source_str(@"cfgspec", s.to_managed(), ~[], sess) + parse::parse_meta_from_source_str("cfgspec".to_str(), s, ~[], sess) }).collect::() } @@ -137,8 +148,7 @@ pub enum Input { /// Load source from file FileInput(Path), /// The string is the source - // FIXME (#2319): Don't really want to box the source string - StrInput(@str) + StrInput(~str) } pub fn phase_1_parse_input(sess: Session, cfg: ast::CrateConfig, input: &Input) @@ -148,9 +158,11 @@ pub fn phase_1_parse_input(sess: Session, cfg: ast::CrateConfig, input: &Input) FileInput(ref file) => { parse::parse_crate_from_file(&(*file), cfg.clone(), sess.parse_sess) } - StrInput(src) => { - parse::parse_crate_from_source_str( - anon_src(), src, cfg.clone(), sess.parse_sess) + StrInput(ref src) => { + parse::parse_crate_from_source_str(anon_src(), + (*src).clone(), + cfg.clone(), + sess.parse_sess) } } }) @@ -474,13 +486,13 @@ fn write_out_deps(sess: Session, input: &Input, outputs: &OutputFilenames, crate // Build a list of files used to compile the output and // write Makefile-compatible dependency rules - let files: ~[@str] = { + let files: ~[~str] = { let files = sess.codemap.files.borrow(); files.get() .iter() .filter_map(|fmap| { if fmap.is_real_file() { - Some(fmap.name) + Some(fmap.name.clone()) } else { None } @@ -615,7 +627,7 @@ pub fn pretty_print_input(sess: Session, _ => @pprust::NoAnn as @pprust::PpAnn, }; - let src = sess.codemap.get_filemap(source_name(input)).src; + let src = &sess.codemap.get_filemap(source_name(input)).src; let mut rdr = MemReader::new(src.as_bytes().to_owned()); let stdout = io::stdout(); pprust::print_crate(sess.codemap, @@ -1100,17 +1112,17 @@ pub fn build_output_filenames(input: &Input, let mut stem = match *input { // FIXME (#9639): This needs to handle non-utf8 paths - FileInput(ref ifile) => (*ifile).filestem_str().unwrap().to_managed(), - StrInput(_) => @"rust_out" + 
FileInput(ref ifile) => { + (*ifile).filestem_str().unwrap().to_str() + } + StrInput(_) => ~"rust_out" }; // If a crateid is present, we use it as the link name let crateid = attr::find_crateid(attrs); match crateid { None => {} - Some(crateid) => { - stem = crateid.name.to_managed() - } + Some(crateid) => stem = crateid.name.to_str(), } if sess.building_library.get() { diff --git a/src/librustc/driver/session.rs b/src/librustc/driver/session.rs index 5cda81836a44b..cebc25c4845d4 100644 --- a/src/librustc/driver/session.rs +++ b/src/librustc/driver/session.rs @@ -352,9 +352,11 @@ impl Session_ { self.debugging_opt(NO_LANDING_PADS) } - // pointless function, now... - pub fn str_of(&self, id: ast::Ident) -> @str { - token::ident_to_str(&id) + // DEPRECATED. This function results in a lot of allocations when they + // are not necessary. + pub fn str_of(&self, id: ast::Ident) -> ~str { + let string = token::get_ident(id.name); + string.get().to_str() } // pointless function, now... @@ -417,7 +419,12 @@ pub fn building_library(options: &Options, crate: &ast::Crate) -> bool { } } match syntax::attr::first_attr_value_str_by_name(crate.attrs, "crate_type") { - Some(s) => "lib" == s || "rlib" == s || "dylib" == s || "staticlib" == s, + Some(s) => { + s.equiv(&("lib")) || + s.equiv(&("rlib")) || + s.equiv(&("dylib")) || + s.equiv(&("staticlib")) + } _ => false } } @@ -435,16 +442,22 @@ pub fn collect_outputs(session: &Session, } let mut base = session.opts.outputs.clone(); let mut iter = attrs.iter().filter_map(|a| { - if "crate_type" == a.name() { + if a.name().equiv(&("crate_type")) { match a.value_str() { - Some(n) if "rlib" == n => Some(OutputRlib), - Some(n) if "dylib" == n => Some(OutputDylib), - Some(n) if "lib" == n => Some(default_lib_output()), - Some(n) if "staticlib" == n => Some(OutputStaticlib), - Some(n) if "bin" == n => Some(OutputExecutable), + Some(ref n) if n.equiv(&("rlib")) => Some(OutputRlib), + Some(ref n) if n.equiv(&("dylib")) => Some(OutputDylib), + Some(ref n) if n.equiv(&("lib")) => { + Some(default_lib_output()) + } + Some(ref n) if n.equiv(&("staticlib")) => { + Some(OutputStaticlib) + } + Some(ref n) if n.equiv(&("bin")) => Some(OutputExecutable), Some(_) => { - session.add_lint(lint::UnknownCrateType, ast::CRATE_NODE_ID, - a.span, ~"invalid `crate_type` value"); + session.add_lint(lint::UnknownCrateType, + ast::CRATE_NODE_ID, + a.span, + ~"invalid `crate_type` value"); None } _ => { diff --git a/src/librustc/front/feature_gate.rs b/src/librustc/front/feature_gate.rs index bfb2759410854..de5a7b7fd889f 100644 --- a/src/librustc/front/feature_gate.rs +++ b/src/librustc/front/feature_gate.rs @@ -98,7 +98,8 @@ impl Context { impl Visitor<()> for Context { fn visit_ident(&mut self, sp: Span, id: ast::Ident, _: ()) { - let s = token::ident_to_str(&id); + let string = token::get_ident(id.name); + let s = string.get(); if !s.is_ascii() { self.gate_feature("non_ascii_idents", sp, @@ -122,7 +123,7 @@ impl Visitor<()> for Context { } ast::ViewItemExternMod(..) 
=> { for attr in i.attrs.iter() { - if "phase" == attr.name() { + if attr.name().get() == "phase"{ self.gate_feature("phase", attr.span, "compile time crate loading is \ experimental and possibly buggy"); @@ -135,7 +136,7 @@ impl Visitor<()> for Context { fn visit_item(&mut self, i: &ast::Item, _:()) { for attr in i.attrs.iter() { - if "thread_local" == attr.name() { + if attr.name().equiv(&("thread_local")) { self.gate_feature("thread_local", i.span, "`#[thread_local]` is an experimental feature, and does not \ currently handle destructors. There is no corresponding \ @@ -258,7 +259,9 @@ pub fn check_crate(sess: Session, crate: &ast::Crate) { }; for attr in crate.attrs.iter() { - if "feature" != attr.name() { continue } + if !attr.name().equiv(&("feature")) { + continue + } match attr.meta_item_list() { None => { @@ -268,14 +271,16 @@ pub fn check_crate(sess: Session, crate: &ast::Crate) { Some(list) => { for &mi in list.iter() { let name = match mi.node { - ast::MetaWord(word) => word, + ast::MetaWord(ref word) => (*word).clone(), _ => { - sess.span_err(mi.span, "malformed feature, expected \ - just one word"); + sess.span_err(mi.span, + "malformed feature, expected just \ + one word"); continue } }; - match KNOWN_FEATURES.iter().find(|& &(n, _)| n == name) { + match KNOWN_FEATURES.iter() + .find(|& &(n, _)| name.equiv(&n)) { Some(&(name, Active)) => { cx.features.push(name); } Some(&(_, Removed)) => { sess.span_err(mi.span, "feature has been removed"); diff --git a/src/librustc/front/std_inject.rs b/src/librustc/front/std_inject.rs index 71a82536aee0c..4eb36b0f3fbd1 100644 --- a/src/librustc/front/std_inject.rs +++ b/src/librustc/front/std_inject.rs @@ -19,6 +19,8 @@ use syntax::codemap; use syntax::fold::Folder; use syntax::fold; use syntax::opt_vec; +use syntax::parse::token::InternedString; +use syntax::parse::token; use syntax::util::small_vector::SmallVector; pub static VERSION: &'static str = "0.10-pre"; @@ -56,11 +58,13 @@ struct StandardLibraryInjector { sess: Session, } -pub fn with_version(crate: &str) -> Option<(@str, ast::StrStyle)> { +pub fn with_version(crate: &str) -> Option<(InternedString, ast::StrStyle)> { match option_env!("CFG_DISABLE_INJECT_STD_VERSION") { Some("1") => None, _ => { - Some((format!("{}\\#{}", crate, VERSION).to_managed(), + Some((token::intern_and_get_ident(format!("{}\\#{}", + crate, + VERSION)), ast::CookedStr)) } } @@ -73,9 +77,12 @@ impl fold::Folder for StandardLibraryInjector { with_version("std"), ast::DUMMY_NODE_ID), attrs: ~[ - attr::mk_attr(attr::mk_list_item(@"phase", - ~[attr::mk_word_item(@"syntax"), - attr::mk_word_item(@"link")])) + attr::mk_attr(attr::mk_list_item( + InternedString::new("phase"), + ~[ + attr::mk_word_item(InternedString::new("syntax")), + attr::mk_word_item(InternedString::new("link") + )])) ], vis: ast::Inherited, span: DUMMY_SP diff --git a/src/librustc/front/test.rs b/src/librustc/front/test.rs index 0714c1c620bb1..2704e828ea463 100644 --- a/src/librustc/front/test.rs +++ b/src/librustc/front/test.rs @@ -27,6 +27,8 @@ use syntax::ext::base::ExtCtxt; use syntax::fold::Folder; use syntax::fold; use syntax::opt_vec; +use syntax::parse::token::InternedString; +use syntax::parse::token; use syntax::print::pprust; use syntax::{ast, ast_util}; use syntax::util::small_vector::SmallVector; @@ -132,7 +134,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> { if !cx.sess.building_library.get() { @ast::Item { attrs: item.attrs.iter().filter_map(|attr| { - if "main" != attr.name() { + if !attr.name().equiv(&("main")) { 
Some(*attr) } else { None @@ -169,7 +171,7 @@ fn generate_test_harness(sess: session::Session, crate: ast::Crate) cx.ext_cx.bt_push(ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { - name: @"test", + name: ~"test", format: MacroAttribute, span: None } @@ -248,7 +250,7 @@ fn is_bench_fn(i: @ast::Item) -> bool { fn is_ignored(cx: &TestCtxt, i: @ast::Item) -> bool { i.attrs.iter().any(|attr| { // check ignore(cfg(foo, bar)) - "ignore" == attr.name() && match attr.meta_item_list() { + attr.name().equiv(&("ignore")) && match attr.meta_item_list() { Some(ref cfgs) => attr::test_cfg(cx.config, cfgs.iter().map(|x| *x)), None => true } @@ -330,8 +332,9 @@ fn mk_test_module(cx: &TestCtxt) -> @ast::Item { let item_ = ast::ItemMod(testmod); // This attribute tells resolve to let us call unexported functions + let resolve_unexported_str = InternedString::new("!resolve_unexported"); let resolve_unexported_attr = - attr::mk_attr(attr::mk_word_item(@"!resolve_unexported")); + attr::mk_attr(attr::mk_word_item(resolve_unexported_str)); let item = ast::Item { ident: cx.sess.ident_of("__test"), @@ -424,7 +427,8 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::Expr { debug!("encoding {}", ast_util::path_name_i(path)); let name_lit: ast::Lit = - nospan(ast::LitStr(ast_util::path_name_i(path).to_managed(), ast::CookedStr)); + nospan(ast::LitStr(token::intern_and_get_ident( + ast_util::path_name_i(path)), ast::CookedStr)); let name_expr = @ast::Expr { id: ast::DUMMY_NODE_ID, diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index c840faecb559b..7ba96516bf91b 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -235,9 +235,10 @@ pub fn run_compiler(args: &[~str], demitter: @diagnostic::Emitter) { 0u => d::early_error(demitter, "no input filename given"), 1u => { let ifile = matches.free[0].as_slice(); - if "-" == ifile { - let src = str::from_utf8_owned(io::stdin().read_to_end()).unwrap(); - (d::StrInput(src.to_managed()), None) + if ifile == "-" { + let src = + str::from_utf8_owned(io::stdin().read_to_end()).unwrap(); + (d::StrInput(src), None) } else { (d::FileInput(Path::new(ifile)), Some(Path::new(ifile))) } @@ -319,9 +320,11 @@ fn parse_crate_attrs(sess: session::Session, d::FileInput(ref ifile) => { parse::parse_crate_attrs_from_file(ifile, ~[], sess.parse_sess) } - d::StrInput(src) => { - parse::parse_crate_attrs_from_source_str( - d::anon_src(), src, ~[], sess.parse_sess) + d::StrInput(ref src) => { + parse::parse_crate_attrs_from_source_str(d::anon_src(), + (*src).clone(), + ~[], + sess.parse_sess) } } } diff --git a/src/librustc/metadata/common.rs b/src/librustc/metadata/common.rs index e8cfa97c0e1c8..e9732f5c9960d 100644 --- a/src/librustc/metadata/common.rs +++ b/src/librustc/metadata/common.rs @@ -210,5 +210,5 @@ pub static tag_macro_def: uint = 0x112; #[deriving(Clone)] pub struct LinkMeta { crateid: CrateId, - crate_hash: @str, + crate_hash: ~str, } diff --git a/src/librustc/metadata/creader.rs b/src/librustc/metadata/creader.rs index fa2e94b6f8f20..9c2c5a5745848 100644 --- a/src/librustc/metadata/creader.rs +++ b/src/librustc/metadata/creader.rs @@ -27,8 +27,8 @@ use syntax::attr::AttrMetaMethods; use syntax::codemap::{Span, DUMMY_SP}; use syntax::diagnostic::SpanHandler; use syntax::ext::base::{CrateLoader, MacroCrate}; +use syntax::parse::token::{IdentInterner, InternedString}; use syntax::parse::token; -use syntax::parse::token::IdentInterner; use syntax::crateid::CrateId; use syntax::visit; @@ -76,7 +76,7 @@ impl<'a> visit::Visitor<()> for 
ReadCrateVisitor<'a> { struct cache_entry { cnum: ast::CrateNum, span: Span, - hash: @str, + hash: ~str, crateid: CrateId, } @@ -124,19 +124,17 @@ struct Env { fn visit_crate(e: &Env, c: &ast::Crate) { let cstore = e.sess.cstore; - for a in c.attrs.iter().filter(|m| "link_args" == m.name()) { + for a in c.attrs.iter().filter(|m| m.name().equiv(&("link_args"))) { match a.value_str() { - Some(ref linkarg) => { - cstore.add_used_link_args(*linkarg); - } - None => {/* fallthrough */ } + Some(ref linkarg) => cstore.add_used_link_args(linkarg.get()), + None => { /* fallthrough */ } } } } fn visit_view_item(e: &mut Env, i: &ast::ViewItem) { let should_load = i.attrs.iter().all(|attr| { - "phase" != attr.name() || + attr.name().get() != "phase" || attr.meta_item_list().map_or(false, |phases| { attr::contains_name(phases, "link") }) @@ -148,8 +146,12 @@ fn visit_view_item(e: &mut Env, i: &ast::ViewItem) { match extract_crate_info(i) { Some(info) => { - let cnum = resolve_crate(e, info.ident, info.name, info.version, - @"", i.span); + let cnum = resolve_crate(e, + info.ident.clone(), + info.name.clone(), + info.version.clone(), + ~"", + i.span); e.sess.cstore.add_extern_mod_stmt_cnum(info.id, cnum); } None => () @@ -157,36 +159,36 @@ fn visit_view_item(e: &mut Env, i: &ast::ViewItem) { } struct CrateInfo { - ident: @str, - name: @str, - version: @str, + ident: ~str, + name: ~str, + version: ~str, id: ast::NodeId, } fn extract_crate_info(i: &ast::ViewItem) -> Option { match i.node { - ast::ViewItemExternMod(ident, path_opt, id) => { - let ident = token::ident_to_str(&ident); + ast::ViewItemExternMod(ref ident, ref path_opt, id) => { + let ident = token::get_ident(ident.name); debug!("resolving extern mod stmt. ident: {:?} path_opt: {:?}", - ident, path_opt); - let (name, version) = match path_opt { - Some((path_str, _)) => { - let crateid: Option = from_str(path_str); + ident.get(), path_opt); + let (name, version) = match *path_opt { + Some((ref path_str, _)) => { + let crateid: Option = from_str(path_str.get()); match crateid { - None => (@"", @""), + None => (~"", ~""), Some(crateid) => { let version = match crateid.version { - None => @"", - Some(ref ver) => ver.to_managed(), + None => ~"", + Some(ref ver) => ver.to_str(), }; - (crateid.name.to_managed(), version) + (crateid.name.to_str(), version) } } } - None => (ident, @""), + None => (ident.get().to_str(), ~""), }; Some(CrateInfo { - ident: ident, + ident: ident.get().to_str(), name: name, version: version, id: id, @@ -206,13 +208,15 @@ fn visit_item(e: &Env, i: &ast::Item) { // First, add all of the custom link_args attributes let cstore = e.sess.cstore; let link_args = i.attrs.iter() - .filter_map(|at| if "link_args" == at.name() {Some(at)} else {None}) + .filter_map(|at| if at.name().equiv(&("link_args")) { + Some(at) + } else { + None + }) .to_owned_vec(); for m in link_args.iter() { match m.value_str() { - Some(linkarg) => { - cstore.add_used_link_args(linkarg); - } + Some(linkarg) => cstore.add_used_link_args(linkarg.get()), None => { /* fallthrough */ } } } @@ -220,22 +224,26 @@ fn visit_item(e: &Env, i: &ast::Item) { // Next, process all of the #[link(..)]-style arguments let cstore = e.sess.cstore; let link_args = i.attrs.iter() - .filter_map(|at| if "link" == at.name() {Some(at)} else {None}) + .filter_map(|at| if at.name().equiv(&("link")) { + Some(at) + } else { + None + }) .to_owned_vec(); for m in link_args.iter() { match m.meta_item_list() { Some(items) => { let kind = items.iter().find(|k| { - "kind" == k.name() + 
k.name().equiv(&("kind")) }).and_then(|a| a.value_str()); let kind = match kind { Some(k) => { - if "static" == k { + if k.equiv(&("static")) { cstore::NativeStatic } else if e.sess.targ_cfg.os == abi::OsMacos && - "framework" == k { + k.equiv(&("framework")) { cstore::NativeFramework - } else if "framework" == k { + } else if k.equiv(&("framework")) { e.sess.span_err(m.span, "native frameworks are only available \ on OSX targets"); @@ -249,7 +257,7 @@ fn visit_item(e: &Env, i: &ast::Item) { None => cstore::NativeUnknown }; let n = items.iter().find(|n| { - "name" == n.name() + n.name().equiv(&("name")) }).and_then(|a| a.value_str()); let n = match n { Some(n) => n, @@ -257,13 +265,13 @@ fn visit_item(e: &Env, i: &ast::Item) { e.sess.span_err(m.span, "#[link(...)] specified without \ `name = \"foo\"`"); - @"foo" + InternedString::new("foo") } }; - if n.is_empty() { + if n.get().is_empty() { e.sess.span_err(m.span, "#[link(name = \"\")] given with empty name"); } else { - cstore.add_used_library(n.to_owned(), kind); + cstore.add_used_library(n.get().to_owned(), kind); } } None => {} @@ -274,14 +282,14 @@ fn visit_item(e: &Env, i: &ast::Item) { } } -fn existing_match(e: &Env, name: @str, version: @str, hash: &str) -> Option { +fn existing_match(e: &Env, name: ~str, version: ~str, hash: &str) -> Option { let crate_cache = e.crate_cache.borrow(); for c in crate_cache.get().iter() { let crateid_version = match c.crateid.version { - None => @"0.0", - Some(ref ver) => ver.to_managed(), + None => ~"0.0", + Some(ref ver) => ver.to_str(), }; - if (name.is_empty() || c.crateid.name.to_managed() == name) && + if (name.is_empty() || c.crateid.name == name) && (version.is_empty() || crateid_version == version) && (hash.is_empty() || c.hash.as_slice() == hash) { return Some(c.cnum); @@ -291,19 +299,19 @@ fn existing_match(e: &Env, name: @str, version: @str, hash: &str) -> Option ast::CrateNum { - match existing_match(e, name, version, hash) { + match existing_match(e, name.clone(), version.clone(), hash.clone()) { None => { let load_ctxt = loader::Context { sess: e.sess, span: span, ident: ident, - name: name, + name: name.clone(), version: version, hash: hash, os: e.os, @@ -364,10 +372,13 @@ fn resolve_crate_deps(e: &mut Env, cdata: &[u8]) -> cstore::cnum_map { let r = decoder::get_crate_deps(cdata); for dep in r.iter() { let extrn_cnum = dep.cnum; - let cname_str = token::ident_to_str(&dep.name); + let cname_str = token::get_ident(dep.name.name); debug!("resolving dep crate {} ver: {} hash: {}", cname_str, dep.vers, dep.hash); - match existing_match(e, cname_str, dep.vers, dep.hash) { + match existing_match(e, + cname_str.get().to_str(), + dep.vers.clone(), + dep.hash.clone()) { Some(local_cnum) => { debug!("already have it"); // We've already seen this crate @@ -379,8 +390,12 @@ fn resolve_crate_deps(e: &mut Env, cdata: &[u8]) -> cstore::cnum_map { // FIXME (#2404): Need better error reporting than just a bogus // span. 
let fake_span = DUMMY_SP; - let local_cnum = resolve_crate(e, cname_str, cname_str, dep.vers, - dep.hash, fake_span); + let local_cnum = resolve_crate(e, + cname_str.get().to_str(), + cname_str.get().to_str(), + dep.vers.clone(), + dep.hash.clone(), + fake_span); cnum_map.insert(extrn_cnum, local_cnum); } } @@ -411,8 +426,12 @@ impl Loader { impl CrateLoader for Loader { fn load_crate(&mut self, crate: &ast::ViewItem) -> MacroCrate { let info = extract_crate_info(crate).unwrap(); - let cnum = resolve_crate(&mut self.env, info.ident, info.name, - info.version, @"", crate.span); + let cnum = resolve_crate(&mut self.env, + info.ident.clone(), + info.name.clone(), + info.version.clone(), + ~"", + crate.span); let library = self.env.sess.cstore.get_used_crate_source(cnum).unwrap(); MacroCrate { lib: library.dylib, diff --git a/src/librustc/metadata/cstore.rs b/src/librustc/metadata/cstore.rs index 024e214a2fe71..45eccc94ed800 100644 --- a/src/librustc/metadata/cstore.rs +++ b/src/librustc/metadata/cstore.rs @@ -32,7 +32,7 @@ pub enum MetadataBlob { } pub struct crate_metadata { - name: @str, + name: ~str, data: MetadataBlob, cnum_map: cnum_map, cnum: ast::CrateNum @@ -89,12 +89,12 @@ impl CStore { *metas.get().get(&cnum) } - pub fn get_crate_hash(&self, cnum: ast::CrateNum) -> @str { + pub fn get_crate_hash(&self, cnum: ast::CrateNum) -> ~str { let cdata = self.get_crate_data(cnum); decoder::get_crate_hash(cdata.data()) } - pub fn get_crate_vers(&self, cnum: ast::CrateNum) -> @str { + pub fn get_crate_vers(&self, cnum: ast::CrateNum) -> ~str { let cdata = self.get_crate_data(cnum); decoder::get_crate_vers(cdata.data()) } @@ -192,7 +192,7 @@ impl CStore { // returns hashes of crates directly used by this crate. Hashes are sorted by // (crate name, crate version, crate hash) in lexicographic order (not semver) - pub fn get_dep_hashes(&self) -> ~[@str] { + pub fn get_dep_hashes(&self) -> ~[~str] { let mut result = ~[]; let extern_mod_crate_map = self.extern_mod_crate_map.borrow(); @@ -202,7 +202,7 @@ impl CStore { let vers = decoder::get_crate_vers(cdata.data()); debug!("Add hash[{}]: {} {}", cdata.name, vers, hash); result.push(crate_hash { - name: cdata.name, + name: cdata.name.clone(), vers: vers, hash: hash }); @@ -215,15 +215,23 @@ impl CStore { debug!(" hash[{}]: {}", x.name, x.hash); } - result.map(|ch| ch.hash) + let mut hashes = ~[]; + for ch in result.move_iter() { + let crate_hash { + hash, + .. 
+ } = ch; + hashes.push(hash) + } + hashes } } #[deriving(Clone, TotalEq, TotalOrd)] struct crate_hash { - name: @str, - vers: @str, - hash: @str, + name: ~str, + vers: ~str, + hash: ~str, } impl crate_metadata { diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs index 11fab9cced7f7..7e866b326319b 100644 --- a/src/librustc/metadata/decoder.rs +++ b/src/librustc/metadata/decoder.rs @@ -1042,15 +1042,15 @@ fn get_meta_items(md: ebml::Doc) -> ~[@ast::MetaItem] { let mut items: ~[@ast::MetaItem] = ~[]; reader::tagged_docs(md, tag_meta_item_word, |meta_item_doc| { let nd = reader::get_doc(meta_item_doc, tag_meta_item_name); - let n = nd.as_str_slice().to_managed(); + let n = token::intern_and_get_ident(nd.as_str_slice()); items.push(attr::mk_word_item(n)); true }); reader::tagged_docs(md, tag_meta_item_name_value, |meta_item_doc| { let nd = reader::get_doc(meta_item_doc, tag_meta_item_name); let vd = reader::get_doc(meta_item_doc, tag_meta_item_value); - let n = nd.as_str_slice().to_managed(); - let v = vd.as_str_slice().to_managed(); + let n = token::intern_and_get_ident(nd.as_str_slice()); + let v = token::intern_and_get_ident(vd.as_str_slice()); // FIXME (#623): Should be able to decode MetaNameValue variants, // but currently the encoder just drops them items.push(attr::mk_name_value_item_str(n, v)); @@ -1058,7 +1058,7 @@ fn get_meta_items(md: ebml::Doc) -> ~[@ast::MetaItem] { }); reader::tagged_docs(md, tag_meta_item_list, |meta_item_doc| { let nd = reader::get_doc(meta_item_doc, tag_meta_item_name); - let n = nd.as_str_slice().to_managed(); + let n = token::intern_and_get_ident(nd.as_str_slice()); let subitems = get_meta_items(meta_item_doc); items.push(attr::mk_list_item(n, subitems)); true @@ -1113,8 +1113,8 @@ pub fn get_crate_attributes(data: &[u8]) -> ~[ast::Attribute] { pub struct CrateDep { cnum: ast::CrateNum, name: ast::Ident, - vers: @str, - hash: @str + vers: ~str, + hash: ~str } pub fn get_crate_deps(data: &[u8]) -> ~[CrateDep] { @@ -1122,9 +1122,9 @@ pub fn get_crate_deps(data: &[u8]) -> ~[CrateDep] { let cratedoc = reader::Doc(data); let depsdoc = reader::get_doc(cratedoc, tag_crate_deps); let mut crate_num = 1; - fn docstr(doc: ebml::Doc, tag_: uint) -> @str { + fn docstr(doc: ebml::Doc, tag_: uint) -> ~str { let d = reader::get_doc(doc, tag_); - d.as_str_slice().to_managed() + d.as_str_slice().to_str() } reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| { deps.push(CrateDep {cnum: crate_num, @@ -1142,24 +1142,29 @@ fn list_crate_deps(data: &[u8], out: &mut io::Writer) { let r = get_crate_deps(data); for dep in r.iter() { - write!(out, "{} {}-{}-{}\n", - dep.cnum, token::ident_to_str(&dep.name), dep.hash, dep.vers); + let string = token::get_ident(dep.name.name); + write!(out, + "{} {}-{}-{}\n", + dep.cnum, + string.get(), + dep.hash, + dep.vers); } write!(out, "\n"); } -pub fn get_crate_hash(data: &[u8]) -> @str { +pub fn get_crate_hash(data: &[u8]) -> ~str { let cratedoc = reader::Doc(data); let hashdoc = reader::get_doc(cratedoc, tag_crate_hash); - hashdoc.as_str_slice().to_managed() + hashdoc.as_str_slice().to_str() } -pub fn get_crate_vers(data: &[u8]) -> @str { +pub fn get_crate_vers(data: &[u8]) -> ~str { let attrs = decoder::get_crate_attributes(data); match attr::find_crateid(attrs) { - None => @"0.0", - Some(crateid) => crateid.version_or_default().to_managed(), + None => ~"0.0", + Some(crateid) => crateid.version_or_default().to_str(), } } diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index 
ac3ee78fb8642..c7ba3ab01e965 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -21,29 +21,28 @@ use middle::ty; use middle::typeck; use middle; +use extra::serialize::Encodable; use std::cast; use std::cell::{Cell, RefCell}; use std::hashmap::{HashMap, HashSet}; use std::io::MemWriter; use std::str; use std::vec; - -use extra::serialize::Encodable; - use syntax::abi::AbiSet; use syntax::ast::*; use syntax::ast; use syntax::ast_map; use syntax::ast_util::*; -use syntax::attr; +use syntax::ast_util; use syntax::attr::AttrMetaMethods; +use syntax::attr; use syntax::codemap; use syntax::diagnostic::SpanHandler; +use syntax::parse::token::InternedString; use syntax::parse::token::special_idents; -use syntax::ast_util; +use syntax::parse::token; use syntax::visit::Visitor; use syntax::visit; -use syntax::parse::token; use syntax; use writer = extra::ebml::writer; @@ -491,7 +490,7 @@ fn encode_reexported_static_methods(ecx: &EncodeContext, exp: &middle::resolve::Export2) { match ecx.tcx.items.find(exp.def_id.node) { Some(ast_map::NodeItem(item, path)) => { - let original_name = ecx.tcx.sess.str_of(item.ident); + let original_name = token::get_ident(item.ident.name); // // We don't need to reexport static methods on items @@ -503,7 +502,7 @@ fn encode_reexported_static_methods(ecx: &EncodeContext, // encoded metadata for static methods relative to Bar, // but not yet for Foo. // - if mod_path != *path || exp.name != original_name { + if mod_path != *path || original_name.get() != exp.name { if !encode_reexported_static_base_methods(ecx, ebml_w, exp) { if encode_reexported_static_trait_methods(ecx, ebml_w, exp) { debug!("(encode reexported static methods) {} \ @@ -1351,11 +1350,10 @@ fn my_visit_foreign_item(ni: &ForeignItem, index: @RefCell<~[entry]>) { match items.get(ni.id) { ast_map::NodeForeignItem(_, abi, _, pt) => { + let string = token::get_ident(ni.ident.name); debug!("writing foreign item {}::{}", - ast_map::path_to_str( - *pt, - token::get_ident_interner()), - token::ident_to_str(&ni.ident)); + ast_map::path_to_str(*pt, token::get_ident_interner()), + string.get()); let mut ebml_w = unsafe { ebml_w.unsafe_clone() @@ -1507,32 +1505,32 @@ fn write_i64(writer: &mut MemWriter, &n: &i64) { fn encode_meta_item(ebml_w: &mut writer::Encoder, mi: @MetaItem) { match mi.node { - MetaWord(name) => { + MetaWord(ref name) => { ebml_w.start_tag(tag_meta_item_word); ebml_w.start_tag(tag_meta_item_name); - ebml_w.writer.write(name.as_bytes()); + ebml_w.writer.write(name.get().as_bytes()); ebml_w.end_tag(); ebml_w.end_tag(); } - MetaNameValue(name, value) => { + MetaNameValue(ref name, ref value) => { match value.node { - LitStr(value, _) => { + LitStr(ref value, _) => { ebml_w.start_tag(tag_meta_item_name_value); ebml_w.start_tag(tag_meta_item_name); - ebml_w.writer.write(name.as_bytes()); + ebml_w.writer.write(name.get().as_bytes()); ebml_w.end_tag(); ebml_w.start_tag(tag_meta_item_value); - ebml_w.writer.write(value.as_bytes()); + ebml_w.writer.write(value.get().as_bytes()); ebml_w.end_tag(); ebml_w.end_tag(); } _ => {/* FIXME (#623): encode other variants */ } } } - MetaList(name, ref items) => { + MetaList(ref name, ref items) => { ebml_w.start_tag(tag_meta_item_list); ebml_w.start_tag(tag_meta_item_name); - ebml_w.writer.write(name.as_bytes()); + ebml_w.writer.write(name.get().as_bytes()); ebml_w.end_tag(); for inner_item in items.iter() { encode_meta_item(ebml_w, *inner_item); @@ -1563,13 +1561,13 @@ fn synthesize_crate_attrs(ecx: &EncodeContext, attr::mk_attr( 
attr::mk_name_value_item_str( - @"crate_id", - ecx.link_meta.crateid.to_str().to_managed())) + InternedString::new("crate_id"), + token::intern_and_get_ident(ecx.link_meta.crateid.to_str()))) } let mut attrs = ~[]; for attr in crate.attrs.iter() { - if "crate_id" != attr.name() { + if !attr.name().equiv(&("crate_id")) { attrs.push(*attr); } } @@ -1615,7 +1613,7 @@ fn encode_crate_deps(ecx: &EncodeContext, ebml_w.start_tag(tag_crate_deps); let r = get_ordered_deps(ecx, cstore); for dep in r.iter() { - encode_crate_dep(ecx, ebml_w, *dep); + encode_crate_dep(ecx, ebml_w, (*dep).clone()); } ebml_w.end_tag(); } diff --git a/src/librustc/metadata/loader.rs b/src/librustc/metadata/loader.rs index 046184bef58bb..8e557560b95f1 100644 --- a/src/librustc/metadata/loader.rs +++ b/src/librustc/metadata/loader.rs @@ -46,10 +46,10 @@ pub enum Os { pub struct Context { sess: Session, span: Span, - ident: @str, - name: @str, - version: @str, - hash: @str, + ident: ~str, + name: ~str, + version: ~str, + hash: ~str, os: Os, intr: @IdentInterner } @@ -80,7 +80,7 @@ impl Context { fn find_library_crate(&self) -> Option { let filesearch = self.sess.filesearch; - let crate_name = self.name; + let crate_name = self.name.clone(); let (dyprefix, dysuffix) = self.dylibname(); // want: crate_name.dir_part() + prefix + crate_name.file_part + "-" @@ -109,8 +109,10 @@ impl Context { } else if candidate { match get_metadata_section(self.os, path) { Some(cvec) => - if crate_matches(cvec.as_slice(), self.name, - self.version, self.hash) { + if crate_matches(cvec.as_slice(), + self.name.clone(), + self.version.clone(), + self.hash.clone()) { debug!("found {} with matching crate_id", path.display()); let (rlib, dylib) = if file.ends_with(".rlib") { @@ -235,9 +237,9 @@ pub fn note_crateid_attr(diag: @SpanHandler, crateid: &CrateId) { } fn crate_matches(crate_data: &[u8], - name: @str, - version: @str, - hash: @str) -> bool { + name: ~str, + version: ~str, + hash: ~str) -> bool { let attrs = decoder::get_crate_attributes(crate_data); match attr::find_crateid(attrs) { None => false, @@ -246,8 +248,9 @@ fn crate_matches(crate_data: &[u8], let chash = decoder::get_crate_hash(crate_data); if chash != hash { return false; } } - name == crateid.name.to_managed() && - (version.is_empty() || version == crateid.version_or_default().to_managed()) + name == crateid.name && + (version.is_empty() || + crateid.version_or_default() == version) } } } diff --git a/src/librustc/metadata/tyencode.rs b/src/librustc/metadata/tyencode.rs index 9da19d666673f..b7977f9d64329 100644 --- a/src/librustc/metadata/tyencode.rs +++ b/src/librustc/metadata/tyencode.rs @@ -45,7 +45,7 @@ pub struct ctxt { pub struct ty_abbrev { pos: uint, len: uint, - s: @str + s: ~str } pub enum abbrev_ctxt { @@ -65,19 +65,21 @@ pub fn enc_ty(w: &mut MemWriter, cx: @ctxt, t: ty::t) { let short_names_cache = cx.tcx.short_names_cache.borrow(); result_str_opt = short_names_cache.get() .find(&t) - .map(|result| *result); + .map(|result| { + (*result).clone() + }); } let result_str = match result_str_opt { Some(s) => s, None => { let wr = &mut MemWriter::new(); enc_sty(wr, cx, &ty::get(t).sty); - let s = str::from_utf8(wr.get_ref()).unwrap().to_managed(); + let s = str::from_utf8(wr.get_ref()).unwrap(); let mut short_names_cache = cx.tcx .short_names_cache .borrow_mut(); - short_names_cache.get().insert(t, s); - s + short_names_cache.get().insert(t, s.to_str()); + s.to_str() } }; w.write(result_str.as_bytes()); @@ -103,7 +105,7 @@ pub fn enc_ty(w: &mut MemWriter, cx: @ctxt, t: 
ty::t) { let abbrev_len = 3 + estimate_sz(pos) + estimate_sz(len); if abbrev_len < len { // I.e. it's actually an abbreviation. - let s = format!("\\#{:x}:{:x}\\#", pos, len).to_managed(); + let s = format!("\\#{:x}:{:x}\\#", pos, len); let a = ty_abbrev { pos: pos as uint, len: len as uint, s: s }; diff --git a/src/librustc/middle/borrowck/mod.rs b/src/librustc/middle/borrowck/mod.rs index 90c9a61b18b17..68e205ebb6ec6 100644 --- a/src/librustc/middle/borrowck/mod.rs +++ b/src/librustc/middle/borrowck/mod.rs @@ -774,7 +774,8 @@ impl BorrowckCtxt { match pat.node { ast::PatIdent(_, ref path, _) => { let ident = ast_util::path_to_ident(path); - out.push_str(token::ident_to_str(&ident)); + let string = token::get_ident(ident.name); + out.push_str(string.get()); } _ => { self.tcx.sess.bug( @@ -795,8 +796,9 @@ impl BorrowckCtxt { self.append_loan_path_to_str_from_interior(lp_base, out); match fname { mc::NamedField(ref fname) => { + let string = token::get_ident(*fname); out.push_char('.'); - out.push_str(token::interner_get(*fname)); + out.push_str(string.get()); } mc::PositionalField(idx) => { out.push_char('#'); // invent a notation here diff --git a/src/librustc/middle/check_match.rs b/src/librustc/middle/check_match.rs index 38376de4346df..d402305401eb9 100644 --- a/src/librustc/middle/check_match.rs +++ b/src/librustc/middle/check_match.rs @@ -176,8 +176,8 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: ~[@Pat]) { match ty::get(ty).sty { ty::ty_bool => { match *ctor { - val(const_bool(true)) => Some(@"true"), - val(const_bool(false)) => Some(@"false"), + val(const_bool(true)) => Some(~"true"), + val(const_bool(false)) => Some(~"false"), _ => None } } @@ -197,7 +197,7 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: ~[@Pat]) { } ty::ty_unboxed_vec(..) | ty::ty_vec(..) 
=> { match *ctor { - vec(n) => Some(format!("vectors of length {}", n).to_managed()), + vec(n) => Some(format!("vectors of length {}", n)), _ => None } } @@ -214,9 +214,14 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: ~[@Pat]) { type matrix = ~[~[@Pat]]; -enum useful { useful(ty::t, ctor), useful_, not_useful } +#[deriving(Clone)] +enum useful { + useful(ty::t, ctor), + useful_, + not_useful, +} -#[deriving(Eq)] +#[deriving(Clone, Eq)] enum ctor { single, variant(DefId), @@ -261,7 +266,7 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful { val(const_bool(false)), 0u, left_ty) } - ref u => *u, + ref u => (*u).clone(), } } ty::ty_enum(eid, _) => { @@ -269,7 +274,7 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful { match is_useful_specialized(cx, m, v, variant(va.id), va.args.len(), left_ty) { not_useful => (), - ref u => return *u, + ref u => return (*u).clone(), } } not_useful @@ -289,7 +294,7 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful { for n in iter::range(0u, max_len + 1) { match is_useful_specialized(cx, m, v, vec(n), n, left_ty) { not_useful => (), - ref u => return *u, + ref u => return (*u).clone(), } } not_useful @@ -304,15 +309,15 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful { match is_useful(cx, &m.iter().filter_map(|r| default(cx, *r)).collect::(), v.tail()) { - useful_ => useful(left_ty, *ctor), - ref u => *u, + useful_ => useful(left_ty, (*ctor).clone()), + ref u => (*u).clone(), } } } } Some(ref v0_ctor) => { let arity = ctor_arity(cx, v0_ctor, left_ty); - is_useful_specialized(cx, m, v, *v0_ctor, arity, left_ty) + is_useful_specialized(cx, m, v, (*v0_ctor).clone(), arity, left_ty) } } } @@ -329,7 +334,7 @@ fn is_useful_specialized(cx: &MatchCheckCtxt, cx, &ms, specialize(cx, v, &ctor, arity, lty).unwrap()); match could_be_useful { useful_ => useful(lty, ctor), - ref u => *u, + ref u => (*u).clone(), } } @@ -407,7 +412,7 @@ fn missing_ctor(cx: &MatchCheckCtxt, let r = pat_ctor_id(cx, r[0]); for id in r.iter() { if !found.contains(id) { - found.push(*id); + found.push((*id).clone()); } } } @@ -770,8 +775,8 @@ fn specialize(cx: &MatchCheckCtxt, } PatRange(lo, hi) => { let (c_lo, c_hi) = match *ctor_id { - val(ref v) => (*v, *v), - range(ref lo, ref hi) => (*lo, *hi), + val(ref v) => ((*v).clone(), (*v).clone()), + range(ref lo, ref hi) => ((*lo).clone(), (*hi).clone()), single => return Some(r.tail().to_owned()), _ => fail!("type error") }; diff --git a/src/librustc/middle/const_eval.rs b/src/librustc/middle/const_eval.rs index 5905712855536..51410068378a4 100644 --- a/src/librustc/middle/const_eval.rs +++ b/src/librustc/middle/const_eval.rs @@ -16,10 +16,11 @@ use middle::ty; use middle::typeck::astconv; use middle; -use syntax::{ast, ast_map, ast_util}; -use syntax::visit; -use syntax::visit::Visitor; use syntax::ast::*; +use syntax::parse::token::InternedString; +use syntax::visit::Visitor; +use syntax::visit; +use syntax::{ast, ast_map, ast_util}; use std::cell::RefCell; use std::hashmap::HashMap; @@ -319,7 +320,7 @@ pub enum const_val { const_float(f64), const_int(i64), const_uint(u64), - const_str(@str), + const_str(InternedString), const_binary(@[u8]), const_bool(bool) } @@ -508,15 +509,15 @@ pub fn eval_const_expr_partial(tcx: &T, e: &Expr) pub fn lit_to_const(lit: &Lit) -> const_val { match lit.node { - LitStr(s, _) => const_str(s), + LitStr(ref s, _) => const_str((*s).clone()), LitBinary(data) => const_binary(data), LitChar(n) => const_uint(n as u64), LitInt(n, 
_) => const_int(n), LitUint(n, _) => const_uint(n), LitIntUnsuffixed(n) => const_int(n), - LitFloat(n, _) => const_float(from_str::(n).unwrap() as f64), - LitFloatUnsuffixed(n) => - const_float(from_str::(n).unwrap() as f64), + LitFloat(ref n, _) | LitFloatUnsuffixed(ref n) => { + const_float(from_str::(n.get()).unwrap() as f64) + } LitNil => const_int(0i64), LitBool(b) => const_bool(b) } @@ -530,7 +531,7 @@ pub fn compare_const_vals(a: &const_val, b: &const_val) -> Option { (&const_int(a), &const_int(b)) => compare_vals(a, b), (&const_uint(a), &const_uint(b)) => compare_vals(a, b), (&const_float(a), &const_float(b)) => compare_vals(a, b), - (&const_str(a), &const_str(b)) => compare_vals(a, b), + (&const_str(ref a), &const_str(ref b)) => compare_vals(a, b), (&const_bool(a), &const_bool(b)) => compare_vals(a, b), _ => None } diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs index c333bc58feee1..08ab8edf750ca 100644 --- a/src/librustc/middle/dead.rs +++ b/src/librustc/middle/dead.rs @@ -360,9 +360,10 @@ impl DeadVisitor { fn warn_dead_code(&mut self, id: ast::NodeId, span: codemap::Span, ident: &ast::Ident) { + let string = token::get_ident(ident.name); self.tcx.sess.add_lint(DeadCode, id, span, format!("code is never used: `{}`", - token::ident_to_str(ident))); + string.get())); } } diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index d11cd4b3f3819..190835f7fb9e9 100644 --- a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -26,8 +26,9 @@ use middle::ty::{BuiltinBound, BoundFreeze, BoundPod, BoundSend, BoundSized}; use syntax::ast; use syntax::ast_util::local_def; use syntax::attr::AttrMetaMethods; -use syntax::visit; +use syntax::parse::token::InternedString; use syntax::visit::Visitor; +use syntax::visit; use std::hashmap::HashMap; use std::iter::Enumerate; @@ -194,11 +195,11 @@ impl LanguageItemCollector { } } -pub fn extract(attrs: &[ast::Attribute]) -> Option<@str> { +pub fn extract(attrs: &[ast::Attribute]) -> Option { for attribute in attrs.iter() { match attribute.name_str_pair() { - Some((key, value)) if "lang" == key => { - return Some(value); + Some((ref key, ref value)) if key.equiv(&("lang")) => { + return Some((*value).clone()); } Some(..) | None => {} } diff --git a/src/librustc/middle/lint.rs b/src/librustc/middle/lint.rs index 89d5ca740120e..f801d6343b2f3 100644 --- a/src/librustc/middle/lint.rs +++ b/src/librustc/middle/lint.rs @@ -34,18 +34,17 @@ //! Context itself, span_lint should be used instead of add_lint. 
use driver::session; +use metadata::csearch; use middle::dead::DEAD_CODE_LINT_STR; +use middle::pat_util; use middle::privacy; use middle::trans::adt; // for `adt::is_ffi_safe` use middle::ty; +use middle::typeck::astconv::{ast_ty_to_ty, AstConv}; +use middle::typeck::infer; use middle::typeck; -use middle::pat_util; -use metadata::csearch; -use util::ppaux::{ty_to_str}; use std::to_str::ToStr; - -use middle::typeck::infer; -use middle::typeck::astconv::{ast_ty_to_ty, AstConv}; +use util::ppaux::{ty_to_str}; use std::cmp; use std::hashmap::HashMap; @@ -59,13 +58,14 @@ use std::u64; use std::u8; use extra::smallintmap::SmallIntMap; use syntax::ast_map; -use syntax::attr; +use syntax::ast_util::IdVisitingOperation; use syntax::attr::{AttrMetaMethods, AttributeMethods}; +use syntax::attr; use syntax::codemap::Span; +use syntax::parse::token::InternedString; use syntax::parse::token; -use syntax::{ast, ast_util, visit}; -use syntax::ast_util::IdVisitingOperation; use syntax::visit::Visitor; +use syntax::{ast, ast_util, visit}; #[deriving(Clone, Eq, Ord, TotalEq, TotalOrd)] pub enum Lint { @@ -540,10 +540,16 @@ impl<'a> Context<'a> { }); let old_is_doc_hidden = self.is_doc_hidden; - self.is_doc_hidden = self.is_doc_hidden || - attrs.iter().any(|attr| ("doc" == attr.name() && match attr.meta_item_list() - { None => false, - Some(l) => attr::contains_name(l, "hidden") })); + self.is_doc_hidden = + self.is_doc_hidden || + attrs.iter() + .any(|attr| { + attr.name().equiv(&("doc")) && + match attr.meta_item_list() { + None => false, + Some(l) => attr::contains_name(l, "hidden") + } + }); f(self); @@ -569,12 +575,12 @@ impl<'a> Context<'a> { // Return true if that's the case. Otherwise return false. pub fn each_lint(sess: session::Session, attrs: &[ast::Attribute], - f: |@ast::MetaItem, level, @str| -> bool) + f: |@ast::MetaItem, level, InternedString| -> bool) -> bool { let xs = [allow, warn, deny, forbid]; for &level in xs.iter() { let level_name = level_to_str(level); - for attr in attrs.iter().filter(|m| level_name == m.name()) { + for attr in attrs.iter().filter(|m| m.name().equiv(&level_name)) { let meta = attr.node.value; let metas = match meta.node { ast::MetaList(_, ref metas) => metas, @@ -585,8 +591,8 @@ pub fn each_lint(sess: session::Session, }; for meta in metas.iter() { match meta.node { - ast::MetaWord(lintname) => { - if !f(*meta, level, lintname) { + ast::MetaWord(ref lintname) => { + if !f(*meta, level, (*lintname).clone()) { return false; } } @@ -603,15 +609,17 @@ pub fn each_lint(sess: session::Session, // Check from a list of attributes if it contains the appropriate // `#[level(lintname)]` attribute (e.g. `#[allow(dead_code)]). 
pub fn contains_lint(attrs: &[ast::Attribute], - level: level, lintname: &'static str) -> bool { + level: level, + lintname: &'static str) + -> bool { let level_name = level_to_str(level); - for attr in attrs.iter().filter(|m| level_name == m.name()) { + for attr in attrs.iter().filter(|m| m.name().equiv(&level_name)) { if attr.meta_item_list().is_none() { continue } let list = attr.meta_item_list().unwrap(); for meta_item in list.iter() { - if lintname == meta_item.name() { + if meta_item.name().equiv(&lintname) { return true; } } @@ -879,7 +887,7 @@ fn check_heap_type(cx: &Context, span: Span, ty: ty::t) { let mut n_uniq = 0; ty::fold_ty(cx.tcx, ty, |t| { match ty::get(t).sty { - ty::ty_box(_) | ty::ty_str(ty::vstore_box) | + ty::ty_box(_) | ty::ty_vec(_, ty::vstore_box) | ty::ty_trait(_, _, ty::BoxTraitStore, _, _) => { n_box += 1; @@ -1314,7 +1322,7 @@ fn check_missing_doc_attrs(cx: &Context, let has_doc = attrs.iter().any(|a| { match a.node.value.node { - ast::MetaNameValue(ref name, _) if "doc" == *name => true, + ast::MetaNameValue(ref name, _) if name.equiv(&("doc")) => true, _ => false } }); diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index 6a1fa488121bb..ed583f919ca14 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -120,6 +120,7 @@ use std::vec; use syntax::ast::*; use syntax::codemap::Span; use syntax::parse::token::special_idents; +use syntax::parse::token; use syntax::print::pprust::{expr_to_str, block_to_str}; use syntax::{visit, ast_util}; use syntax::visit::{Visitor, FnKind}; @@ -332,13 +333,14 @@ impl IrMaps { } } - pub fn variable_name(&self, var: Variable) -> @str { + pub fn variable_name(&self, var: Variable) -> ~str { let var_kinds = self.var_kinds.borrow(); match var_kinds.get()[var.get()] { Local(LocalInfo { ident: nm, .. }) | Arg(_, nm) => { - self.tcx.sess.str_of(nm) + let string = token::get_ident(nm.name); + string.get().to_str() }, - ImplicitRet => @"" + ImplicitRet => ~"" } } @@ -1669,7 +1671,7 @@ impl Liveness { } } - pub fn should_warn(&self, var: Variable) -> Option<@str> { + pub fn should_warn(&self, var: Variable) -> Option<~str> { let name = self.ir.variable_name(var); if name.len() == 0 || name[0] == ('_' as u8) { None } else { Some(name) } } diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index ce1840283b2e4..9bab74dc56a67 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -179,8 +179,7 @@ pub fn opt_deref_kind(t: ty::t) -> Option { ty::ty_box(_) | ty::ty_vec(_, ty::vstore_box) | - ty::ty_trait(_, _, ty::BoxTraitStore, _, _) | - ty::ty_str(ty::vstore_box) => { + ty::ty_trait(_, _, ty::BoxTraitStore, _, _) => { Some(deref_ptr(gc_ptr)) } @@ -1233,7 +1232,10 @@ pub fn ptr_sigil(ptr: PointerKind) -> ~str { impl Repr for InteriorKind { fn repr(&self, _tcx: ty::ctxt) -> ~str { match *self { - InteriorField(NamedField(fld)) => token::interner_get(fld).to_owned(), + InteriorField(NamedField(fld)) => { + let string = token::get_ident(fld); + string.get().to_owned() + } InteriorField(PositionalField(i)) => format!("\\#{:?}", i), InteriorElement(_) => ~"[]", } diff --git a/src/librustc/middle/privacy.rs b/src/librustc/middle/privacy.rs index 2562c34b54b00..ae1b71f5ccad1 100644 --- a/src/librustc/middle/privacy.rs +++ b/src/librustc/middle/privacy.rs @@ -530,8 +530,10 @@ impl<'a> PrivacyVisitor<'a> { ast::ItemTrait(..) 
=> "trait", _ => return false, }; - let msg = format!("{} `{}` is private", desc, - token::ident_to_str(&item.ident)); + let string = token::get_ident(item.ident.name); + let msg = format!("{} `{}` is private", + desc, + string.get()); self.tcx.sess.span_note(span, msg); } Some(..) | None => {} @@ -588,8 +590,10 @@ impl<'a> PrivacyVisitor<'a> { if struct_vis != ast::Public && field.vis == ast::Public { break } if !is_local(field.id) || !self.private_accessible(field.id.node) { - self.tcx.sess.span_err(span, format!("field `{}` is private", - token::ident_to_str(&ident))); + let string = token::get_ident(ident.name); + self.tcx.sess.span_err(span, + format!("field `{}` is private", + string.get())) } break; } @@ -603,8 +607,11 @@ impl<'a> PrivacyVisitor<'a> { let method_id = ty::method(self.tcx, method_id).provided_source .unwrap_or(method_id); - self.ensure_public(span, method_id, None, - format!("method `{}`", token::ident_to_str(name))); + let string = token::get_ident(name.name); + self.ensure_public(span, + method_id, + None, + format!("method `{}`", string.get())); } // Checks that a path is in scope. @@ -617,10 +624,17 @@ impl<'a> PrivacyVisitor<'a> { match *self.last_private_map.get(&path_id) { resolve::AllPublic => {}, resolve::DependsOn(def) => { - let name = token::ident_to_str(&path.segments.last().unwrap() - .identifier); - self.ensure_public(span, def, Some(origdid), - format!("{} `{}`", tyname, name)); + let name = token::get_ident(path.segments + .last() + .unwrap() + .identifier + .name); + self.ensure_public(span, + def, + Some(origdid), + format!("{} `{}`", + tyname, + name.get())); } } }; diff --git a/src/librustc/middle/resolve.rs b/src/librustc/middle/resolve.rs index e235e914689e8..8eaa4ebd97c69 100644 --- a/src/librustc/middle/resolve.rs +++ b/src/librustc/middle/resolve.rs @@ -20,9 +20,8 @@ use syntax::ast::*; use syntax::ast; use syntax::ast_util::{def_id_of_def, local_def, mtwt_resolve}; use syntax::ast_util::{path_to_ident, walk_pat, trait_method_to_ty_method}; +use syntax::parse::token::{IdentInterner, special_idents}; use syntax::parse::token; -use syntax::parse::token::{IdentInterner, interner_get}; -use syntax::parse::token::special_idents; use syntax::print::pprust::path_to_str; use syntax::codemap::{Span, DUMMY_SP, Pos}; use syntax::opt_vec::OptVec; @@ -53,7 +52,7 @@ pub type TraitMap = HashMap>; pub type ExportMap2 = @RefCell>; pub struct Export2 { - name: @str, // The name of the target. + name: ~str, // The name of the target. def_id: DefId, // The definition of the target. } @@ -1894,8 +1893,9 @@ impl Resolver { csearch::each_child_of_item(self.session.cstore, def_id, |def_like, child_ident, visibility| { + let child_ident_string = token::get_ident(child_ident.name); debug!("(populating external module) ... 
found ident: {}", - token::ident_to_str(&child_ident)); + child_ident_string.get()); self.build_reduced_graph_for_external_crate_def(module, def_like, child_ident, @@ -2114,24 +2114,26 @@ impl Resolver { } fn import_directive_subclass_to_str(&mut self, - subclass: ImportDirectiveSubclass) - -> @str { + subclass: ImportDirectiveSubclass) + -> ~str { match subclass { - SingleImport(_target, source) => self.session.str_of(source), - GlobImport => @"*" + SingleImport(_target, source) => { + self.session.str_of(source).to_str() + } + GlobImport => ~"*" } } fn import_path_to_str(&mut self, - idents: &[Ident], - subclass: ImportDirectiveSubclass) - -> @str { + idents: &[Ident], + subclass: ImportDirectiveSubclass) + -> ~str { if idents.is_empty() { self.import_directive_subclass_to_str(subclass) } else { (format!("{}::{}", - self.idents_to_str(idents), - self.import_directive_subclass_to_str(subclass))).to_managed() + self.idents_to_str(idents), + self.import_directive_subclass_to_str(subclass))) } } @@ -2584,7 +2586,7 @@ impl Resolver { debug!("(resolving glob import) writing resolution `{}` in `{}` \ to `{}`", - interner_get(name), + token::get_ident(name).get().to_str(), self.module_to_str(containing_module), self.module_to_str(module_)); @@ -3101,11 +3103,12 @@ impl Resolver { // top of the crate otherwise. let mut containing_module; let mut i; - if "self" == token::ident_to_str(&module_path[0]) { + let first_module_path_string = token::get_ident(module_path[0].name); + if "self" == first_module_path_string.get() { containing_module = self.get_nearest_normal_module_parent_or_self(module_); i = 1; - } else if "super" == token::ident_to_str(&module_path[0]) { + } else if "super" == first_module_path_string.get() { containing_module = self.get_nearest_normal_module_parent_or_self(module_); i = 0; // We'll handle `super` below. @@ -3114,8 +3117,11 @@ impl Resolver { } // Now loop through all the `super`s we find. 
- while i < module_path.len() && - "super" == token::ident_to_str(&module_path[i]) { + while i < module_path.len() { + let string = token::get_ident(module_path[i].name); + if "super" != string.get() { + break + } debug!("(resolving module prefix) resolving `super` at {}", self.module_to_str(containing_module)); match self.get_nearest_normal_module_parent(containing_module) { @@ -3354,10 +3360,10 @@ impl Resolver { match namebindings.def_for_namespace(ns) { Some(d) => { debug!("(computing exports) YES: export '{}' => {:?}", - interner_get(name), + token::get_ident(name).get().to_str(), def_id_of_def(d)); exports2.push(Export2 { - name: interner_get(name), + name: token::get_ident(name).get().to_str(), def_id: def_id_of_def(d) }); } @@ -3380,7 +3386,7 @@ impl Resolver { match importresolution.target_for_namespace(ns) { Some(target) => { debug!("(computing exports) maybe export '{}'", - interner_get(*name)); + token::get_ident(*name).get().to_str()); self.add_exports_of_namebindings(exports2, *name, target.bindings, @@ -4155,19 +4161,23 @@ impl Resolver { for (&key, &binding_0) in map_0.iter() { match map_i.find(&key) { None => { + let string = token::get_ident(key); self.resolve_error( p.span, format!("variable `{}` from pattern \\#1 is \ not bound in pattern \\#{}", - interner_get(key), i + 1)); + string.get(), + i + 1)); } Some(binding_i) => { if binding_0.binding_mode != binding_i.binding_mode { + let string = token::get_ident(key); self.resolve_error( binding_i.span, format!("variable `{}` is bound with different \ mode in pattern \\#{} than in pattern \\#1", - interner_get(key), i + 1)); + string.get(), + i + 1)); } } } @@ -4175,11 +4185,13 @@ impl Resolver { for (&key, &binding) in map_i.iter() { if !map_0.contains_key(&key) { + let string = token::get_ident(key); self.resolve_error( binding.span, format!("variable `{}` from pattern \\#{} is \ not bound in pattern \\#1", - interner_get(key), i + 1)); + string.get(), + i + 1)); } } } @@ -4371,9 +4383,10 @@ impl Resolver { match self.resolve_bare_identifier_pattern(ident) { FoundStructOrEnumVariant(def, lp) if mode == RefutableMode => { + let string = token::get_ident(renamed); debug!("(resolving pattern) resolving `{}` to \ struct or enum variant", - interner_get(renamed)); + string.get()); self.enforce_default_binding_mode( pattern, @@ -4382,17 +4395,19 @@ impl Resolver { self.record_def(pattern.id, (def, lp)); } FoundStructOrEnumVariant(..) 
=> { + let string = token::get_ident(renamed); self.resolve_error(pattern.span, format!("declaration of `{}` \ shadows an enum \ variant or unit-like \ struct in scope", - interner_get(renamed))); + string.get())); } FoundConst(def, lp) if mode == RefutableMode => { + let string = token::get_ident(renamed); debug!("(resolving pattern) resolving `{}` to \ constant", - interner_get(renamed)); + string.get()); self.enforce_default_binding_mode( pattern, @@ -4406,8 +4421,9 @@ impl Resolver { allowed here"); } BareIdentifierPatternUnresolved => { + let string = token::get_ident(renamed); debug!("(resolving pattern) binding `{}`", - interner_get(renamed)); + string.get()); let def = match mode { RefutableMode => { @@ -5009,10 +5025,10 @@ impl Resolver { } fn find_best_match_for_name(&mut self, name: &str, max_distance: uint) - -> Option<@str> { + -> Option<~str> { let this = &mut *self; - let mut maybes: ~[@str] = ~[]; + let mut maybes: ~[~str] = ~[]; let mut values: ~[uint] = ~[]; let mut j = { @@ -5024,14 +5040,15 @@ impl Resolver { let value_ribs = this.value_ribs.borrow(); let bindings = value_ribs.get()[j].bindings.borrow(); for (&k, _) in bindings.get().iter() { - maybes.push(interner_get(k)); + let string = token::get_ident(k); + maybes.push(string.get().to_str()); values.push(uint::MAX); } } let mut smallest = 0; - for (i, &other) in maybes.iter().enumerate() { - values[i] = name.lev_distance(other); + for (i, other) in maybes.iter().enumerate() { + values[i] = name.lev_distance(*other); if values[i] <= values[smallest] { smallest = i; @@ -5190,7 +5207,9 @@ impl Resolver { self.resolve_error(expr.span, format!("use of undeclared label \ `{}`", - interner_get(label))), + token::get_ident(label) + .get() + .to_str())), Some(DlDef(def @ DefLabel(_))) => { // FIXME: is AllPublic correct? self.record_def(expr.id, (def, AllPublic)) @@ -5510,7 +5529,7 @@ impl Resolver { self.populate_module_if_necessary(module_); let children = module_.children.borrow(); for (&name, _) in children.get().iter() { - debug!("* {}", interner_get(name)); + debug!("* {}", token::get_ident(name).get().to_str()); } debug!("Import resolutions:"); @@ -5534,7 +5553,7 @@ impl Resolver { } } - debug!("* {}:{}{}", interner_get(*name), + debug!("* {}:{}{}", token::get_ident(*name).get().to_str(), value_repr, type_repr); } } diff --git a/src/librustc/middle/trans/_match.rs b/src/librustc/middle/trans/_match.rs index 2fabc44d0e4b2..e755a3c825509 100644 --- a/src/librustc/middle/trans/_match.rs +++ b/src/librustc/middle/trans/_match.rs @@ -229,6 +229,7 @@ use syntax::ast::Ident; use syntax::ast_util::path_to_ident; use syntax::ast_util; use syntax::codemap::{Span, DUMMY_SP}; +use syntax::parse::token::InternedString; // An option identifying a literal: either a unit-like struct or an // expression. 
@@ -1174,7 +1175,7 @@ fn any_tuple_struct_pat(bcx: &Block, m: &[Match], col: uint) -> bool { struct DynamicFailureHandler<'a> { bcx: &'a Block<'a>, sp: Span, - msg: @str, + msg: InternedString, finished: @Cell>, } @@ -1187,7 +1188,7 @@ impl<'a> DynamicFailureHandler<'a> { let fcx = self.bcx.fcx; let fail_cx = fcx.new_block(false, "case_fallthrough", None); - controlflow::trans_fail(fail_cx, Some(self.sp), self.msg); + controlflow::trans_fail(fail_cx, Some(self.sp), self.msg.clone()); self.finished.set(Some(fail_cx.llbb)); fail_cx.llbb } @@ -1891,7 +1892,8 @@ fn trans_match_inner<'a>(scope_cx: &'a Block<'a>, let fail_handler = ~DynamicFailureHandler { bcx: scope_cx, sp: discr_expr.span, - msg: @"scrutinizing value that can't exist", + msg: InternedString::new("scrutinizing value that can't \ + exist"), finished: fail_cx, }; DynamicFailureHandlerClass(fail_handler) diff --git a/src/librustc/middle/trans/asm.rs b/src/librustc/middle/trans/asm.rs index bae35f68ada56..db99bd53704eb 100644 --- a/src/librustc/middle/trans/asm.rs +++ b/src/librustc/middle/trans/asm.rs @@ -38,8 +38,8 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm) let temp_scope = fcx.push_custom_cleanup_scope(); // Prepare the output operands - let outputs = ia.outputs.map(|&(c, out)| { - constraints.push(c); + let outputs = ia.outputs.map(|&(ref c, out)| { + constraints.push((*c).clone()); let out_datum = unpack_datum!(bcx, expr::trans(bcx, out)); output_types.push(type_of::type_of(bcx.ccx(), out_datum.ty)); @@ -48,8 +48,8 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm) }); // Now the input operands - let inputs = ia.inputs.map(|&(c, input)| { - constraints.push(c); + let inputs = ia.inputs.map(|&(ref c, input)| { + constraints.push((*c).clone()); unpack_result!(bcx, { callee::trans_arg_expr(bcx, @@ -63,13 +63,13 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm) // no failure occurred preparing operands, no need to cleanup fcx.pop_custom_cleanup_scope(temp_scope); - let mut constraints = constraints.connect(","); + let mut constraints = constraints.map(|s| s.get().to_str()).connect(","); let mut clobbers = getClobbers(); - if !ia.clobbers.is_empty() && !clobbers.is_empty() { - clobbers = format!("{},{}", ia.clobbers, clobbers); + if !ia.clobbers.get().is_empty() && !clobbers.is_empty() { + clobbers = format!("{},{}", ia.clobbers.get(), clobbers); } else { - clobbers.push_str(ia.clobbers); + clobbers.push_str(ia.clobbers.get()); } // Add the clobbers to our constraints list @@ -98,7 +98,7 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm) ast::AsmIntel => lib::llvm::AD_Intel }; - let r = ia.asm.with_c_str(|a| { + let r = ia.asm.get().with_c_str(|a| { constraints.with_c_str(|c| { InlineAsmCall(bcx, a, c, inputs, output_type, ia.volatile, ia.alignstack, dialect) }) diff --git a/src/librustc/middle/trans/base.rs b/src/librustc/middle/trans/base.rs index d39369c7a5f47..39bc3c6ba4aef 100644 --- a/src/librustc/middle/trans/base.rs +++ b/src/librustc/middle/trans/base.rs @@ -76,16 +76,17 @@ use std::hashmap::HashMap; use std::libc::c_uint; use std::vec; use std::local_data; +use syntax::abi::{X86, X86_64, Arm, Mips, Rust, RustIntrinsic, OsWin32}; use syntax::ast_map::{PathName, PathPrettyName, path_elem_to_str}; use syntax::ast_util::{local_def, is_local}; +use syntax::attr::AttrMetaMethods; use syntax::attr; use syntax::codemap::Span; +use syntax::parse::token::InternedString; use syntax::parse::token; -use syntax::{ast, ast_util, ast_map}; -use 
syntax::attr::AttrMetaMethods; -use syntax::abi::{X86, X86_64, Arm, Mips, Rust, RustIntrinsic, OsWin32}; -use syntax::visit; use syntax::visit::Visitor; +use syntax::visit; +use syntax::{ast, ast_util, ast_map}; pub use middle::trans::context::task_llcx; @@ -509,7 +510,7 @@ pub fn set_no_split_stack(f: ValueRef) { // Double-check that we never ask LLVM to declare the same symbol twice. It // silently mangles such symbols, breaking our linkage model. -pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: @str) { +pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: ~str) { let mut all_llvm_symbols = ccx.all_llvm_symbols.borrow_mut(); if all_llvm_symbols.get().contains(&sym) { ccx.sess.bug(~"duplicate LLVM symbol: " + sym); @@ -604,7 +605,8 @@ pub fn compare_scalar_types<'a>( rslt( controlflow::trans_fail( cx, None, - @"attempt to compare values of type type"), + InternedString::new("attempt to compare values of type \ + type")), C_nil()) } _ => { @@ -856,9 +858,9 @@ pub fn fail_if_zero<'a>( rhs_t: ty::t) -> &'a Block<'a> { let text = if divrem == ast::BiDiv { - @"attempted to divide by zero" + "attempted to divide by zero" } else { - @"attempted remainder with a divisor of zero" + "attempted remainder with a divisor of zero" }; let is_zero = match ty::get(rhs_t).sty { ty::ty_int(t) => { @@ -875,7 +877,7 @@ pub fn fail_if_zero<'a>( } }; with_cond(cx, is_zero, |bcx| { - controlflow::trans_fail(bcx, Some(span), text) + controlflow::trans_fail(bcx, Some(span), InternedString::new(text)) }) } @@ -1945,7 +1947,7 @@ fn exported_name(ccx: &CrateContext, path: ast_map::Path, ty: ty::t, attrs: &[ast::Attribute]) -> ~str { match attr::first_attr_value_str_by_name(attrs, "export_name") { // Use provided name - Some(name) => name.to_owned(), + Some(name) => name.get().to_owned(), // Don't mangle _ if attr::contains_name(attrs, "no_mangle") @@ -2093,7 +2095,7 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef { match attr::first_attr_value_str_by_name(i.attrs, "link_section") { Some(sect) => unsafe { - sect.with_c_str(|buf| { + sect.get().with_c_str(|buf| { llvm::LLVMSetSection(v, buf); }) }, @@ -2155,9 +2157,9 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef { ccx.crate_map } } else { - let ident = foreign::link_name(ccx, ni); + let ident = foreign::link_name(ni); unsafe { - ident.with_c_str(|buf| { + ident.get().with_c_str(|buf| { let ty = type_of(ccx, ty); llvm::LLVMAddGlobal(ccx.llmod, ty.to_ref(), buf) @@ -2470,21 +2472,21 @@ pub fn create_module_map(ccx: &CrateContext) -> (ValueRef, uint) { let mut keys = ~[]; let module_data = ccx.module_data.borrow(); for (k, _) in module_data.get().iter() { - keys.push(k.to_managed()); + keys.push(k.clone()); } keys }; for key in keys.iter() { - let llstrval = C_str_slice(ccx, *key); - let module_data = ccx.module_data.borrow(); - let val = *module_data.get().find_equiv(key).unwrap(); - let v_ptr = p2i(ccx, val); - let elt = C_struct([ - llstrval, - v_ptr - ], false); - elts.push(elt); + let llstrval = C_str_slice(ccx, token::intern_and_get_ident(*key)); + let module_data = ccx.module_data.borrow(); + let val = *module_data.get().find_equiv(key).unwrap(); + let v_ptr = p2i(ccx, val); + let elt = C_struct([ + llstrval, + v_ptr + ], false); + elts.push(elt); } unsafe { llvm::LLVMSetInitializer(map, C_array(elttype, elts)); diff --git a/src/librustc/middle/trans/common.rs b/src/librustc/middle/trans/common.rs index 18a6727dee069..dca8006f75d54 100644 --- a/src/librustc/middle/trans/common.rs +++ 
b/src/librustc/middle/trans/common.rs @@ -30,7 +30,6 @@ use middle::ty; use middle::typeck; use util::ppaux::Repr; - use arena::TypedArena; use std::c_str::ToCStr; use std::cast::transmute; @@ -41,6 +40,7 @@ use std::libc::{c_uint, c_longlong, c_ulonglong, c_char}; use syntax::ast::{Ident}; use syntax::ast_map::{Path, PathElem, PathPrettyName}; use syntax::codemap::Span; +use syntax::parse::token::InternedString; use syntax::parse::token; use syntax::{ast, ast_map}; @@ -446,8 +446,9 @@ impl<'a> Block<'a> { } pub fn sess(&self) -> Session { self.fcx.ccx.sess } - pub fn ident(&self, ident: Ident) -> @str { - token::ident_to_str(&ident) + pub fn ident(&self, ident: Ident) -> ~str { + let string = token::get_ident(ident.name); + string.get().to_str() } pub fn node_id_to_str(&self, id: ast::NodeId) -> ~str { @@ -597,18 +598,19 @@ pub fn C_u8(i: uint) -> ValueRef { // This is a 'c-like' raw string, which differs from // our boxed-and-length-annotated strings. -pub fn C_cstr(cx: &CrateContext, s: @str) -> ValueRef { +pub fn C_cstr(cx: &CrateContext, s: InternedString) -> ValueRef { unsafe { { let const_cstr_cache = cx.const_cstr_cache.borrow(); - match const_cstr_cache.get().find_equiv(&s) { + match const_cstr_cache.get().find(&s) { Some(&llval) => return llval, None => () } } let sc = llvm::LLVMConstStringInContext(cx.llcx, - s.as_ptr() as *c_char, s.len() as c_uint, + s.get().as_ptr() as *c_char, + s.get().len() as c_uint, False); let gsym = token::gensym("str"); @@ -627,9 +629,9 @@ pub fn C_cstr(cx: &CrateContext, s: @str) -> ValueRef { // NB: Do not use `do_spill_noroot` to make this into a constant string, or // you will be kicked off fast isel. See issue #4352 for an example of this. -pub fn C_str_slice(cx: &CrateContext, s: @str) -> ValueRef { +pub fn C_str_slice(cx: &CrateContext, s: InternedString) -> ValueRef { unsafe { - let len = s.len(); + let len = s.get().len(); let cs = llvm::LLVMConstPointerCast(C_cstr(cx, s), Type::i8p().to_ref()); C_struct([cs, C_uint(cx, len)], false) } @@ -970,7 +972,8 @@ pub fn dummy_substs(tps: ~[ty::t]) -> ty::substs { pub fn filename_and_line_num_from_span(bcx: &Block, span: Span) -> (ValueRef, ValueRef) { let loc = bcx.sess().parse_sess.cm.lookup_char_pos(span.lo); - let filename_cstr = C_cstr(bcx.ccx(), loc.file.name); + let filename_cstr = C_cstr(bcx.ccx(), + token::intern_and_get_ident(loc.file.name)); let filename = build::PointerCast(bcx, filename_cstr, Type::i8p()); let line = C_int(bcx.ccx(), loc.line as int); (filename, line) diff --git a/src/librustc/middle/trans/consts.rs b/src/librustc/middle/trans/consts.rs index 79aa536a0dbdc..8f69b3cc2f2da 100644 --- a/src/librustc/middle/trans/consts.rs +++ b/src/librustc/middle/trans/consts.rs @@ -57,12 +57,14 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit) ty_to_str(cx.tcx, lit_int_ty))) } } - ast::LitFloat(fs, t) => C_floating(fs, Type::float_from_ty(t)), - ast::LitFloatUnsuffixed(fs) => { + ast::LitFloat(ref fs, t) => { + C_floating(fs.get(), Type::float_from_ty(t)) + } + ast::LitFloatUnsuffixed(ref fs) => { let lit_float_ty = ty::node_id_to_type(cx.tcx, e.id); match ty::get(lit_float_ty).sty { ty::ty_float(t) => { - C_floating(fs, Type::float_from_ty(t)) + C_floating(fs.get(), Type::float_from_ty(t)) } _ => { cx.sess.span_bug(lit.span, @@ -72,7 +74,7 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit) } ast::LitBool(b) => C_bool(b), ast::LitNil => C_nil(), - ast::LitStr(s, _) => C_str_slice(cx, s), + ast::LitStr(ref s, _) => C_str_slice(cx, (*s).clone()), 
ast::LitBinary(data) => C_binary_slice(cx, data), } } @@ -312,7 +314,9 @@ fn const_expr_unadjusted(cx: @CrateContext, e: &ast::Expr, unsafe { let _icx = push_ctxt("const_expr"); return match e.node { - ast::ExprLit(lit) => (consts::const_lit(cx, e, *lit), true), + ast::ExprLit(lit) => { + (consts::const_lit(cx, e, (*lit).clone()), true) + } ast::ExprBinary(_, b, e1, e2) => { let (te1, _) = const_expr(cx, e1, is_local); let (te2, _) = const_expr(cx, e2, is_local); diff --git a/src/librustc/middle/trans/context.rs b/src/librustc/middle/trans/context.rs index 8503ce9066bc3..2872d5566339b 100644 --- a/src/librustc/middle/trans/context.rs +++ b/src/librustc/middle/trans/context.rs @@ -19,12 +19,12 @@ use middle::resolve; use middle::trans::adt; use middle::trans::base; use middle::trans::builder::Builder; -use middle::trans::debuginfo; use middle::trans::common::{C_i32, C_null}; -use middle::ty; - +use middle::trans::common::{mono_id,ExternMap,tydesc_info,BuilderRef_res,Stats}; +use middle::trans::base::{decl_crate_map}; +use middle::trans::debuginfo; use middle::trans::type_::Type; - +use middle::ty; use util::sha2::Sha256; use std::cell::{Cell, RefCell}; @@ -33,10 +33,7 @@ use std::hashmap::{HashMap, HashSet}; use std::local_data; use std::libc::c_uint; use syntax::ast; - -use middle::trans::common::{mono_id,ExternMap,tydesc_info,BuilderRef_res,Stats}; - -use middle::trans::base::{decl_crate_map}; +use syntax::parse::token::InternedString; pub struct CrateContext { sess: session::Session, @@ -71,7 +68,7 @@ pub struct CrateContext { // Cache generated vtables vtables: RefCell>, // Cache of constant strings, - const_cstr_cache: RefCell>, + const_cstr_cache: RefCell>, // Reverse-direction for const ptrs cast from globals. // Key is an int, cast from a ValueRef holding a *T, @@ -99,8 +96,8 @@ pub struct CrateContext { llsizingtypes: RefCell>, adt_reprs: RefCell>, symbol_hasher: RefCell, - type_hashcodes: RefCell>, - all_llvm_symbols: RefCell>, + type_hashcodes: RefCell>, + all_llvm_symbols: RefCell>, tcx: ty::ctxt, maps: astencode::Maps, stats: @Stats, diff --git a/src/librustc/middle/trans/controlflow.rs b/src/librustc/middle/trans/controlflow.rs index 8c8c6829e49fd..6de34d98941e8 100644 --- a/src/librustc/middle/trans/controlflow.rs +++ b/src/librustc/middle/trans/controlflow.rs @@ -28,6 +28,8 @@ use syntax::ast; use syntax::ast::Name; use syntax::ast_util; use syntax::codemap::Span; +use syntax::parse::token::InternedString; +use syntax::parse::token; use syntax::visit::Visitor; pub fn trans_stmt<'a>(cx: &'a Block<'a>, @@ -342,14 +344,14 @@ pub fn trans_fail_expr<'a>( ppaux::ty_to_str(tcx, arg_datum.ty)); } } - _ => trans_fail(bcx, sp_opt, @"explicit failure") + _ => trans_fail(bcx, sp_opt, InternedString::new("explicit failure")) } } pub fn trans_fail<'a>( bcx: &'a Block<'a>, sp_opt: Option, - fail_str: @str) + fail_str: InternedString) -> &'a Block<'a> { let _icx = push_ctxt("trans_fail"); let V_fail_str = C_cstr(bcx.ccx(), fail_str); @@ -367,11 +369,11 @@ fn trans_fail_value<'a>( Some(sp) => { let sess = bcx.sess(); let loc = sess.parse_sess.cm.lookup_char_pos(sp.lo); - (C_cstr(bcx.ccx(), loc.file.name), + (C_cstr(bcx.ccx(), token::intern_and_get_ident(loc.file.name)), loc.line as int) } None => { - (C_cstr(bcx.ccx(), @""), 0) + (C_cstr(bcx.ccx(), InternedString::new("")), 0) } }; let V_str = PointerCast(bcx, V_fail_str, Type::i8p()); diff --git a/src/librustc/middle/trans/debuginfo.rs b/src/librustc/middle/trans/debuginfo.rs index 36cc6f3afd16e..b89ec9d19dfbb 100644 --- 
a/src/librustc/middle/trans/debuginfo.rs +++ b/src/librustc/middle/trans/debuginfo.rs @@ -622,7 +622,8 @@ pub fn create_function_debug_context(cx: &CrateContext, }; // get_template_parameters() will append a `<...>` clause to the function name if necessary. - let mut function_name = token::ident_to_str(&ident).to_owned(); + let function_name_string = token::get_ident(ident.name); + let mut function_name = function_name_string.get().to_owned(); let template_parameters = get_template_parameters(cx, generics, param_substs, @@ -791,7 +792,9 @@ pub fn create_function_debug_context(cx: &CrateContext, let ident = special_idents::type_self; - let param_metadata = token::ident_to_str(&ident).with_c_str(|name| { + let param_metadata_string = token::get_ident(ident.name); + let param_metadata = param_metadata_string.get() + .with_c_str(|name| { unsafe { llvm::LLVMDIBuilderCreateTemplateTypeParameter( DIB(cx), @@ -829,7 +832,9 @@ pub fn create_function_debug_context(cx: &CrateContext, // Again, only create type information if extra_debuginfo is enabled if cx.sess.opts.extra_debuginfo { let actual_type_metadata = type_metadata(cx, actual_type, codemap::DUMMY_SP); - let param_metadata = token::ident_to_str(&ident).with_c_str(|name| { + let param_metadata_string = token::get_ident(ident.name); + let param_metadata = param_metadata_string.get() + .with_c_str(|name| { unsafe { llvm::LLVMDIBuilderCreateTemplateTypeParameter( DIB(cx), @@ -931,10 +936,11 @@ fn declare_local(bcx: &Block, span: Span) { let cx: &CrateContext = bcx.ccx(); - let filename = span_start(cx, span).file.name; + let filename = span_start(cx, span).file.name.clone(); let file_metadata = file_metadata(cx, filename); - let name: &str = token::ident_to_str(&variable_ident); + let variable_ident_string = token::get_ident(variable_ident.name); + let name: &str = variable_ident_string.get(); let loc = span_start(cx, span); let type_metadata = type_metadata(cx, variable_type, span); @@ -1139,9 +1145,10 @@ impl MemberDescriptionFactory for StructMemberDescriptionFactory { -> ~[MemberDescription] { self.fields.map(|field| { let name = if field.ident.name == special_idents::unnamed_field.name { - @"" + ~"" } else { - token::ident_to_str(&field.ident) + let string = token::get_ident(field.ident.name); + string.get().to_str() }; MemberDescription { @@ -1165,7 +1172,7 @@ fn prepare_struct_metadata(cx: &CrateContext, let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id, span); - let file_name = span_start(cx, definition_span).file.name; + let file_name = span_start(cx, definition_span).file.name.clone(); let file_metadata = file_metadata(cx, file_name); let struct_metadata_stub = create_struct_stub(cx, @@ -1244,7 +1251,7 @@ impl MemberDescriptionFactory for TupleMemberDescriptionFactory { -> ~[MemberDescription] { self.component_types.map(|&component_type| { MemberDescription { - name: @"", + name: ~"", llvm_type: type_of::type_of(cx, component_type), type_metadata: type_metadata(cx, component_type, self.span), offset: ComputedMemberOffset, @@ -1322,7 +1329,7 @@ impl MemberDescriptionFactory for GeneralMemberDescriptionFactory { self.file_metadata, codemap::DUMMY_SP); MemberDescription { - name: @"", + name: ~"", llvm_type: variant_llvm_type, type_metadata: variant_type_metadata, offset: FixedMemberOffset { bytes: 0 }, @@ -1332,7 +1339,7 @@ impl MemberDescriptionFactory for GeneralMemberDescriptionFactory { } struct EnumVariantMemberDescriptionFactory { - args: ~[(@str, ty::t)], + args: ~[(~str, ty::t)], 
discriminant_type_metadata: Option, span: Span, } @@ -1340,9 +1347,9 @@ struct EnumVariantMemberDescriptionFactory { impl MemberDescriptionFactory for EnumVariantMemberDescriptionFactory { fn create_member_descriptions(&self, cx: &CrateContext) -> ~[MemberDescription] { - self.args.iter().enumerate().map(|(i, &(name, ty))| { + self.args.iter().enumerate().map(|(i, &(ref name, ty))| { MemberDescription { - name: name, + name: name.to_str(), llvm_type: type_of::type_of(cx, ty), type_metadata: match self.discriminant_type_metadata { Some(metadata) if i == 0 => metadata, @@ -1362,7 +1369,8 @@ fn describe_enum_variant(cx: &CrateContext, file_metadata: DIFile, span: Span) -> (DICompositeType, Type, @MemberDescriptionFactory) { - let variant_name = token::ident_to_str(&variant_info.name); + let variant_info_string = token::get_ident(variant_info.name.name); + let variant_name = variant_info_string.get(); let variant_llvm_type = Type::struct_(struct_def.fields.map(|&t| type_of::type_of(cx, t)), struct_def.packed); // Could some consistency checks here: size, align, field count, discr type @@ -1395,19 +1403,24 @@ fn describe_enum_variant(cx: &CrateContext, // Get the argument names from the enum variant info let mut arg_names = match variant_info.arg_names { - Some(ref names) => names.map(|ident| token::ident_to_str(ident)), - None => variant_info.args.map(|_| @"") + Some(ref names) => { + names.map(|ident| { + let string = token::get_ident(ident.name); + string.get().to_str() + }) + } + None => variant_info.args.map(|_| ~"") }; // If this is not a univariant enum, there is also the (unnamed) discriminant field if discriminant_type_metadata.is_some() { - arg_names.insert(0, @""); + arg_names.insert(0, ~""); } // Build an array of (field name, field type) pairs to be captured in the factory closure. 
- let args: ~[(@str, ty::t)] = arg_names.iter() + let args: ~[(~str, ty::t)] = arg_names.iter() .zip(struct_def.fields.iter()) - .map(|(&s, &t)| (s, t)) + .map(|(s, &t)| (s.to_str(), t)) .collect(); let member_description_factory = @@ -1452,7 +1465,8 @@ fn prepare_enum_metadata(cx: &CrateContext, let enumerators_metadata: ~[DIDescriptor] = variants .iter() .map(|v| { - let name: &str = token::ident_to_str(&v.name); + let string = token::get_ident(v.name.name); + let name: &str = string.get(); let discriminant_value = v.disr_val as c_ulonglong; name.with_c_str(|name| { @@ -1580,7 +1594,7 @@ enum MemberOffset { } struct MemberDescription { - name: @str, + name: ~str, llvm_type: Type, type_metadata: DIType, offset: MemberOffset, @@ -1737,31 +1751,31 @@ fn boxed_type_metadata(cx: &CrateContext, let member_descriptions = [ MemberDescription { - name: @"refcnt", + name: ~"refcnt", llvm_type: member_llvm_types[0], type_metadata: type_metadata(cx, int_type, codemap::DUMMY_SP), offset: ComputedMemberOffset, }, MemberDescription { - name: @"tydesc", + name: ~"tydesc", llvm_type: member_llvm_types[1], type_metadata: nil_pointer_type_metadata, offset: ComputedMemberOffset, }, MemberDescription { - name: @"prev", + name: ~"prev", llvm_type: member_llvm_types[2], type_metadata: nil_pointer_type_metadata, offset: ComputedMemberOffset, }, MemberDescription { - name: @"next", + name: ~"next", llvm_type: member_llvm_types[3], type_metadata: nil_pointer_type_metadata, offset: ComputedMemberOffset, }, MemberDescription { - name: @"val", + name: ~"val", llvm_type: member_llvm_types[4], type_metadata: content_type_metadata, offset: ComputedMemberOffset, @@ -1848,19 +1862,19 @@ fn vec_metadata(cx: &CrateContext, let member_descriptions = [ MemberDescription { - name: @"fill", + name: ~"fill", llvm_type: member_llvm_types[0], type_metadata: int_type_metadata, offset: ComputedMemberOffset, }, MemberDescription { - name: @"alloc", + name: ~"alloc", llvm_type: member_llvm_types[1], type_metadata: int_type_metadata, offset: ComputedMemberOffset, }, MemberDescription { - name: @"elements", + name: ~"elements", llvm_type: member_llvm_types[2], type_metadata: array_type_metadata, offset: ComputedMemberOffset, @@ -1917,13 +1931,13 @@ fn vec_slice_metadata(cx: &CrateContext, let member_descriptions = [ MemberDescription { - name: @"data_ptr", + name: ~"data_ptr", llvm_type: member_llvm_types[0], type_metadata: type_metadata(cx, data_ptr_type, span), offset: ComputedMemberOffset, }, MemberDescription { - name: @"length", + name: ~"length", llvm_type: member_llvm_types[1], type_metadata: type_metadata(cx, ty::mk_uint(), span), offset: ComputedMemberOffset, @@ -1996,9 +2010,10 @@ fn trait_metadata(cx: &CrateContext, // the trait's methods. 
let path = ty::item_path(cx.tcx, def_id); let ident = path.last().unwrap().ident(); + let ident_string = token::get_ident(ident.name); let name = ppaux::trait_store_to_str(cx.tcx, trait_store) + ppaux::mutability_to_str(mutability) + - token::ident_to_str(&ident); + ident_string.get(); // Add type and region parameters let name = ppaux::parameterized(cx.tcx, name, &substs.regions, substs.tps, def_id, true); @@ -2006,7 +2021,7 @@ fn trait_metadata(cx: &CrateContext, let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id, usage_site_span); - let file_name = span_start(cx, definition_span).file.name; + let file_name = span_start(cx, definition_span).file.name.clone(); let file_metadata = file_metadata(cx, file_name); let trait_llvm_type = type_of::type_of(cx, trait_type); @@ -2079,8 +2094,7 @@ fn type_metadata(cx: &CrateContext, pointer_type_metadata(cx, t, vec_metadata) } ty::vstore_box => { - let boxed_vec_metadata = boxed_vec_metadata(cx, i8_t, usage_site_span); - pointer_type_metadata(cx, t, boxed_vec_metadata) + fail!("unexpected managed string") } ty::vstore_slice(_region) => { vec_slice_metadata(cx, t, i8_t, usage_site_span) @@ -2714,7 +2728,7 @@ fn populate_scope_map(cx: &CrateContext, ast::ExprInlineAsm(ast::InlineAsm { inputs: ref inputs, outputs: ref outputs, .. }) => { - // inputs, outputs: ~[(@str, @expr)] + // inputs, outputs: ~[(~str, @expr)] for &(_, exp) in inputs.iter() { walk_expr(cx, exp, scope_stack, scope_map); } @@ -2755,8 +2769,10 @@ impl NamespaceTreeNode { } None => {} } - let name = token::ident_to_str(&node.ident); - output.push_str(format!("{}{}", name.len(), name)); + let string = token::get_ident(node.ident.name); + output.push_str(format!("{}{}", + string.get().len(), + string.get())); } } } @@ -2807,7 +2823,8 @@ fn namespace_for_item(cx: &CrateContext, Some(node) => node.scope, None => ptr::null() }; - let namespace_name = token::ident_to_str(&ident); + let namespace_name_string = token::get_ident(ident.name); + let namespace_name = namespace_name_string.get(); let namespace_metadata = unsafe { namespace_name.with_c_str(|namespace_name| { diff --git a/src/librustc/middle/trans/expr.rs b/src/librustc/middle/trans/expr.rs index d0a01f56b530c..bbe5bdc967bb8 100644 --- a/src/librustc/middle/trans/expr.rs +++ b/src/librustc/middle/trans/expr.rs @@ -543,9 +543,7 @@ fn trans_datum_unadjusted<'a>(bcx: &'a Block<'a>, let heap = heap_exchange; return trans_boxed_expr(bcx, box_ty, contents, contents_ty, heap) } - ast::ExprLit(lit) => { - trans_immediate_lit(bcx, expr, *lit) - } + ast::ExprLit(lit) => trans_immediate_lit(bcx, expr, (*lit).clone()), ast::ExprBinary(_, op, lhs, rhs) => { // if overloaded, would be RvalueDpsExpr { @@ -836,8 +834,8 @@ fn trans_rvalue_dps_unadjusted<'a>(bcx: &'a Block<'a>, } ast::ExprLit(lit) => { match lit.node { - ast::LitStr(s, _) => { - tvec::trans_lit_str(bcx, expr, s, dest) + ast::LitStr(ref s, _) => { + tvec::trans_lit_str(bcx, expr, (*s).clone(), dest) } _ => { bcx.tcx() @@ -1799,9 +1797,9 @@ fn trans_log_level<'a>(bcx: &'a Block<'a>) let external_srcs = ccx.external_srcs.borrow(); srccrate = match external_srcs.get().find(&bcx.fcx.id) { Some(&src) => { - ccx.sess.cstore.get_crate_data(src.crate).name + ccx.sess.cstore.get_crate_data(src.crate).name.clone() } - None => ccx.link_meta.crateid.name.to_managed(), + None => ccx.link_meta.crateid.name.to_str(), }; }; let mut modpath = ~[PathMod(ccx.sess.ident_of(srccrate))]; diff --git a/src/librustc/middle/trans/foreign.rs b/src/librustc/middle/trans/foreign.rs 
index d9a34e1da7d51..bc9dd767ec670 100644 --- a/src/librustc/middle/trans/foreign.rs +++ b/src/librustc/middle/trans/foreign.rs @@ -31,7 +31,8 @@ use std::vec; use syntax::abi::{Cdecl, Aapcs, C, AbiSet, Win64}; use syntax::abi::{RustIntrinsic, Rust, Stdcall, Fastcall, System}; use syntax::codemap::Span; -use syntax::parse::token::special_idents; +use syntax::parse::token::{InternedString, special_idents}; +use syntax::parse::token; use syntax::{ast}; use syntax::{attr, ast_map}; use util::ppaux::{Repr, UserString}; @@ -135,7 +136,7 @@ pub fn register_foreign_item_fn(ccx: @CrateContext, }; // Register the function as a C extern fn - let lname = link_name(ccx, foreign_item); + let lname = link_name(foreign_item); let tys = foreign_types_for_id(ccx, foreign_item.id); // Make sure the calling convention is right for variadic functions @@ -150,8 +151,12 @@ pub fn register_foreign_item_fn(ccx: @CrateContext, let llfn; { let mut externs = ccx.externs.borrow_mut(); - llfn = base::get_extern_fn(externs.get(), ccx.llmod, lname, - cc, llfn_ty, tys.fn_sig.output); + llfn = base::get_extern_fn(externs.get(), + ccx.llmod, + lname.get(), + cc, + llfn_ty, + tys.fn_sig.output); }; add_argument_attributes(&tys, llfn); @@ -372,9 +377,9 @@ pub fn trans_foreign_mod(ccx: @CrateContext, _ => () } - let lname = link_name(ccx, foreign_item); + let lname = link_name(foreign_item); let mut item_symbols = ccx.item_symbols.borrow_mut(); - item_symbols.get().insert(foreign_item.id, lname.to_owned()); + item_symbols.get().insert(foreign_item.id, lname.get().to_owned()); } } @@ -726,10 +731,10 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @CrateContext, // This code is kind of a confused mess and needs to be reworked given // the massive simplifications that have occurred. -pub fn link_name(ccx: &CrateContext, i: @ast::ForeignItem) -> @str { +pub fn link_name(i: @ast::ForeignItem) -> InternedString { match attr::first_attr_value_str_by_name(i.attrs, "link_name") { - None => ccx.sess.str_of(i.ident), - Some(ln) => ln, + None => token::get_ident(i.ident.name), + Some(ln) => ln.clone(), } } diff --git a/src/librustc/middle/trans/glue.rs b/src/librustc/middle/trans/glue.rs index ab77d105e5fea..ebcb97b91beb5 100644 --- a/src/librustc/middle/trans/glue.rs +++ b/src/librustc/middle/trans/glue.rs @@ -15,31 +15,31 @@ use back::abi; use back::link::*; -use lib; use lib::llvm::{llvm, ValueRef, True}; +use lib; use middle::lang_items::{FreeFnLangItem, ExchangeFreeFnLangItem}; use middle::trans::adt; use middle::trans::base::*; +use middle::trans::build::*; use middle::trans::callee; use middle::trans::cleanup; use middle::trans::cleanup::CleanupMethods; use middle::trans::common::*; -use middle::trans::build::*; use middle::trans::expr; use middle::trans::machine::*; use middle::trans::reflect; use middle::trans::tvec; +use middle::trans::type_::Type; use middle::trans::type_of::type_of; use middle::ty; -use util::ppaux; use util::ppaux::ty_to_short_str; - -use middle::trans::type_::Type; +use util::ppaux; use std::c_str::ToCStr; use std::cell::Cell; use std::libc::c_uint; use syntax::ast; +use syntax::parse::token; pub fn trans_free<'a>(cx: &'a Block<'a>, v: ValueRef) -> &'a Block<'a> { let _icx = push_ctxt("trans_free"); @@ -470,16 +470,17 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> @tydesc_info { let llsize = llsize_of(ccx, llty); let llalign = llalign_of(ccx, llty); - let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc").to_managed(); - note_unique_llvm_symbol(ccx, name); + let name = 
mangle_internal_name_by_type_and_seq(ccx, t, "tydesc"); debug!("+++ declare_tydesc {} {}", ppaux::ty_to_str(ccx.tcx, t), name); let gvar = name.with_c_str(|buf| { unsafe { llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type.to_ref(), buf) } }); + note_unique_llvm_symbol(ccx, name); - let ty_name = C_str_slice(ccx, ppaux::ty_to_str(ccx.tcx, t).to_managed()); + let ty_name = token::intern_and_get_ident(ppaux::ty_to_str(ccx.tcx, t)); + let ty_name = C_str_slice(ccx, ty_name); let inf = @tydesc_info { ty: t, @@ -497,10 +498,10 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> @tydesc_info { fn declare_generic_glue(ccx: &CrateContext, t: ty::t, llfnty: Type, name: &str) -> ValueRef { let _icx = push_ctxt("declare_generic_glue"); - let fn_nm = mangle_internal_name_by_type_and_seq(ccx, t, (~"glue_" + name)).to_managed(); + let fn_nm = mangle_internal_name_by_type_and_seq(ccx, t, ~"glue_" + name); debug!("{} is for type {}", fn_nm, ppaux::ty_to_str(ccx.tcx, t)); - note_unique_llvm_symbol(ccx, fn_nm); let llfn = decl_cdecl_fn(ccx.llmod, fn_nm, llfnty, ty::mk_nil()); + note_unique_llvm_symbol(ccx, fn_nm); return llfn; } diff --git a/src/librustc/middle/trans/intrinsic.rs b/src/librustc/middle/trans/intrinsic.rs index b662d08062fdf..9bbdd8f9094a7 100644 --- a/src/librustc/middle/trans/intrinsic.rs +++ b/src/librustc/middle/trans/intrinsic.rs @@ -328,8 +328,10 @@ pub fn trans_intrinsic(ccx: @CrateContext, Ret(bcx, td); } "type_id" => { - let hash = ty::hash_crate_independent(ccx.tcx, substs.tys[0], - ccx.link_meta.crate_hash); + let hash = ty::hash_crate_independent( + ccx.tcx, + substs.tys[0], + ccx.link_meta.crate_hash.clone()); // NB: This needs to be kept in lockstep with the TypeId struct in // libstd/unstable/intrinsics.rs let val = C_named_struct(type_of::type_of(ccx, output_type), [C_u64(hash)]); diff --git a/src/librustc/middle/trans/reflect.rs b/src/librustc/middle/trans/reflect.rs index cbfd83309a4a0..25aca8bce52ed 100644 --- a/src/librustc/middle/trans/reflect.rs +++ b/src/librustc/middle/trans/reflect.rs @@ -20,6 +20,7 @@ use middle::trans::datum::*; use middle::trans::glue; use middle::trans::machine; use middle::trans::meth; +use middle::trans::type_::Type; use middle::trans::type_of::*; use middle::ty; use util::ppaux::ty_to_str; @@ -30,9 +31,8 @@ use std::vec; use syntax::ast::DefId; use syntax::ast; use syntax::ast_map::PathName; -use syntax::parse::token::special_idents; - -use middle::trans::type_::Type; +use syntax::parse::token::{InternedString, special_idents}; +use syntax::parse::token; pub struct Reflector<'a> { visitor_val: ValueRef, @@ -55,14 +55,14 @@ impl<'a> Reflector<'a> { C_bool(b) } - pub fn c_slice(&mut self, s: @str) -> ValueRef { + pub fn c_slice(&mut self, s: InternedString) -> ValueRef { // We're careful to not use first class aggregates here because that // will kick us off fast isel. (Issue #4352.) 
let bcx = self.bcx; let str_vstore = ty::vstore_slice(ty::ReStatic); let str_ty = ty::mk_str(bcx.tcx(), str_vstore); let scratch = rvalue_scratch_datum(bcx, str_ty, ""); - let len = C_uint(bcx.ccx(), s.len()); + let len = C_uint(bcx.ccx(), s.get().len()); let c_str = PointerCast(bcx, C_cstr(bcx.ccx(), s), Type::i8p()); Store(bcx, c_str, GEPi(bcx, scratch.val, [ 0, 0 ])); Store(bcx, len, GEPi(bcx, scratch.val, [ 0, 1 ])); @@ -259,15 +259,19 @@ impl<'a> Reflector<'a> { fields[0].ident.name != special_idents::unnamed_field.name; } - let extra = ~[self.c_slice(ty_to_str(tcx, t).to_managed()), - self.c_bool(named_fields), - self.c_uint(fields.len())] + self.c_size_and_align(t); + let extra = ~[ + self.c_slice(token::intern_and_get_ident(ty_to_str(tcx, + t))), + self.c_bool(named_fields), + self.c_uint(fields.len()) + ] + self.c_size_and_align(t); self.bracketed("class", extra, |this| { for (i, field) in fields.iter().enumerate() { - let extra = ~[this.c_uint(i), - this.c_slice(bcx.ccx().sess.str_of(field.ident)), - this.c_bool(named_fields)] - + this.c_mt(&field.mt); + let extra = ~[ + this.c_uint(i), + this.c_slice(token::get_ident(field.ident.name)), + this.c_bool(named_fields) + ] + this.c_mt(&field.mt); this.visit("class_field", extra); } }) @@ -322,7 +326,7 @@ impl<'a> Reflector<'a> { + self.c_size_and_align(t); self.bracketed("enum", enum_args, |this| { for (i, v) in variants.iter().enumerate() { - let name = ccx.sess.str_of(v.name); + let name = token::get_ident(v.name.name); let variant_args = ~[this.c_uint(i), C_u64(v.disr_val), this.c_uint(v.args.len()), @@ -344,7 +348,9 @@ impl<'a> Reflector<'a> { } ty::ty_trait(_, _, _, _, _) => { - let extra = [self.c_slice(ty_to_str(tcx, t).to_managed())]; + let extra = [ + self.c_slice(token::intern_and_get_ident(ty_to_str(tcx, t))) + ]; self.visit("trait", extra); } diff --git a/src/librustc/middle/trans/tvec.rs b/src/librustc/middle/trans/tvec.rs index 5754a9ba88b99..ea1e0f78f449f 100644 --- a/src/librustc/middle/trans/tvec.rs +++ b/src/librustc/middle/trans/tvec.rs @@ -31,6 +31,7 @@ use middle::ty; use util::ppaux::ty_to_str; use syntax::ast; +use syntax::parse::token::InternedString; // Boxed vector types are in some sense currently a "shorthand" for a box // containing an unboxed vector. This expands a boxed vector type into such an @@ -231,8 +232,11 @@ pub fn trans_slice_vstore<'a>( match content_expr.node { ast::ExprLit(lit) => { match lit.node { - ast::LitStr(s, _) => { - return trans_lit_str(bcx, content_expr, s, dest); + ast::LitStr(ref s, _) => { + return trans_lit_str(bcx, + content_expr, + s.clone(), + dest) } _ => {} } @@ -284,7 +288,7 @@ pub fn trans_slice_vstore<'a>( pub fn trans_lit_str<'a>( bcx: &'a Block<'a>, lit_expr: &ast::Expr, - str_lit: @str, + str_lit: InternedString, dest: Dest) -> &'a Block<'a> { /*! 
@@ -301,7 +305,7 @@ pub fn trans_lit_str<'a>( Ignore => bcx, SaveIn(lldest) => { unsafe { - let bytes = str_lit.len(); + let bytes = str_lit.get().len(); let llbytes = C_uint(bcx.ccx(), bytes); let llcstr = C_cstr(bcx.ccx(), str_lit); let llcstr = llvm::LLVMConstPointerCast(llcstr, Type::i8p().to_ref()); @@ -336,12 +340,16 @@ pub fn trans_uniq_or_managed_vstore<'a>(bcx: &'a Block<'a>, match content_expr.node { ast::ExprLit(lit) => { match lit.node { - ast::LitStr(s, _) => { - let llptrval = C_cstr(bcx.ccx(), s); - let llptrval = PointerCast(bcx, llptrval, Type::i8p()); - let llsizeval = C_uint(bcx.ccx(), s.len()); + ast::LitStr(ref s, _) => { + let llptrval = C_cstr(bcx.ccx(), (*s).clone()); + let llptrval = PointerCast(bcx, + llptrval, + Type::i8p()); + let llsizeval = C_uint(bcx.ccx(), s.get().len()); let typ = ty::mk_str(bcx.tcx(), ty::vstore_uniq); - let lldestval = rvalue_scratch_datum(bcx, typ, ""); + let lldestval = rvalue_scratch_datum(bcx, + typ, + ""); let alloc_fn = langcall(bcx, Some(lit.span), "", @@ -405,15 +413,13 @@ pub fn write_content<'a>( match content_expr.node { ast::ExprLit(lit) => { match lit.node { - ast::LitStr(s, _) => { + ast::LitStr(ref s, _) => { match dest { - Ignore => { - return bcx; - } + Ignore => return bcx, SaveIn(lldest) => { - let bytes = s.len(); + let bytes = s.get().len(); let llbytes = C_uint(bcx.ccx(), bytes); - let llcstr = C_cstr(bcx.ccx(), s); + let llcstr = C_cstr(bcx.ccx(), (*s).clone()); base::call_memcpy(bcx, lldest, llcstr, @@ -516,7 +522,7 @@ pub fn elements_required(bcx: &Block, content_expr: &ast::Expr) -> uint { match content_expr.node { ast::ExprLit(lit) => { match lit.node { - ast::LitStr(s, _) => s.len(), + ast::LitStr(ref s, _) => s.get().len(), _ => { bcx.tcx().sess.span_bug(content_expr.span, "Unexpected evec content") diff --git a/src/librustc/middle/trans/type_of.rs b/src/librustc/middle/trans/type_of.rs index 86456187d1ad1..b440db66a12ff 100644 --- a/src/librustc/middle/trans/type_of.rs +++ b/src/librustc/middle/trans/type_of.rs @@ -222,7 +222,7 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type { adt::incomplete_type_of(cx, repr, name) } ty::ty_str(ty::vstore_box) => { - Type::at_box(cx, Type::vec(cx.sess.targ_cfg.arch, &Type::i8())).ptr_to() + fail!("unexpected managed string") } ty::ty_vec(ref mt, ty::vstore_box) => { let e_ty = type_of(cx, mt.ty); diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index f216a1cc0a255..fcf07b70a29d7 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -291,7 +291,7 @@ pub struct ctxt_ { freevars: RefCell, tcache: type_cache, rcache: creader_cache, - short_names_cache: RefCell>, + short_names_cache: RefCell>, needs_unwind_cleanup_cache: RefCell>, tc_cache: RefCell>, ast_ty_to_ty_cache: RefCell>, @@ -3334,9 +3334,10 @@ pub fn field_idx_strict(tcx: ty::ctxt, name: ast::Name, fields: &[field]) -> uint { let mut i = 0u; for f in fields.iter() { if f.ident.name == name { return i; } i += 1u; } + let string = token::get_ident(name); tcx.sess.bug(format!( "No field named `{}` found in the list of fields `{:?}`", - token::interner_get(name), + string.get(), fields.map(|f| tcx.sess.str_of(f.ident)))); } @@ -4155,7 +4156,7 @@ pub fn each_attr(tcx: ctxt, did: DefId, f: |@MetaItem| -> bool) -> bool { pub fn has_attr(tcx: ctxt, did: DefId, attr: &str) -> bool { let mut found = false; each_attr(tcx, did, |item| { - if attr == item.name() { + if item.name().equiv(&attr) { found = true; false } else { @@ -4824,7 +4825,7 @@ pub fn trait_method_of_method(tcx: ctxt, /// 
Creates a hash of the type `t` which will be the same no matter what crate /// context it's calculated within. This is used by the `type_id` intrinsic. -pub fn hash_crate_independent(tcx: ctxt, t: t, local_hash: @str) -> u64 { +pub fn hash_crate_independent(tcx: ctxt, t: t, local_hash: ~str) -> u64 { use std::hash::{SipState, Streaming}; let mut hash = SipState::new(0, 0); @@ -4855,7 +4856,7 @@ pub fn hash_crate_independent(tcx: ctxt, t: t, local_hash: @str) -> u64 { }; let did = |hash: &mut SipState, did: DefId| { let h = if ast_util::is_local(did) { - local_hash + local_hash.clone() } else { tcx.sess.cstore.get_crate_hash(did.crate) }; diff --git a/src/librustc/middle/typeck/astconv.rs b/src/librustc/middle/typeck/astconv.rs index 565ff4a734585..62d1e13bc7d0e 100644 --- a/src/librustc/middle/typeck/astconv.rs +++ b/src/librustc/middle/typeck/astconv.rs @@ -413,8 +413,17 @@ pub fn ast_ty_to_ty( // will run after this as long as the path isn't a trait. let def_map = tcx.def_map.borrow(); match def_map.get().find(&id) { - Some(&ast::DefPrimTy(ast::TyStr)) if a_seq_ty.mutbl == ast::MutImmutable => { + Some(&ast::DefPrimTy(ast::TyStr)) if + a_seq_ty.mutbl == ast::MutImmutable => { check_path_args(tcx, path, NO_TPS | NO_REGIONS); + match vst { + ty::vstore_box => { + tcx.sess.span_err(path.span, + "managed strings are not \ + supported") + } + _ => {} + } return ty::mk_str(tcx, vst); } Some(&ast::DefTrait(trait_def_id)) => { diff --git a/src/librustc/middle/typeck/check/_match.rs b/src/librustc/middle/typeck/check/_match.rs index 9303bf80208a7..37862943dd7d5 100644 --- a/src/librustc/middle/typeck/check/_match.rs +++ b/src/librustc/middle/typeck/check/_match.rs @@ -339,9 +339,11 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt, if found_fields.contains(&i) { continue; } + + let string = token::get_ident(field.name); tcx.sess.span_err(span, format!("pattern does not mention field `{}`", - token::interner_get(field.name))); + string.get())); } } } diff --git a/src/librustc/middle/typeck/check/method.rs b/src/librustc/middle/typeck/check/method.rs index 398b4cca015b4..09d04753def22 100644 --- a/src/librustc/middle/typeck/check/method.rs +++ b/src/librustc/middle/typeck/check/method.rs @@ -555,8 +555,10 @@ impl<'a> LookupContext<'a> { return; // already visited } } + + let method_name = token::get_ident(self.m_name); debug!("push_candidates_from_impl: {} {} {}", - token::interner_get(self.m_name), + method_name.get(), impl_info.ident.repr(self.tcx()), impl_info.methods.map(|m| m.ident).repr(self.tcx())); diff --git a/src/librustc/middle/typeck/check/mod.rs b/src/librustc/middle/typeck/check/mod.rs index 31dc0063ad606..4eca5e8a34a9c 100644 --- a/src/librustc/middle/typeck/check/mod.rs +++ b/src/librustc/middle/typeck/check/mod.rs @@ -2335,9 +2335,11 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt, fcx.type_error_message( expr.span, |actual| { + let string = token::get_ident(field); format!("attempted to take value of method `{}` on type `{}` \ - (try writing an anonymous function)", - token::interner_get(field), actual) + (try writing an anonymous function)", + string.get(), + actual) }, expr_t, None); } @@ -2346,9 +2348,11 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt, fcx.type_error_message( expr.span, |actual| { + let string = token::get_ident(field); format!("attempted access of field `{}` on type `{}`, \ - but no field with that name was found", - token::interner_get(field), actual) + but no field with that name was found", + string.get(), + actual) }, expr_t, None); } @@ -2428,8 +2432,8 @@ 
pub fn check_expr_with_unifier(fcx: @FnCtxt, let name = class_field.name; let (_, seen) = *class_field_map.get(&name); if !seen { - missing_fields.push( - ~"`" + token::interner_get(name) + "`"); + let string = token::get_ident(name); + missing_fields.push(~"`" + string.get() + "`"); } } diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index f391239df3304..8b6cfe88f4110 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -824,7 +824,8 @@ impl Repr for ty::Method { impl Repr for ast::Ident { fn repr(&self, _tcx: ctxt) -> ~str { - token::ident_to_str(self).to_owned() + let string = token::get_ident(self.name); + string.get().to_str() } } diff --git a/src/librustdoc/clean.rs b/src/librustdoc/clean.rs index e86122fb7d1e6..4f7e6df95e1cb 100644 --- a/src/librustdoc/clean.rs +++ b/src/librustdoc/clean.rs @@ -11,8 +11,6 @@ //! This module contains the "cleaned" pieces of the AST, and the functions //! that clean them. -use its = syntax::parse::token::ident_to_str; - use syntax; use syntax::ast; use syntax::ast_map; @@ -20,6 +18,8 @@ use syntax::ast_util; use syntax::attr; use syntax::attr::AttributeMethods; use syntax::codemap::Pos; +use syntax::parse::token::InternedString; +use syntax::parse::token; use rustc::metadata::cstore; use rustc::metadata::csearch; @@ -223,9 +223,13 @@ pub enum Attribute { impl Clean for ast::MetaItem { fn clean(&self) -> Attribute { match self.node { - ast::MetaWord(s) => Word(s.to_owned()), - ast::MetaList(ref s, ref l) => List(s.to_owned(), l.clean()), - ast::MetaNameValue(s, ref v) => NameValue(s.to_owned(), lit_to_str(v)) + ast::MetaWord(ref s) => Word(s.get().to_owned()), + ast::MetaList(ref s, ref l) => { + List(s.get().to_owned(), l.clean()) + } + ast::MetaNameValue(ref s, ref v) => { + NameValue(s.get().to_owned(), lit_to_str(v)) + } } } } @@ -238,21 +242,24 @@ impl Clean for ast::Attribute { // This is a rough approximation that gets us what we want. 
impl<'a> attr::AttrMetaMethods for &'a Attribute { - fn name(&self) -> @str { + fn name(&self) -> InternedString { match **self { - Word(ref n) | List(ref n, _) | NameValue(ref n, _) => - n.to_managed() + Word(ref n) | List(ref n, _) | NameValue(ref n, _) => { + token::intern_and_get_ident(*n) + } } } - fn value_str(&self) -> Option<@str> { + fn value_str(&self) -> Option { match **self { - NameValue(_, ref v) => Some(v.to_managed()), + NameValue(_, ref v) => Some(token::intern_and_get_ident(*v)), _ => None, } } fn meta_item_list<'a>(&'a self) -> Option<&'a [@ast::MetaItem]> { None } - fn name_str_pair(&self) -> Option<(@str, @str)> { None } + fn name_str_pair(&self) -> Option<(InternedString, InternedString)> { + None + } } #[deriving(Clone, Encodable, Decodable)] @@ -867,11 +874,14 @@ impl Clean for ast::PathSegment { } fn path_to_str(p: &ast::Path) -> ~str { - use syntax::parse::token::interner_get; + use syntax::parse::token; let mut s = ~""; let mut first = true; - for i in p.segments.iter().map(|x| interner_get(x.identifier.name)) { + for i in p.segments.iter().map(|x| { + let string = token::get_ident(x.identifier.name); + string.get().to_str() + }) { if !first || p.global { s.push_str("::"); } else { @@ -884,7 +894,8 @@ fn path_to_str(p: &ast::Path) -> ~str { impl Clean<~str> for ast::Ident { fn clean(&self) -> ~str { - its(self).to_owned() + let string = token::get_ident(self.name); + string.get().to_owned() } } @@ -1030,8 +1041,13 @@ pub enum ViewItemInner { impl Clean for ast::ViewItem_ { fn clean(&self) -> ViewItemInner { match self { - &ast::ViewItemExternMod(ref i, ref p, ref id) => - ExternMod(i.clean(), p.map(|(ref x, _)| x.to_owned()), *id), + &ast::ViewItemExternMod(ref i, ref p, ref id) => { + let string = match *p { + None => None, + Some((ref x, _)) => Some(x.get().to_owned()), + }; + ExternMod(i.clean(), string, *id) + } &ast::ViewItemUse(ref vp) => Import(vp.clean()) } } @@ -1137,14 +1153,14 @@ impl ToSource for syntax::codemap::Span { fn lit_to_str(lit: &ast::Lit) -> ~str { match lit.node { - ast::LitStr(st, _) => st.to_owned(), + ast::LitStr(ref st, _) => st.get().to_owned(), ast::LitBinary(data) => format!("{:?}", data.as_slice()), ast::LitChar(c) => ~"'" + std::char::from_u32(c).unwrap().to_str() + "'", ast::LitInt(i, _t) => i.to_str(), ast::LitUint(u, _t) => u.to_str(), ast::LitIntUnsuffixed(i) => i.to_str(), - ast::LitFloat(f, _t) => f.to_str(), - ast::LitFloatUnsuffixed(f) => f.to_str(), + ast::LitFloat(ref f, _t) => f.get().to_str(), + ast::LitFloatUnsuffixed(ref f) => f.get().to_str(), ast::LitBool(b) => b.to_str(), ast::LitNil => ~"", } diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index e4260e367a879..0e2d6c972ae60 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -15,6 +15,7 @@ use rustc::middle::privacy; use syntax::ast; use syntax::diagnostic; +use syntax::parse::token; use syntax::parse; use syntax; @@ -71,7 +72,8 @@ fn get_ast_and_resolve(cpath: &Path, let mut cfg = build_configuration(sess); for cfg_ in cfgs.move_iter() { - cfg.push(@dummy_spanned(ast::MetaWord(cfg_.to_managed()))); + let cfg_ = token::intern_and_get_ident(cfg_); + cfg.push(@dummy_spanned(ast::MetaWord(cfg_))); } let crate = phase_1_parse_input(sess, cfg.clone(), &input); diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index 90ed4a4c744c5..c144907096cce 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -45,6 +45,7 @@ use extra::arc::Arc; use extra::json::ToJson; use syntax::ast; use 
syntax::attr; +use syntax::parse::token::InternedString; use clean; use doctree; @@ -803,12 +804,13 @@ impl<'a> Item<'a> { impl<'a> fmt::Default for Item<'a> { fn fmt(it: &Item<'a>, fmt: &mut fmt::Formatter) { match attr::find_stability(it.item.attrs.iter()) { - Some(stability) => { + Some(ref stability) => { write!(fmt.buf, "{lvl}", lvl = stability.level.to_str(), reason = match stability.text { - Some(s) => s, None => @"", + Some(ref s) => (*s).clone(), + None => InternedString::new(""), }); } None => {} diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index 9271af9d575ea..12874d1b502a2 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -137,7 +137,7 @@ fn runtest(test: &str, cratename: &str, libs: HashSet) { } } -fn maketest(s: &str, cratename: &str) -> @str { +fn maketest(s: &str, cratename: &str) -> ~str { let mut prog = ~r" #[deny(warnings)]; #[allow(unused_variable, dead_assignment, unused_mut, attribute_usage, dead_code)]; @@ -156,7 +156,7 @@ fn maketest(s: &str, cratename: &str) -> @str { prog.push_str("\n}"); } - return prog.to_managed(); + return prog; } pub struct Collector { diff --git a/src/librustpkg/util.rs b/src/librustpkg/util.rs index 1f8962fbd3af0..ba31699a7d04e 100644 --- a/src/librustpkg/util.rs +++ b/src/librustpkg/util.rs @@ -30,6 +30,8 @@ use syntax::ext::base::{ExtCtxt, MacroCrate}; use syntax::{ast, attr, codemap, diagnostic, fold, visit}; use syntax::attr::AttrMetaMethods; use syntax::fold::Folder; +use syntax::parse::token::InternedString; +use syntax::parse::token; use syntax::visit::Visitor; use syntax::util::small_vector::SmallVector; use syntax::crateid::CrateId; @@ -77,7 +79,7 @@ fn fold_mod(m: &ast::Mod, fold: &mut CrateSetup) -> ast::Mod { fn strip_main(item: @ast::Item) -> @ast::Item { @ast::Item { attrs: item.attrs.iter().filter_map(|attr| { - if "main" != attr.name() { + if !attr.name().equiv(&("main")) { Some(*attr) } else { None @@ -101,13 +103,15 @@ fn fold_item(item: @ast::Item, fold: &mut CrateSetup) let mut had_pkg_do = false; for attr in item.attrs.iter() { - if "pkg_do" == attr.name() { + if attr.name().equiv(&("pkg_do")) { had_pkg_do = true; match attr.node.value.node { ast::MetaList(_, ref mis) => { for mi in mis.iter() { match mi.node { - ast::MetaWord(cmd) => cmds.push(cmd.to_owned()), + ast::MetaWord(ref cmd) => { + cmds.push(cmd.get().to_owned()) + } _ => {} }; } @@ -314,7 +318,9 @@ pub fn compile_input(context: &BuildContext, if !attr::contains_name(crate.attrs, "crate_id") { // FIXME (#9639): This needs to handle non-utf8 paths let crateid_attr = - attr::mk_name_value_item_str(@"crate_id", crate_id.to_str().to_managed()); + attr::mk_name_value_item_str( + InternedString::new("crate_id"), + token::intern_and_get_ident(crate_id.to_str())); debug!("crateid attr: {:?}", crateid_attr); crate.attrs.push(attr::mk_attr(crateid_attr)); @@ -466,13 +472,14 @@ impl<'a> CrateInstaller<'a> { match vi.node { // ignore metadata, I guess - ast::ViewItemExternMod(lib_ident, path_opt, _) => { - let lib_name = match path_opt { - Some((p, _)) => p, - None => self.sess.str_of(lib_ident) + ast::ViewItemExternMod(ref lib_ident, ref path_opt, _) => { + let lib_name = match *path_opt { + Some((ref p, _)) => (*p).clone(), + None => token::get_ident(lib_ident.name), }; debug!("Finding and installing... 
{}", lib_name); - let crate_id: CrateId = from_str(lib_name).expect("valid crate id"); + let crate_id: CrateId = + from_str(lib_name.get()).expect("valid crate id"); // Check standard Rust library path first let whatever = system_library(&self.context.sysroot_to_use(), &crate_id); debug!("system library returned {:?}", whatever); @@ -642,7 +649,7 @@ pub fn find_and_install_dependencies(installer: &mut CrateInstaller, visit::walk_crate(installer, c, ()) } -pub fn mk_string_lit(s: @str) -> ast::Lit { +pub fn mk_string_lit(s: InternedString) -> ast::Lit { Spanned { node: ast::LitStr(s, ast::CookedStr), span: DUMMY_SP diff --git a/src/libstd/at_vec.rs b/src/libstd/at_vec.rs index 18cb470db4da2..cdc2e292fe8a6 100644 --- a/src/libstd/at_vec.rs +++ b/src/libstd/at_vec.rs @@ -338,7 +338,6 @@ mod test { assert_eq!(to_managed::([]), @[]); assert_eq!(to_managed([true]), @[true]); assert_eq!(to_managed([1, 2, 3, 4, 5]), @[1, 2, 3, 4, 5]); - assert_eq!(to_managed([@"abc", @"123"]), @[@"abc", @"123"]); assert_eq!(to_managed([@[42]]), @[@[42]]); } diff --git a/src/libstd/fmt/mod.rs b/src/libstd/fmt/mod.rs index 111eb70eb204a..13e6d80809584 100644 --- a/src/libstd/fmt/mod.rs +++ b/src/libstd/fmt/mod.rs @@ -1167,7 +1167,6 @@ delegate!( u8 to Unsigned) delegate!( u16 to Unsigned) delegate!( u32 to Unsigned) delegate!( u64 to Unsigned) -delegate!(@str to String) delegate!(~str to String) delegate!(&'a str to String) delegate!(bool to Bool) diff --git a/src/libstd/path/mod.rs b/src/libstd/path/mod.rs index 11f23b22c51fa..2282f97a716fb 100644 --- a/src/libstd/path/mod.rs +++ b/src/libstd/path/mod.rs @@ -604,19 +604,6 @@ impl BytesContainer for ~str { fn is_str(_: Option<~str>) -> bool { true } } -impl BytesContainer for @str { - #[inline] - fn container_as_bytes<'a>(&'a self) -> &'a [u8] { - self.as_bytes() - } - #[inline] - fn container_as_str<'a>(&'a self) -> Option<&'a str> { - Some(self.as_slice()) - } - #[inline] - fn is_str(_: Option<@str>) -> bool { true } -} - impl<'a> BytesContainer for &'a [u8] { #[inline] fn container_as_bytes<'a>(&'a self) -> &'a [u8] { diff --git a/src/libstd/path/posix.rs b/src/libstd/path/posix.rs index 707ba18378a83..b6506b517864b 100644 --- a/src/libstd/path/posix.rs +++ b/src/libstd/path/posix.rs @@ -830,7 +830,6 @@ mod tests { t!(s: "a/b/c", ["d", "/e"], "/e"); t!(s: "a/b/c", ["d", "/e", "f"], "/e/f"); t!(s: "a/b/c", [~"d", ~"e"], "a/b/c/d/e"); - t!(s: "a/b/c", [@"d", @"e"], "a/b/c/d/e"); t!(v: b!("a/b/c"), [b!("d"), b!("e")], b!("a/b/c/d/e")); t!(v: b!("a/b/c"), [b!("d"), b!("/e"), b!("f")], b!("/e/f")); t!(v: b!("a/b/c"), [b!("d").to_owned(), b!("e").to_owned()], b!("a/b/c/d/e")); @@ -940,7 +939,6 @@ mod tests { t!(s: "a/b/c", ["..", "d"], "a/b/d"); t!(s: "a/b/c", ["d", "/e", "f"], "/e/f"); t!(s: "a/b/c", [~"d", ~"e"], "a/b/c/d/e"); - t!(s: "a/b/c", [@"d", @"e"], "a/b/c/d/e"); t!(v: b!("a/b/c"), [b!("d"), b!("e")], b!("a/b/c/d/e")); t!(v: b!("a/b/c"), [b!("d").to_owned(), b!("e").to_owned()], b!("a/b/c/d/e")); t!(v: b!("a/b/c"), [to_man(b!("d").to_owned()), to_man(b!("e").to_owned())], diff --git a/src/libstd/path/windows.rs b/src/libstd/path/windows.rs index a07471afc1a67..2578acaf41cd5 100644 --- a/src/libstd/path/windows.rs +++ b/src/libstd/path/windows.rs @@ -1610,7 +1610,6 @@ mod tests { t!(s: "a\\b\\c", ["d", "\\e"], "\\e"); t!(s: "a\\b\\c", ["d", "\\e", "f"], "\\e\\f"); t!(s: "a\\b\\c", [~"d", ~"e"], "a\\b\\c\\d\\e"); - t!(s: "a\\b\\c", [@"d", @"e"], "a\\b\\c\\d\\e"); t!(v: b!("a\\b\\c"), [b!("d"), b!("e")], b!("a\\b\\c\\d\\e")); t!(v: b!("a\\b\\c"), [b!("d"), 
b!("\\e"), b!("f")], b!("\\e\\f")); t!(v: b!("a\\b\\c"), [b!("d").to_owned(), b!("e").to_owned()], b!("a\\b\\c\\d\\e")); @@ -1755,7 +1754,6 @@ mod tests { t!(s: "a\\b\\c", ["..", "d"], "a\\b\\d"); t!(s: "a\\b\\c", ["d", "\\e", "f"], "\\e\\f"); t!(s: "a\\b\\c", [~"d", ~"e"], "a\\b\\c\\d\\e"); - t!(s: "a\\b\\c", [@"d", @"e"], "a\\b\\c\\d\\e"); t!(v: b!("a\\b\\c"), [b!("d"), b!("e")], b!("a\\b\\c\\d\\e")); t!(v: b!("a\\b\\c"), [b!("d").to_owned(), b!("e").to_owned()], b!("a\\b\\c\\d\\e")); t!(v: b!("a\\b\\c"), [to_man(b!("d").to_owned()), to_man(b!("e").to_owned())], diff --git a/src/libstd/reflect.rs b/src/libstd/reflect.rs index 87655f5911fe7..d0b0f0c264d05 100644 --- a/src/libstd/reflect.rs +++ b/src/libstd/reflect.rs @@ -183,9 +183,6 @@ impl TyVisitor for MovePtrAdaptor { } fn visit_estr_box(&mut self) -> bool { - self.align_to::<@str>(); - if ! self.inner.visit_estr_box() { return false; } - self.bump_past::<@str>(); true } diff --git a/src/libstd/repr.rs b/src/libstd/repr.rs index 1ecc31ec2f4e9..8ecb3395542fd 100644 --- a/src/libstd/repr.rs +++ b/src/libstd/repr.rs @@ -272,10 +272,7 @@ impl<'a> TyVisitor for ReprVisitor<'a> { } fn visit_estr_box(&mut self) -> bool { - self.get::<@str>(|this, s| { - this.writer.write(['@' as u8]); - this.write_escaped_slice(*s); - }) + true } fn visit_estr_uniq(&mut self) -> bool { @@ -628,7 +625,6 @@ fn test_repr() { exact_test(&false, "false"); exact_test(&1.234, "1.234f64"); exact_test(&(&"hello"), "\"hello\""); - exact_test(&(@"hello"), "@\"hello\""); exact_test(&(~"he\u10f3llo"), "~\"he\\u10f3llo\""); exact_test(&(@10), "@10"); diff --git a/src/libstd/send_str.rs b/src/libstd/send_str.rs index b6c9acd26723d..f3471536f91c8 100644 --- a/src/libstd/send_str.rs +++ b/src/libstd/send_str.rs @@ -185,7 +185,6 @@ mod tests { assert_eq!(s.len(), 5); assert_eq!(s.as_slice(), "abcde"); assert_eq!(s.to_str(), ~"abcde"); - assert!(s.equiv(&@"abcde")); assert!(s.lt(&SendStrOwned(~"bcdef"))); assert_eq!(SendStrStatic(""), Default::default()); @@ -193,7 +192,6 @@ mod tests { assert_eq!(o.len(), 5); assert_eq!(o.as_slice(), "abcde"); assert_eq!(o.to_str(), ~"abcde"); - assert!(o.equiv(&@"abcde")); assert!(o.lt(&SendStrStatic("bcdef"))); assert_eq!(SendStrOwned(~""), Default::default()); diff --git a/src/libstd/str.rs b/src/libstd/str.rs index 9cc9799d0c017..290a46d51267d 100644 --- a/src/libstd/str.rs +++ b/src/libstd/str.rs @@ -17,46 +17,35 @@ Unicode string manipulation (`str` type) Rust's string type is one of the core primitive types of the language. While represented by the name `str`, the name `str` is not actually a valid type in Rust. Each string must also be decorated with its ownership. This means that -there are three common kinds of strings in rust: +there are two common kinds of strings in rust: * `~str` - This is an owned string. This type obeys all of the normal semantics of the `~T` types, meaning that it has one, and only one, owner. This type cannot be implicitly copied, and is moved out of when passed to other functions. -* `@str` - This is a managed string. Similarly to `@T`, this type can be - implicitly copied, and each implicit copy will increment the - reference count to the string. This means that there is no "true - owner" of the string, and the string will be deallocated when the - reference count reaches 0. - -* `&str` - Finally, this is the borrowed string type. This type of string can - only be created from one of the other two kinds of strings. 
As the - name "borrowed" implies, this type of string is owned elsewhere, and - this string cannot be moved out of. +* `&str` - This is the borrowed string type. This type of string can only be + created from the other kind of string. As the name "borrowed" + implies, this type of string is owned elsewhere, and this string + cannot be moved out of. As an example, here's a few different kinds of strings. ```rust -#[feature(managed_boxes)]; - fn main() { let owned_string = ~"I am an owned string"; - let managed_string = @"This string is garbage-collected"; let borrowed_string1 = "This string is borrowed with the 'static lifetime"; let borrowed_string2: &str = owned_string; // owned strings can be borrowed - let borrowed_string3: &str = managed_string; // managed strings can also be borrowed } ``` -From the example above, you can see that rust has 3 different kinds of string -literals. The owned/managed literals correspond to the owned/managed string -types, but the "borrowed literal" is actually more akin to C's concept of a -static string. +From the example above, you can see that rust has 2 different kinds of string +literals. The owned literals correspond to the owned string types, but the +"borrowed literal" is actually more akin to C's concept of a static string. -When a string is declared without a `~` or `@` sigil, then the string is -allocated statically in the rodata of the executable/library. The string then -has the type `&'static str` meaning that the string is valid for the `'static` +When a string is declared without a `~` sigil, then the string is allocated +statically in the rodata of the executable/library. The string then has the +type `&'static str` meaning that the string is valid for the `'static` lifetime, otherwise known as the lifetime of the entire program. As can be inferred from the type, these static strings are not mutable. 
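To make the remaining ownership story concrete, here is a minimal sketch in the same pre-1.0 dialect this patch targets (variable names are illustrative, not taken from the patch): once `@str` is gone, code either keeps an owned `~str`, obtained with `to_owned()`, or borrows a `&str`, and an undecorated literal keeps its `&'static str` type.

```rust
// Minimal sketch (pre-1.0 Rust, illustrative only): the two string kinds
// that remain after @str is removed.
fn main() {
    // An undecorated literal lives in rodata and has the 'static lifetime.
    let greeting: &'static str = "hello";
    // Where a @"..." literal was used before, make an owned copy instead.
    let owned: ~str = greeting.to_owned();
    // Owned strings can still be borrowed wherever a &str is expected.
    let borrowed: &str = owned;
    assert_eq!(borrowed, "hello");
    assert_eq!(owned.len(), greeting.len());
}
```

The rest of the patch follows the same pattern: call sites that previously built a `@str` with `to_managed()` now return the `~str` directly, or go through `token::intern_and_get_ident` when an `InternedString` is expected.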
@@ -89,11 +78,9 @@ The actual representation of strings have direct mappings to vectors: * `~str` is the same as `~[u8]` * `&str` is the same as `&[u8]` -* `@str` is the same as `@[u8]` */ -use at_vec; use cast; use cast::transmute; use char; @@ -158,16 +145,6 @@ impl<'a> ToStr for &'a str { fn to_str(&self) -> ~str { self.to_owned() } } -impl ToStr for @str { - #[inline] - fn to_str(&self) -> ~str { self.to_owned() } -} - -impl<'a> FromStr for @str { - #[inline] - fn from_str(s: &str) -> Option<@str> { Some(s.to_managed()) } -} - /// Convert a byte to a UTF-8 string /// /// # Failure @@ -1141,11 +1118,6 @@ pub mod traits { fn cmp(&self, other: &~str) -> Ordering { self.as_slice().cmp(&other.as_slice()) } } - impl TotalOrd for @str { - #[inline] - fn cmp(&self, other: &@str) -> Ordering { self.as_slice().cmp(&other.as_slice()) } - } - impl<'a> Eq for &'a str { #[inline] fn eq(&self, other: & &'a str) -> bool { @@ -1162,13 +1134,6 @@ pub mod traits { } } - impl Eq for @str { - #[inline] - fn eq(&self, other: &@str) -> bool { - eq_slice((*self), (*other)) - } - } - impl<'a> TotalEq for &'a str { #[inline] fn equals(&self, other: & &'a str) -> bool { @@ -1183,13 +1148,6 @@ pub mod traits { } } - impl TotalEq for @str { - #[inline] - fn equals(&self, other: &@str) -> bool { - eq_slice((*self), (*other)) - } - } - impl<'a> Ord for &'a str { #[inline] fn lt(&self, other: & &'a str) -> bool { self.cmp(other) == Less } @@ -1200,21 +1158,11 @@ pub mod traits { fn lt(&self, other: &~str) -> bool { self.cmp(other) == Less } } - impl Ord for @str { - #[inline] - fn lt(&self, other: &@str) -> bool { self.cmp(other) == Less } - } - impl<'a, S: Str> Equiv for &'a str { #[inline] fn equiv(&self, other: &S) -> bool { eq_slice(*self, other.as_slice()) } } - impl<'a, S: Str> Equiv for @str { - #[inline] - fn equiv(&self, other: &S) -> bool { eq_slice(*self, other.as_slice()) } - } - impl<'a, S: Str> Equiv for ~str { #[inline] fn equiv(&self, other: &S) -> bool { eq_slice(*self, other.as_slice()) } @@ -1251,16 +1199,6 @@ impl<'a> Str for ~str { fn into_owned(self) -> ~str { self } } -impl<'a> Str for @str { - #[inline] - fn as_slice<'a>(&'a self) -> &'a str { - let s: &'a str = *self; s - } - - #[inline] - fn into_owned(self) -> ~str { self.to_owned() } -} - impl<'a> Container for &'a str { #[inline] fn len(&self) -> uint { @@ -1273,11 +1211,6 @@ impl Container for ~str { fn len(&self) -> uint { self.as_slice().len() } } -impl Container for @str { - #[inline] - fn len(&self) -> uint { self.as_slice().len() } -} - impl Mutable for ~str { /// Remove all content, make the string empty #[inline] @@ -1735,9 +1668,6 @@ pub trait StrSlice<'a> { /// Copy a slice into a new owned str. fn to_owned(&self) -> ~str; - /// Copy a slice into a new managed str. - fn to_managed(&self) -> @str; - /// Converts to a vector of `u16` encoded as UTF-16. 
fn to_utf16(&self) -> ~[u16]; @@ -2247,14 +2177,6 @@ impl<'a> StrSlice<'a> for &'a str { } } - #[inline] - fn to_managed(&self) -> @str { - unsafe { - let v: *&[u8] = cast::transmute(self); - cast::transmute(at_vec::to_managed(*v)) - } - } - fn to_utf16(&self) -> ~[u16] { let mut u = ~[]; for ch in self.chars() { @@ -2683,20 +2605,6 @@ impl DeepClone for ~str { } } -impl Clone for @str { - #[inline] - fn clone(&self) -> @str { - *self - } -} - -impl DeepClone for @str { - #[inline] - fn deep_clone(&self) -> @str { - *self - } -} - impl FromIterator for ~str { #[inline] fn from_iterator>(iterator: &mut T) -> ~str { @@ -2728,10 +2636,6 @@ impl Default for ~str { fn default() -> ~str { ~"" } } -impl Default for @str { - fn default() -> @str { @"" } -} - #[cfg(test)] mod tests { use iter::AdditiveIterator; @@ -3537,12 +3441,6 @@ mod tests { assert_eq!("\U0001d4ea\r".escape_default(), ~"\\U0001d4ea\\r"); } - #[test] - fn test_to_managed() { - assert_eq!("abc".to_managed(), @"abc"); - assert_eq!("abcdef".slice(1, 5).to_managed(), @"bcde"); - } - #[test] fn test_total_ord() { "1234".cmp(& &"123") == Greater; @@ -3580,15 +3478,12 @@ mod tests { let e = $e; assert_eq!(s1 + s2, e.to_owned()); assert_eq!(s1.to_owned() + s2, e.to_owned()); - assert_eq!(s1.to_managed() + s2, e.to_owned()); } } ); t!("foo", "bar", "foobar"); - t!("foo", @"bar", "foobar"); t!("foo", ~"bar", "foobar"); t!("ศไทย中", "华Việt Nam", "ศไทย中华Việt Nam"); - t!("ศไทย中", @"华Việt Nam", "ศไทย中华Việt Nam"); t!("ศไทย中", ~"华Việt Nam", "ศไทย中华Việt Nam"); } @@ -3875,7 +3770,6 @@ mod tests { } t::<&str>(); - t::<@str>(); t::<~str>(); } @@ -3887,7 +3781,6 @@ mod tests { let s = ~"01234"; assert_eq!(5, sum_len(["012", "", "34"])); - assert_eq!(5, sum_len([@"01", @"2", @"34", @""])); assert_eq!(5, sum_len([~"01", ~"2", ~"34", ~""])); assert_eq!(5, sum_len([s.as_slice()])); } @@ -3958,8 +3851,6 @@ mod tests { fn test_from_str() { let owned: Option<~str> = from_str(&"string"); assert_eq!(owned, Some(~"string")); - let managed: Option<@str> = from_str(&"string"); - assert_eq!(managed, Some(@"string")); } } diff --git a/src/libstd/to_bytes.rs b/src/libstd/to_bytes.rs index 8df028f56d509..7ca1590dad07f 100644 --- a/src/libstd/to_bytes.rs +++ b/src/libstd/to_bytes.rs @@ -288,13 +288,6 @@ impl IterBytes for ~str { } } -impl IterBytes for @str { - #[inline] - fn iter_bytes(&self, lsb0: bool, f: Cb) -> bool { - self.as_slice().iter_bytes(lsb0, f) - } -} - impl IterBytes for Option { #[inline] fn iter_bytes(&self, lsb0: bool, f: Cb) -> bool { diff --git a/src/libstd/to_str.rs b/src/libstd/to_str.rs index a58b09d8ecde3..edbf331441772 100644 --- a/src/libstd/to_str.rs +++ b/src/libstd/to_str.rs @@ -195,7 +195,6 @@ mod tests { assert_eq!(false.to_str(), ~"false"); assert_eq!(().to_str(), ~"()"); assert_eq!((~"hi").to_str(), ~"hi"); - assert_eq!((@"hi").to_str(), ~"hi"); } #[test] diff --git a/src/libstd/unstable/raw.rs b/src/libstd/unstable/raw.rs index 8aee26c24b299..c568edd09d1da 100644 --- a/src/libstd/unstable/raw.rs +++ b/src/libstd/unstable/raw.rs @@ -59,7 +59,6 @@ impl Repr<*Box> for @T {} impl Repr<*Box>> for @[T] {} impl Repr<*Vec> for ~[T] {} impl Repr<*String> for ~str {} -impl Repr<*Box> for @str {} // sure would be nice to have this // impl Repr<*Vec> for ~[T] {} diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 40ae98791efbd..a77be0a486efa 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -14,7 +14,8 @@ use codemap::{Span, Spanned, DUMMY_SP}; use abi::AbiSet; use ast_util; use opt_vec::OptVec; -use 
parse::token::{interner_get, str_to_ident, special_idents}; +use parse::token::{InternedString, special_idents, str_to_ident}; +use parse::token; use std::cell::RefCell; use std::hashmap::HashMap; @@ -125,7 +126,8 @@ pub type Mrk = u32; impl Encodable for Ident { fn encode(&self, s: &mut S) { - s.emit_str(interner_get(self.name)); + let string = token::get_ident(self.name); + s.emit_str(string.get()); } } @@ -295,9 +297,9 @@ pub type MetaItem = Spanned; #[deriving(Clone, Encodable, Decodable, IterBytes)] pub enum MetaItem_ { - MetaWord(@str), - MetaList(@str, ~[@MetaItem]), - MetaNameValue(@str, Lit), + MetaWord(InternedString), + MetaList(InternedString, ~[@MetaItem]), + MetaNameValue(InternedString, Lit), } // can't be derived because the MetaList requires an unordered comparison @@ -721,14 +723,14 @@ pub type Lit = Spanned; #[deriving(Clone, Eq, Encodable, Decodable, IterBytes)] pub enum Lit_ { - LitStr(@str, StrStyle), + LitStr(InternedString, StrStyle), LitBinary(@[u8]), LitChar(u32), LitInt(i64, IntTy), LitUint(u64, UintTy), LitIntUnsuffixed(i64), - LitFloat(@str, FloatTy), - LitFloatUnsuffixed(@str), + LitFloat(InternedString, FloatTy), + LitFloatUnsuffixed(InternedString), LitNil, LitBool(bool), } @@ -897,11 +899,11 @@ pub enum AsmDialect { #[deriving(Clone, Eq, Encodable, Decodable, IterBytes)] pub struct InlineAsm { - asm: @str, + asm: InternedString, asm_str_style: StrStyle, - clobbers: @str, - inputs: ~[(@str, @Expr)], - outputs: ~[(@str, @Expr)], + clobbers: InternedString, + inputs: ~[(InternedString, @Expr)], + outputs: ~[(InternedString, @Expr)], volatile: bool, alignstack: bool, dialect: AsmDialect @@ -1074,7 +1076,7 @@ pub enum ViewItem_ { // optional @str: if present, this is a location (containing // arbitrary characters) from which to fetch the crate sources // For example, extern mod whatever = "github.com/mozilla/rust" - ViewItemExternMod(Ident, Option<(@str, StrStyle)>, NodeId), + ViewItemExternMod(Ident, Option<(InternedString,StrStyle)>, NodeId), ViewItemUse(~[@ViewPath]), } diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs index bb66d620d2910..89209ab2104f0 100644 --- a/src/libsyntax/ast_map.rs +++ b/src/libsyntax/ast_map.rs @@ -62,9 +62,10 @@ pub fn path_to_str_with_sep(p: &[PathElem], sep: &str, itr: @IdentInterner) pub fn path_ident_to_str(p: &Path, i: Ident, itr: @IdentInterner) -> ~str { if p.is_empty() { - itr.get(i.name).to_owned() + itr.get(i.name).into_owned() } else { - format!("{}::{}", path_to_str(*p, itr), itr.get(i.name)) + let string = itr.get(i.name); + format!("{}::{}", path_to_str(*p, itr), string.as_slice()) } } @@ -75,7 +76,7 @@ pub fn path_to_str(p: &[PathElem], itr: @IdentInterner) -> ~str { pub fn path_elem_to_str(pe: PathElem, itr: @IdentInterner) -> ~str { match pe { PathMod(s) | PathName(s) | PathPrettyName(s, _) => { - itr.get(s.name).to_owned() + itr.get(s.name).into_owned() } } } @@ -105,7 +106,11 @@ fn pretty_ty(ty: &Ty, itr: @IdentInterner, out: &mut ~str) { // need custom handling. 
TyNil => { out.push_str("$NIL$"); return } TyPath(ref path, _, _) => { - out.push_str(itr.get(path.segments.last().unwrap().identifier.name)); + out.push_str(itr.get(path.segments + .last() + .unwrap() + .identifier + .name).as_slice()); return } TyTup(ref tys) => { @@ -138,7 +143,8 @@ pub fn impl_pretty_name(trait_ref: &Option, ty: &Ty) -> PathElem { match *trait_ref { None => pretty = ~"", Some(ref trait_ref) => { - pretty = itr.get(trait_ref.path.segments.last().unwrap().identifier.name).to_owned(); + pretty = itr.get(trait_ref.path.segments.last().unwrap().identifier.name) + .into_owned(); pretty.push_char('$'); } }; @@ -489,17 +495,21 @@ pub fn node_id_to_str(map: Map, id: NodeId, itr: @IdentInterner) -> ~str { path_ident_to_str(path, item.ident, itr), abi, id) } Some(NodeMethod(m, _, path)) => { + let name = itr.get(m.ident.name); format!("method {} in {} (id={})", - itr.get(m.ident.name), path_to_str(*path, itr), id) + name.as_slice(), path_to_str(*path, itr), id) } Some(NodeTraitMethod(ref tm, _, path)) => { let m = ast_util::trait_method_to_ty_method(&**tm); + let name = itr.get(m.ident.name); format!("method {} in {} (id={})", - itr.get(m.ident.name), path_to_str(*path, itr), id) + name.as_slice(), path_to_str(*path, itr), id) } Some(NodeVariant(ref variant, _, path)) => { + let name = itr.get(variant.node.name.name); format!("variant {} in {} (id={})", - itr.get(variant.node.name.name), path_to_str(*path, itr), id) + name.as_slice(), + path_to_str(*path, itr), id) } Some(NodeExpr(expr)) => { format!("expr {} (id={})", pprust::expr_to_str(expr, itr), id) diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 405de5c5542d0..afedb62105ba7 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -25,7 +25,10 @@ use std::num; pub fn path_name_i(idents: &[Ident]) -> ~str { // FIXME: Bad copies (#2543 -- same for everything else that says "bad") - idents.map(|i| token::interner_get(i.name)).connect("::") + idents.map(|i| { + let string = token::get_ident(i.name); + string.get().to_str() + }).connect("::") } // totally scary function: ignores all but the last element, should have diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index c44861bd7d7f7..78e9d3bd46f8c 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -16,24 +16,26 @@ use codemap::{Span, Spanned, spanned, dummy_spanned}; use codemap::BytePos; use diagnostic::SpanHandler; use parse::comments::{doc_comment_style, strip_doc_comment_decoration}; +use parse::token::InternedString; +use parse::token; use crateid::CrateId; use std::hashmap::HashSet; pub trait AttrMetaMethods { - // This could be changed to `fn check_name(&self, name: @str) -> + // This could be changed to `fn check_name(&self, name: InternedString) -> // bool` which would facilitate a side table recording which // attributes/meta items are used/unused. /// Retrieve the name of the meta item, e.g. foo in #[foo], /// #[foo="bar"] and #[foo(bar)] - fn name(&self) -> @str; + fn name(&self) -> InternedString; /** * Gets the string value if self is a MetaNameValue variant * containing a string, otherwise None. */ - fn value_str(&self) -> Option<@str>; + fn value_str(&self) -> Option; /// Gets a list of inner meta items from a list MetaItem type. 
fn meta_item_list<'a>(&'a self) -> Option<&'a [@MetaItem]>; @@ -41,32 +43,36 @@ pub trait AttrMetaMethods { * If the meta item is a name-value type with a string value then returns * a tuple containing the name and string value, otherwise `None` */ - fn name_str_pair(&self) -> Option<(@str, @str)>; + fn name_str_pair(&self) -> Option<(InternedString,InternedString)>; } impl AttrMetaMethods for Attribute { - fn name(&self) -> @str { self.meta().name() } - fn value_str(&self) -> Option<@str> { self.meta().value_str() } + fn name(&self) -> InternedString { self.meta().name() } + fn value_str(&self) -> Option { + self.meta().value_str() + } fn meta_item_list<'a>(&'a self) -> Option<&'a [@MetaItem]> { self.node.value.meta_item_list() } - fn name_str_pair(&self) -> Option<(@str, @str)> { self.meta().name_str_pair() } + fn name_str_pair(&self) -> Option<(InternedString,InternedString)> { + self.meta().name_str_pair() + } } impl AttrMetaMethods for MetaItem { - fn name(&self) -> @str { + fn name(&self) -> InternedString { match self.node { - MetaWord(n) => n, - MetaNameValue(n, _) => n, - MetaList(n, _) => n + MetaWord(ref n) => (*n).clone(), + MetaNameValue(ref n, _) => (*n).clone(), + MetaList(ref n, _) => (*n).clone(), } } - fn value_str(&self) -> Option<@str> { + fn value_str(&self) -> Option { match self.node { MetaNameValue(_, ref v) => { match v.node { - ast::LitStr(s, _) => Some(s), + ast::LitStr(ref s, _) => Some((*s).clone()), _ => None, } }, @@ -81,19 +87,21 @@ impl AttrMetaMethods for MetaItem { } } - fn name_str_pair(&self) -> Option<(@str, @str)> { + fn name_str_pair(&self) -> Option<(InternedString,InternedString)> { self.value_str().map(|s| (self.name(), s)) } } // Annoying, but required to get test_cfg to work impl AttrMetaMethods for @MetaItem { - fn name(&self) -> @str { (**self).name() } - fn value_str(&self) -> Option<@str> { (**self).value_str() } + fn name(&self) -> InternedString { (**self).name() } + fn value_str(&self) -> Option { (**self).value_str() } fn meta_item_list<'a>(&'a self) -> Option<&'a [@MetaItem]> { (**self).meta_item_list() } - fn name_str_pair(&self) -> Option<(@str, @str)> { (**self).name_str_pair() } + fn name_str_pair(&self) -> Option<(InternedString,InternedString)> { + (**self).name_str_pair() + } } @@ -114,8 +122,10 @@ impl AttributeMethods for Attribute { fn desugar_doc(&self) -> Attribute { if self.node.is_sugared_doc { let comment = self.value_str().unwrap(); - let meta = mk_name_value_item_str(@"doc", - strip_doc_comment_decoration(comment).to_managed()); + let meta = mk_name_value_item_str( + InternedString::new("doc"), + token::intern_and_get_ident(strip_doc_comment_decoration( + comment.get()))); mk_attr(meta) } else { *self @@ -125,20 +135,22 @@ impl AttributeMethods for Attribute { /* Constructors */ -pub fn mk_name_value_item_str(name: @str, value: @str) -> @MetaItem { +pub fn mk_name_value_item_str(name: InternedString, value: InternedString) + -> @MetaItem { let value_lit = dummy_spanned(ast::LitStr(value, ast::CookedStr)); mk_name_value_item(name, value_lit) } -pub fn mk_name_value_item(name: @str, value: ast::Lit) -> @MetaItem { +pub fn mk_name_value_item(name: InternedString, value: ast::Lit) + -> @MetaItem { @dummy_spanned(MetaNameValue(name, value)) } -pub fn mk_list_item(name: @str, items: ~[@MetaItem]) -> @MetaItem { +pub fn mk_list_item(name: InternedString, items: ~[@MetaItem]) -> @MetaItem { @dummy_spanned(MetaList(name, items)) } -pub fn mk_word_item(name: @str) -> @MetaItem { +pub fn mk_word_item(name: InternedString) -> 
@MetaItem { @dummy_spanned(MetaWord(name)) } @@ -150,12 +162,14 @@ pub fn mk_attr(item: @MetaItem) -> Attribute { }) } -pub fn mk_sugared_doc_attr(text: @str, lo: BytePos, hi: BytePos) -> Attribute { - let style = doc_comment_style(text); +pub fn mk_sugared_doc_attr(text: InternedString, lo: BytePos, hi: BytePos) + -> Attribute { + let style = doc_comment_style(text.get()); let lit = spanned(lo, hi, ast::LitStr(text, ast::CookedStr)); let attr = Attribute_ { style: style, - value: @spanned(lo, hi, MetaNameValue(@"doc", lit)), + value: @spanned(lo, hi, MetaNameValue(InternedString::new("doc"), + lit)), is_sugared_doc: true }; spanned(lo, hi, attr) @@ -178,20 +192,22 @@ pub fn contains_name(metas: &[AM], name: &str) -> bool { debug!("attr::contains_name (name={})", name); metas.iter().any(|item| { debug!(" testing: {}", item.name()); - name == item.name() + item.name().equiv(&name) }) } pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str) - -> Option<@str> { + -> Option { attrs.iter() - .find(|at| name == at.name()) + .find(|at| at.name().equiv(&name)) .and_then(|at| at.value_str()) } pub fn last_meta_item_value_str_by_name(items: &[@MetaItem], name: &str) - -> Option<@str> { - items.rev_iter().find(|mi| name == mi.name()).and_then(|i| i.value_str()) + -> Option { + items.rev_iter() + .find(|mi| mi.name().equiv(&name)) + .and_then(|i| i.value_str()) } /* Higher-level applications */ @@ -201,16 +217,16 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] { // human-readable strings. let mut v = items.iter() .map(|&mi| (mi.name(), mi)) - .collect::<~[(@str, @MetaItem)]>(); + .collect::<~[(InternedString, @MetaItem)]>(); - v.sort_by(|&(a, _), &(b, _)| a.cmp(&b)); + v.sort_by(|&(ref a, _), &(ref b, _)| a.cmp(b)); // There doesn't seem to be a more optimal way to do this v.move_iter().map(|(_, m)| { match m.node { - MetaList(n, ref mis) => { + MetaList(ref n, ref mis) => { @Spanned { - node: MetaList(n, sort_meta_items(*mis)), + node: MetaList((*n).clone(), sort_meta_items(*mis)), .. /*bad*/ (*m).clone() } } @@ -225,7 +241,7 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] { */ pub fn find_linkage_metas(attrs: &[Attribute]) -> ~[@MetaItem] { let mut result = ~[]; - for attr in attrs.iter().filter(|at| "link" == at.name()) { + for attr in attrs.iter().filter(|at| at.name().equiv(&("link"))) { match attr.meta().node { MetaList(_, ref items) => result.push_all(*items), _ => () @@ -237,7 +253,7 @@ pub fn find_linkage_metas(attrs: &[Attribute]) -> ~[@MetaItem] { pub fn find_crateid(attrs: &[Attribute]) -> Option { match first_attr_value_str_by_name(attrs, "crate_id") { None => None, - Some(id) => from_str::(id), + Some(id) => from_str::(id.get()), } } @@ -254,8 +270,8 @@ pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr { // FIXME (#2809)---validate the usage of #[inline] and #[inline] attrs.iter().fold(InlineNone, |ia,attr| { match attr.node.value.node { - MetaWord(n) if "inline" == n => InlineHint, - MetaList(n, ref items) if "inline" == n => { + MetaWord(ref n) if n.equiv(&("inline")) => InlineHint, + MetaList(ref n, ref items) if n.equiv(&("inline")) => { if contains_name(*items, "always") { InlineAlways } else if contains_name(*items, "never") { @@ -284,7 +300,7 @@ pub fn test_cfg> // this doesn't work. 
let some_cfg_matches = metas.any(|mi| { debug!("testing name: {}", mi.name()); - if "cfg" == mi.name() { // it is a #[cfg()] attribute + if mi.name().equiv(&("cfg")) { // it is a #[cfg()] attribute debug!("is cfg"); no_cfgs = false; // only #[cfg(...)] ones are understood. @@ -294,7 +310,8 @@ pub fn test_cfg> cfg_meta.iter().all(|cfg_mi| { debug!("cfg({}[...])", cfg_mi.name()); match cfg_mi.node { - ast::MetaList(s, ref not_cfgs) if "not" == s => { + ast::MetaList(ref s, ref not_cfgs) + if s.equiv(&("not")) => { debug!("not!"); // inside #[cfg(not(...))], so these need to all // not match. @@ -320,7 +337,7 @@ pub fn test_cfg> /// Represents the #[deprecated="foo"] (etc) attributes. pub struct Stability { level: StabilityLevel, - text: Option<@str> + text: Option } /// The available stability levels. @@ -335,9 +352,10 @@ pub enum StabilityLevel { } /// Find the first stability attribute. `None` if none exists. -pub fn find_stability>(mut metas: It) -> Option { +pub fn find_stability>(mut metas: It) + -> Option { for m in metas { - let level = match m.name().as_slice() { + let level = match m.name().get() { "deprecated" => Deprecated, "experimental" => Experimental, "unstable" => Unstable, @@ -360,7 +378,7 @@ pub fn require_unique_names(diagnostic: @SpanHandler, metas: &[@MetaItem]) { for meta in metas.iter() { let name = meta.name(); - if !set.insert(name) { + if !set.insert(name.clone()) { diagnostic.span_fatal(meta.span, format!("duplicate meta item `{}`", name)); } @@ -384,14 +402,14 @@ pub fn find_repr_attr(diagnostic: @SpanHandler, attr: @ast::MetaItem, acc: ReprA -> ReprAttr { let mut acc = acc; match attr.node { - ast::MetaList(s, ref items) if "repr" == s => { + ast::MetaList(ref s, ref items) if s.equiv(&("repr")) => { for item in items.iter() { match item.node { - ast::MetaWord(word) => { - let hint = match word.as_slice() { + ast::MetaWord(ref word) => { + let hint = match word.get() { // Can't use "extern" because it's not a lexical identifier. "C" => ReprExtern, - _ => match int_type_of_word(word) { + _ => match int_type_of_word(word.get()) { Some(ity) => ReprInt(item.span, ity), None => { // Not a word we recognize diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index d4a412bbe9ff2..2ada3ac16ea66 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -160,7 +160,7 @@ pub struct LocWithOpt { pub struct FileMapAndLine {fm: @FileMap, line: uint} pub struct FileMapAndBytePos {fm: @FileMap, pos: BytePos} -#[deriving(IterBytes)] +#[deriving(Clone, IterBytes)] pub enum MacroFormat { // e.g. #[deriving(...)] MacroAttribute, @@ -168,9 +168,9 @@ pub enum MacroFormat { MacroBang } -#[deriving(IterBytes)] +#[deriving(Clone, IterBytes)] pub struct NameAndSpan { - name: @str, + name: ~str, // the format with which the macro was invoked. format: MacroFormat, span: Option @@ -183,7 +183,7 @@ pub struct ExpnInfo { callee: NameAndSpan } -pub type FileName = @str; +pub type FileName = ~str; pub struct FileLines { @@ -206,7 +206,7 @@ pub struct FileMap { /// e.g. 
`` name: FileName, /// The complete source code - src: @str, + src: ~str, /// The start position of this source in the CodeMap start_pos: BytePos, /// Locations of lines beginnings in the source code @@ -267,7 +267,7 @@ impl CodeMap { } } - pub fn new_filemap(&self, filename: FileName, src: @str) -> @FileMap { + pub fn new_filemap(&self, filename: FileName, src: ~str) -> @FileMap { let mut files = self.files.borrow_mut(); let start_pos = match files.get().last() { None => 0, @@ -301,7 +301,7 @@ impl CodeMap { pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt { let loc = self.lookup_char_pos(pos); LocWithOpt { - filename: loc.file.name, + filename: loc.file.name.to_str(), line: loc.line, col: loc.col, file: Some(loc.file) @@ -324,7 +324,7 @@ impl CodeMap { pub fn span_to_filename(&self, sp: Span) -> FileName { let lo = self.lookup_char_pos(sp.lo); - lo.file.name + lo.file.name.to_str() } pub fn span_to_lines(&self, sp: Span) -> @FileLines { @@ -468,7 +468,7 @@ mod test { #[test] fn t1 () { let cm = CodeMap::new(); - let fm = cm.new_filemap(@"blork.rs",@"first line.\nsecond line"); + let fm = cm.new_filemap(~"blork.rs",~"first line.\nsecond line"); fm.next_line(BytePos(0)); assert_eq!(&fm.get_line(0),&~"first line."); // TESTING BROKEN BEHAVIOR: @@ -480,7 +480,7 @@ mod test { #[should_fail] fn t2 () { let cm = CodeMap::new(); - let fm = cm.new_filemap(@"blork.rs",@"first line.\nsecond line"); + let fm = cm.new_filemap(~"blork.rs",~"first line.\nsecond line"); // TESTING *REALLY* BROKEN BEHAVIOR: fm.next_line(BytePos(0)); fm.next_line(BytePos(10)); diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs index 021f0d29d9e23..1a3ebf3ce5d1c 100644 --- a/src/libsyntax/ext/asm.rs +++ b/src/libsyntax/ext/asm.rs @@ -17,6 +17,7 @@ use codemap::Span; use ext::base; use ext::base::*; use parse; +use parse::token::InternedString; use parse::token; enum State { @@ -43,7 +44,7 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) cx.cfg(), tts.to_owned()); - let mut asm = @""; + let mut asm = InternedString::new(""); let mut asm_str_style = None; let mut outputs = ~[]; let mut inputs = ~[]; @@ -79,10 +80,10 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let (constraint, _str_style) = p.parse_str(); - if constraint.starts_with("+") { + if constraint.get().starts_with("+") { cx.span_unimpl(p.last_span, "'+' (read+write) output operand constraint modifier"); - } else if !constraint.starts_with("=") { + } else if !constraint.get().starts_with("=") { cx.span_err(p.last_span, "output operand constraint lacks '='"); } @@ -104,9 +105,9 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let (constraint, _str_style) = p.parse_str(); - if constraint.starts_with("=") { + if constraint.get().starts_with("=") { cx.span_err(p.last_span, "input operand constraint contains '='"); - } else if constraint.starts_with("+") { + } else if constraint.get().starts_with("+") { cx.span_err(p.last_span, "input operand constraint contains '+'"); } @@ -137,11 +138,11 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) Options => { let (option, _str_style) = p.parse_str(); - if "volatile" == option { + if option.equiv(&("volatile")) { volatile = true; - } else if "alignstack" == option { + } else if option.equiv(&("alignstack")) { alignstack = true; - } else if "intel" == option { + } else if option.equiv(&("intel")) { dialect = ast::AsmIntel; } @@ -191,9 +192,9 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: 
&[ast::TokenTree]) MRExpr(@ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprInlineAsm(ast::InlineAsm { - asm: asm, + asm: token::intern_and_get_ident(asm.get()), asm_str_style: asm_str_style.unwrap(), - clobbers: cons.to_managed(), + clobbers: token::intern_and_get_ident(cons), inputs: inputs, outputs: outputs, volatile: volatile, diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index adf1eabf9d94d..08098b71ce450 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -16,7 +16,7 @@ use ext; use ext::expand; use parse; use parse::token; -use parse::token::{ident_to_str, intern, str_to_ident}; +use parse::token::{InternedString, intern, str_to_ident}; use util::small_vector::SmallVector; use std::hashmap::HashMap; @@ -31,7 +31,7 @@ use std::unstable::dynamic_lib::DynamicLibrary; // ast::MacInvocTT. pub struct MacroDef { - name: @str, + name: ~str, ext: SyntaxExtension } @@ -335,7 +335,8 @@ impl<'a> ExtCtxt<'a> { Some(@ExpnInfo { call_site: Span {lo: cs.lo, hi: cs.hi, expn_info: self.backtrace}, - callee: *callee}); + callee: (*callee).clone() + }); } } } @@ -396,9 +397,6 @@ impl<'a> ExtCtxt<'a> { pub fn set_trace_macros(&mut self, x: bool) { self.trace_mac = x } - pub fn str_of(&self, id: ast::Ident) -> @str { - ident_to_str(&id) - } pub fn ident_of(&self, st: &str) -> ast::Ident { str_to_ident(st) } @@ -407,11 +405,11 @@ impl<'a> ExtCtxt<'a> { /// Extract a string literal from `expr`, emitting `err_msg` if `expr` /// is not a string literal. This does not stop compilation on error, /// merely emits a non-fatal error and returns None. -pub fn expr_to_str(cx: &ExtCtxt, expr: @ast::Expr, - err_msg: &str) -> Option<(@str, ast::StrStyle)> { +pub fn expr_to_str(cx: &ExtCtxt, expr: @ast::Expr, err_msg: &str) + -> Option<(InternedString, ast::StrStyle)> { match expr.node { ast::ExprLit(l) => match l.node { - ast::LitStr(s, style) => return Some((s, style)), + ast::LitStr(ref s, style) => return Some(((*s).clone(), style)), _ => cx.span_err(l.span, err_msg) }, _ => cx.span_err(expr.span, err_msg) @@ -424,7 +422,9 @@ pub fn expr_to_str(cx: &ExtCtxt, expr: @ast::Expr, /// compilation should call /// `cx.parse_sess.span_diagnostic.abort_if_errors()` (this should be /// done as rarely as possible). 
-pub fn check_zero_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree], +pub fn check_zero_tts(cx: &ExtCtxt, + sp: Span, + tts: &[ast::TokenTree], name: &str) { if tts.len() != 0 { cx.span_err(sp, format!("{} takes no arguments", name)); @@ -437,13 +437,16 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree], name: &str) - -> Option<@str> { + -> Option<~str> { if tts.len() != 1 { cx.span_err(sp, format!("{} takes 1 argument.", name)); } else { match tts[0] { ast::TTTok(_, token::LIT_STR(ident)) - | ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => return Some(cx.str_of(ident)), + | ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => { + let interned_str = token::get_ident(ident.name); + return Some(interned_str.get().to_str()) + } _ => cx.span_err(sp, format!("{} requires a string.", name)), } } diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 9ad4f4f7fac2d..c5ee19484668e 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -19,6 +19,7 @@ use fold::Folder; use opt_vec; use opt_vec::OptVec; use parse::token::special_idents; +use parse::token; pub struct Field { ident: ast::Ident, @@ -134,13 +135,13 @@ pub trait AstBuilder { fn expr_vec(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr; fn expr_vec_uniq(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr; fn expr_vec_slice(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr; - fn expr_str(&self, sp: Span, s: @str) -> @ast::Expr; - fn expr_str_uniq(&self, sp: Span, s: @str) -> @ast::Expr; + fn expr_str(&self, sp: Span, s: InternedString) -> @ast::Expr; + fn expr_str_uniq(&self, sp: Span, s: InternedString) -> @ast::Expr; fn expr_some(&self, sp: Span, expr: @ast::Expr) -> @ast::Expr; fn expr_none(&self, sp: Span) -> @ast::Expr; - fn expr_fail(&self, span: Span, msg: @str) -> @ast::Expr; + fn expr_fail(&self, span: Span, msg: InternedString) -> @ast::Expr; fn expr_unreachable(&self, span: Span) -> @ast::Expr; fn pat(&self, span: Span, pat: ast::Pat_) -> @ast::Pat; @@ -228,9 +229,17 @@ pub trait AstBuilder { fn attribute(&self, sp: Span, mi: @ast::MetaItem) -> ast::Attribute; - fn meta_word(&self, sp: Span, w: @str) -> @ast::MetaItem; - fn meta_list(&self, sp: Span, name: @str, mis: ~[@ast::MetaItem]) -> @ast::MetaItem; - fn meta_name_value(&self, sp: Span, name: @str, value: ast::Lit_) -> @ast::MetaItem; + fn meta_word(&self, sp: Span, w: InternedString) -> @ast::MetaItem; + fn meta_list(&self, + sp: Span, + name: InternedString, + mis: ~[@ast::MetaItem]) + -> @ast::MetaItem; + fn meta_name_value(&self, + sp: Span, + name: InternedString, + value: ast::Lit_) + -> @ast::MetaItem; fn view_use(&self, sp: Span, vis: ast::Visibility, vp: ~[@ast::ViewPath]) -> ast::ViewItem; @@ -581,10 +590,10 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn expr_vec_slice(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr { self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreSlice) } - fn expr_str(&self, sp: Span, s: @str) -> @ast::Expr { + fn expr_str(&self, sp: Span, s: InternedString) -> @ast::Expr { self.expr_lit(sp, ast::LitStr(s, ast::CookedStr)) } - fn expr_str_uniq(&self, sp: Span, s: @str) -> @ast::Expr { + fn expr_str_uniq(&self, sp: Span, s: InternedString) -> @ast::Expr { self.expr_vstore(sp, self.expr_str(sp, s), ast::ExprVstoreUniq) } @@ -612,7 +621,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.expr_path(none) } - fn expr_fail(&self, span: Span, msg: @str) -> @ast::Expr { + fn expr_fail(&self, span: Span, msg: InternedString) -> @ast::Expr { let loc = 
self.codemap().lookup_char_pos(span.lo); self.expr_call_global( span, @@ -623,13 +632,16 @@ impl<'a> AstBuilder for ExtCtxt<'a> { ], ~[ self.expr_str(span, msg), - self.expr_str(span, loc.file.name), + self.expr_str(span, + token::intern_and_get_ident(loc.file.name)), self.expr_uint(span, loc.line), ]) } fn expr_unreachable(&self, span: Span) -> @ast::Expr { - self.expr_fail(span, @"internal error: entered unreachable code") + self.expr_fail(span, + InternedString::new( + "internal error: entered unreachable code")) } @@ -866,13 +878,21 @@ impl<'a> AstBuilder for ExtCtxt<'a> { }) } - fn meta_word(&self, sp: Span, w: @str) -> @ast::MetaItem { + fn meta_word(&self, sp: Span, w: InternedString) -> @ast::MetaItem { @respan(sp, ast::MetaWord(w)) } - fn meta_list(&self, sp: Span, name: @str, mis: ~[@ast::MetaItem]) -> @ast::MetaItem { + fn meta_list(&self, + sp: Span, + name: InternedString, + mis: ~[@ast::MetaItem]) + -> @ast::MetaItem { @respan(sp, ast::MetaList(name, mis)) } - fn meta_name_value(&self, sp: Span, name: @str, value: ast::Lit_) -> @ast::MetaItem { + fn meta_name_value(&self, + sp: Span, + name: InternedString, + value: ast::Lit_) + -> @ast::MetaItem { @respan(sp, ast::MetaNameValue(name, respan(sp, value))) } diff --git a/src/libsyntax/ext/bytes.rs b/src/libsyntax/ext/bytes.rs index 0c9a23be558c8..6852a0cec33ac 100644 --- a/src/libsyntax/ext/bytes.rs +++ b/src/libsyntax/ext/bytes.rs @@ -31,8 +31,8 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> // expression is a literal ast::ExprLit(lit) => match lit.node { // string literal, push each byte to vector expression - ast::LitStr(s, _) => { - for byte in s.bytes() { + ast::LitStr(ref s, _) => { + for byte in s.get().bytes() { bytes.push(cx.expr_u8(expr.span, byte)); } } diff --git a/src/libsyntax/ext/cfg.rs b/src/libsyntax/ext/cfg.rs index 9af295c0b113c..295c456c9d0bc 100644 --- a/src/libsyntax/ext/cfg.rs +++ b/src/libsyntax/ext/cfg.rs @@ -21,9 +21,10 @@ use ext::base; use ext::build::AstBuilder; use attr; use attr::*; -use parse; -use parse::token; use parse::attr::ParserAttr; +use parse::token::InternedString; +use parse::token; +use parse; pub fn expand_cfg(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { let mut p = parse::new_parser_from_tts(cx.parse_sess(), @@ -39,7 +40,7 @@ pub fn expand_cfg(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::M } // test_cfg searches for meta items looking like `cfg(foo, ...)` - let in_cfg = &[cx.meta_list(sp, @"cfg", cfgs)]; + let in_cfg = &[cx.meta_list(sp, InternedString::new("cfg"), cfgs)]; let matches_cfg = attr::test_cfg(cx.cfg(), in_cfg.iter().map(|&x| x)); let e = cx.expr_bool(sp, matches_cfg); diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs index 2a68674af952c..c13f9bf92af02 100644 --- a/src/libsyntax/ext/concat.rs +++ b/src/libsyntax/ext/concat.rs @@ -14,6 +14,7 @@ use ast; use codemap; use ext::base; use ext::build::AstBuilder; +use parse::token; pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, sp: codemap::Span, @@ -28,9 +29,10 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, match e.node { ast::ExprLit(lit) => { match lit.node { - ast::LitStr(s, _) | ast::LitFloat(s, _) - | ast::LitFloatUnsuffixed(s) => { - accumulator.push_str(s); + ast::LitStr(ref s, _) | + ast::LitFloat(ref s, _) | + ast::LitFloatUnsuffixed(ref s) => { + accumulator.push_str(s.get()); } ast::LitChar(c) => { accumulator.push_char(char::from_u32(c).unwrap()); @@ -55,5 +57,5 @@ pub fn expand_syntax_ext(cx: &mut 
base::ExtCtxt, } } } - return base::MRExpr(cx.expr_str(sp, accumulator.to_managed())); + base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(accumulator))) } diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index 9dcb5b4cb4c2a..e0d53add6489f 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -30,7 +30,10 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } } else { match *e { - ast::TTTok(_, token::IDENT(ident,_)) => res_str.push_str(cx.str_of(ident)), + ast::TTTok(_, token::IDENT(ident,_)) => { + let interned_str = token::get_ident(ident.name); + res_str.push_str(interned_str.get()) + } _ => { cx.span_err(sp, "concat_idents! requires ident args."); return MacResult::dummy_expr(); diff --git a/src/libsyntax/ext/deriving/decodable.rs b/src/libsyntax/ext/deriving/decodable.rs index a9268d85c9154..019a4dfe7cca4 100644 --- a/src/libsyntax/ext/deriving/decodable.rs +++ b/src/libsyntax/ext/deriving/decodable.rs @@ -18,6 +18,8 @@ use codemap::Span; use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use parse::token::InternedString; +use parse::token; pub fn expand_deriving_decodable(cx: &ExtCtxt, span: Span, @@ -82,10 +84,15 @@ fn decodable_substructure(cx: &ExtCtxt, trait_span: Span, cx.expr_uint(span, field), lambdadecode]) }); - cx.expr_method_call(trait_span, decoder, cx.ident_of("read_struct"), - ~[cx.expr_str(trait_span, cx.str_of(substr.type_ident)), - cx.expr_uint(trait_span, nfields), - cx.lambda_expr_1(trait_span, result, blkarg)]) + cx.expr_method_call(trait_span, + decoder, + cx.ident_of("read_struct"), + ~[ + cx.expr_str(trait_span, + token::get_ident(substr.type_ident.name)), + cx.expr_uint(trait_span, nfields), + cx.lambda_expr_1(trait_span, result, blkarg) + ]) } StaticEnum(_, ref fields) => { let variant = cx.ident_of("i"); @@ -95,7 +102,8 @@ fn decodable_substructure(cx: &ExtCtxt, trait_span: Span, let rvariant_arg = cx.ident_of("read_enum_variant_arg"); for (i, &(name, v_span, ref parts)) in fields.iter().enumerate() { - variants.push(cx.expr_str(v_span, cx.str_of(name))); + variants.push(cx.expr_str(v_span, + token::get_ident(name.name))); let decoded = decode_static_fields(cx, v_span, @@ -120,9 +128,14 @@ fn decodable_substructure(cx: &ExtCtxt, trait_span: Span, let result = cx.expr_method_call(trait_span, blkdecoder, cx.ident_of("read_enum_variant"), ~[variant_vec, lambda]); - cx.expr_method_call(trait_span, decoder, cx.ident_of("read_enum"), - ~[cx.expr_str(trait_span, cx.str_of(substr.type_ident)), - cx.lambda_expr_1(trait_span, result, blkarg)]) + cx.expr_method_call(trait_span, + decoder, + cx.ident_of("read_enum"), + ~[ + cx.expr_str(trait_span, + token::get_ident(substr.type_ident.name)), + cx.lambda_expr_1(trait_span, result, blkarg) + ]) } _ => cx.bug("expected StaticEnum or StaticStruct in deriving(Decodable)") }; @@ -135,7 +148,7 @@ fn decode_static_fields(cx: &ExtCtxt, trait_span: Span, outer_pat_ident: Ident, fields: &StaticFields, - getarg: |Span, @str, uint| -> @Expr) + getarg: |Span, InternedString, uint| -> @Expr) -> @Expr { match *fields { Unnamed(ref fields) => { @@ -143,7 +156,10 @@ fn decode_static_fields(cx: &ExtCtxt, cx.expr_ident(trait_span, outer_pat_ident) } else { let fields = fields.iter().enumerate().map(|(i, &span)| { - getarg(span, format!("_field{}", i).to_managed(), i) + getarg(span, + token::intern_and_get_ident(format!("_field{}", + i)), + i) }).collect(); cx.expr_call_ident(trait_span, 
outer_pat_ident, fields) @@ -152,7 +168,9 @@ fn decode_static_fields(cx: &ExtCtxt, Named(ref fields) => { // use the field's span to get nicer error messages. let fields = fields.iter().enumerate().map(|(i, &(name, span))| { - cx.field_imm(span, name, getarg(span, cx.str_of(name), i)) + cx.field_imm(span, + name, + getarg(span, token::get_ident(name.name), i)) }).collect(); cx.expr_struct_ident(trait_span, outer_pat_ident, fields) } diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs index 9a8861f2e70e2..c50c9f18389c2 100644 --- a/src/libsyntax/ext/deriving/encodable.rs +++ b/src/libsyntax/ext/deriving/encodable.rs @@ -80,6 +80,7 @@ use codemap::Span; use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use parse::token; pub fn expand_deriving_encodable(cx: &ExtCtxt, span: Span, @@ -125,10 +126,17 @@ fn encodable_substructure(cx: &ExtCtxt, trait_span: Span, Struct(ref fields) => { let emit_struct_field = cx.ident_of("emit_struct_field"); let mut stmts = ~[]; - for (i, &FieldInfo { name, self_, span, .. }) in fields.iter().enumerate() { + for (i, &FieldInfo { + name, + self_, + span, + .. + }) in fields.iter().enumerate() { let name = match name { - Some(id) => cx.str_of(id), - None => format!("_field{}", i).to_managed() + Some(id) => token::get_ident(id.name), + None => { + token::intern_and_get_ident(format!("_field{}", i)) + } }; let enc = cx.expr_method_call(span, self_, encode, ~[blkencoder]); let lambda = cx.lambda_expr_1(span, enc, blkarg); @@ -141,10 +149,15 @@ fn encodable_substructure(cx: &ExtCtxt, trait_span: Span, } let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg); - cx.expr_method_call(trait_span, encoder, cx.ident_of("emit_struct"), - ~[cx.expr_str(trait_span, cx.str_of(substr.type_ident)), - cx.expr_uint(trait_span, fields.len()), - blk]) + cx.expr_method_call(trait_span, + encoder, + cx.ident_of("emit_struct"), + ~[ + cx.expr_str(trait_span, + token::get_ident(substr.type_ident.name)), + cx.expr_uint(trait_span, fields.len()), + blk + ]) } EnumMatching(idx, variant, ref fields) => { @@ -167,7 +180,8 @@ fn encodable_substructure(cx: &ExtCtxt, trait_span: Span, } let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg); - let name = cx.expr_str(trait_span, cx.str_of(variant.node.name)); + let name = cx.expr_str(trait_span, + token::get_ident(variant.node.name.name)); let call = cx.expr_method_call(trait_span, blkencoder, cx.ident_of("emit_enum_variant"), ~[name, @@ -175,11 +189,14 @@ fn encodable_substructure(cx: &ExtCtxt, trait_span: Span, cx.expr_uint(trait_span, fields.len()), blk]); let blk = cx.lambda_expr_1(trait_span, call, blkarg); - let ret = cx.expr_method_call(trait_span, encoder, + let ret = cx.expr_method_call(trait_span, + encoder, cx.ident_of("emit_enum"), - ~[cx.expr_str(trait_span, - cx.str_of(substr.type_ident)), - blk]); + ~[ + cx.expr_str(trait_span, + token::get_ident(substr.type_ident.name)), + blk + ]); cx.expr_block(cx.block(trait_span, ~[me], Some(ret))) } diff --git a/src/libsyntax/ext/deriving/generic.rs b/src/libsyntax/ext/deriving/generic.rs index 9ebb771f5da5b..7f06e93a2e851 100644 --- a/src/libsyntax/ext/deriving/generic.rs +++ b/src/libsyntax/ext/deriving/generic.rs @@ -184,6 +184,8 @@ use ext::build::AstBuilder; use codemap; use codemap::Span; use opt_vec; +use parse::token::InternedString; +use parse::token; use std::vec; @@ -396,8 +398,10 @@ impl<'a> TraitDef<'a> { let doc_attr = cx.attribute( self.span, cx.meta_name_value(self.span, - @"doc", - 
ast::LitStr(@"Automatically derived.", ast::CookedStr))); + InternedString::new("doc"), + ast::LitStr(token::intern_and_get_ident( + "Automatically derived."), + ast::CookedStr))); cx.item( self.span, ::parse::token::special_idents::clownshoes_extensions, @@ -567,7 +571,14 @@ impl<'a> MethodDef<'a> { let body_block = trait_.cx.block_expr(body); let attrs = if self.inline { - ~[trait_.cx.attribute(trait_.span, trait_.cx.meta_word(trait_.span, @"inline"))] + ~[ + trait_.cx + .attribute(trait_.span, + trait_.cx + .meta_word(trait_.span, + InternedString::new( + "inline"))) + ] } else { ~[] }; @@ -933,7 +944,7 @@ impl<'a> TraitDef<'a> { to_set.expn_info = Some(@codemap::ExpnInfo { call_site: to_set, callee: codemap::NameAndSpan { - name: format!("deriving({})", trait_name).to_managed(), + name: format!("deriving({})", trait_name), format: codemap::MacroAttribute, span: Some(self.span) } diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs index 652f5ebe6c70c..9c487146639bb 100644 --- a/src/libsyntax/ext/deriving/mod.rs +++ b/src/libsyntax/ext/deriving/mod.rs @@ -75,12 +75,12 @@ pub fn expand_meta_deriving(cx: &ExtCtxt, MetaList(_, ref titems) => { titems.rev_iter().fold(in_items, |in_items, &titem| { match titem.node { - MetaNameValue(tname, _) | - MetaList(tname, _) | - MetaWord(tname) => { + MetaNameValue(ref tname, _) | + MetaList(ref tname, _) | + MetaWord(ref tname) => { macro_rules! expand(($func:path) => ($func(cx, titem.span, titem, in_items))); - match tname.as_slice() { + match tname.get() { "Clone" => expand!(clone::expand_deriving_clone), "DeepClone" => expand!(clone::expand_deriving_deep_clone), diff --git a/src/libsyntax/ext/deriving/primitive.rs b/src/libsyntax/ext/deriving/primitive.rs index a4e606f53c0c2..e2f72e8708551 100644 --- a/src/libsyntax/ext/deriving/primitive.rs +++ b/src/libsyntax/ext/deriving/primitive.rs @@ -14,6 +14,7 @@ use codemap::Span; use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use parse::token::InternedString; pub fn expand_deriving_from_primitive(cx: &ExtCtxt, span: Span, @@ -73,13 +74,13 @@ fn cs_from(name: &str, cx: &ExtCtxt, trait_span: Span, substr: &Substructure) -> match *substr.fields { StaticStruct(..) 
=> { cx.span_err(trait_span, "`FromPrimitive` cannot be derived for structs"); - return cx.expr_fail(trait_span, @""); + return cx.expr_fail(trait_span, InternedString::new("")); } StaticEnum(enum_def, _) => { if enum_def.variants.is_empty() { cx.span_err(trait_span, "`FromPrimitive` cannot be derived for enums with no variants"); - return cx.expr_fail(trait_span, @""); + return cx.expr_fail(trait_span, InternedString::new("")); } let mut arms = ~[]; @@ -91,7 +92,8 @@ fn cs_from(name: &str, cx: &ExtCtxt, trait_span: Span, substr: &Substructure) -> cx.span_err(trait_span, "`FromPrimitive` cannot be derived for \ enum variants with arguments"); - return cx.expr_fail(trait_span, @""); + return cx.expr_fail(trait_span, + InternedString::new("")); } let span = variant.span; @@ -117,7 +119,8 @@ fn cs_from(name: &str, cx: &ExtCtxt, trait_span: Span, substr: &Substructure) -> cx.span_err(trait_span, "`FromPrimitive` cannot be derived for enums \ with struct variants"); - return cx.expr_fail(trait_span, @""); + return cx.expr_fail(trait_span, + InternedString::new("")); } } } diff --git a/src/libsyntax/ext/deriving/to_str.rs b/src/libsyntax/ext/deriving/to_str.rs index 81453a5a10b05..6101d647ca5dd 100644 --- a/src/libsyntax/ext/deriving/to_str.rs +++ b/src/libsyntax/ext/deriving/to_str.rs @@ -14,6 +14,8 @@ use codemap::Span; use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use parse::token::InternedString; +use parse::token; pub fn expand_deriving_to_str(cx: &ExtCtxt, span: Span, @@ -47,18 +49,22 @@ pub fn expand_deriving_to_str(cx: &ExtCtxt, // doesn't invoke the to_str() method on each field. Hence we mirror // the logic of the repr_to_str() method, but with tweaks to call to_str() // on sub-fields. -fn to_str_substructure(cx: &ExtCtxt, span: Span, - substr: &Substructure) -> @Expr { +fn to_str_substructure(cx: &ExtCtxt, span: Span, substr: &Substructure) + -> @Expr { let to_str = cx.ident_of("to_str"); - let doit = |start: &str, end: @str, name: ast::Ident, + let doit = |start: &str, + end: InternedString, + name: ast::Ident, fields: &[FieldInfo]| { if fields.len() == 0 { - cx.expr_str_uniq(span, cx.str_of(name)) + cx.expr_str_uniq(span, token::get_ident(name.name)) } else { let buf = cx.ident_of("buf"); - let start = cx.str_of(name) + start; - let init = cx.expr_str_uniq(span, start.to_managed()); + let interned_str = token::get_ident(name.name); + let start = + token::intern_and_get_ident(interned_str.get() + start); + let init = cx.expr_str_uniq(span, start); let mut stmts = ~[cx.stmt_let(span, true, buf, init)]; let push_str = cx.ident_of("push_str"); @@ -70,38 +76,53 @@ fn to_str_substructure(cx: &ExtCtxt, span: Span, for (i, &FieldInfo {name, span, self_, .. 
}) in fields.iter().enumerate() { if i > 0 { - push(cx.expr_str(span, @", ")); + push(cx.expr_str(span, InternedString::new(", "))); } match name { None => {} Some(id) => { - let name = cx.str_of(id) + ": "; - push(cx.expr_str(span, name.to_managed())); + let interned_id = token::get_ident(id.name); + let name = interned_id.get() + ": "; + push(cx.expr_str(span, + token::intern_and_get_ident(name))); } } push(cx.expr_method_call(span, self_, to_str, ~[])); } push(cx.expr_str(span, end)); - cx.expr_block(cx.block(span, stmts, Some(cx.expr_ident(span, buf)))) + cx.expr_block(cx.block(span, stmts, Some(cx.expr_ident(span, + buf)))) } }; return match *substr.fields { Struct(ref fields) => { if fields.len() == 0 || fields[0].name.is_none() { - doit("(", @")", substr.type_ident, *fields) + doit("(", + InternedString::new(")"), + substr.type_ident, + *fields) } else { - doit("{", @"}", substr.type_ident, *fields) + doit("{", + InternedString::new("}"), + substr.type_ident, + *fields) } } EnumMatching(_, variant, ref fields) => { match variant.node.kind { ast::TupleVariantKind(..) => - doit("(", @")", variant.node.name, *fields), + doit("(", + InternedString::new(")"), + variant.node.name, + *fields), ast::StructVariantKind(..) => - doit("{", @"}", variant.node.name, *fields), + doit("{", + InternedString::new("}"), + variant.node.name, + *fields), } } diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index a9b40ea7ec638..c23a1ce1e28d5 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -19,6 +19,7 @@ use codemap::Span; use ext::base::*; use ext::base; use ext::build::AstBuilder; +use parse::token; use std::os; @@ -52,7 +53,11 @@ pub fn expand_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) Some((v, _style)) => v }; let msg = match exprs.len() { - 1 => format!("environment variable `{}` not defined", var).to_managed(), + 1 => { + token::intern_and_get_ident(format!("environment variable `{}` \ + not defined", + var)) + } 2 => { match expr_to_str(cx, exprs[1], "expected string literal") { None => return MacResult::dummy_expr(), @@ -65,12 +70,12 @@ pub fn expand_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } }; - let e = match os::getenv(var) { + let e = match os::getenv(var.get()) { None => { - cx.span_err(sp, msg); + cx.span_err(sp, msg.get()); cx.expr_uint(sp, 0) } - Some(s) => cx.expr_str(sp, s.to_managed()) + Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s)) }; MRExpr(e) } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 1ffff03a80f4a..d8d98b2779316 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -22,8 +22,8 @@ use codemap::{Span, Spanned, ExpnInfo, NameAndSpan, MacroBang, MacroAttribute}; use ext::base::*; use fold::*; use parse; +use parse::token::{fresh_mark, fresh_name, intern}; use parse::token; -use parse::token::{fresh_mark, fresh_name, ident_to_str, intern}; use visit; use visit::Visitor; use util::small_vector::SmallVector; @@ -54,13 +54,14 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { return e; } let extname = &pth.segments[0].identifier; - let extnamestr = ident_to_str(extname); + let extnamestr = token::get_ident(extname.name); // leaving explicit deref here to highlight unbox op: let marked_after = match fld.extsbox.find(&extname.name) { None => { fld.cx.span_err( pth.span, - format!("macro undefined: '{}'", extnamestr)); + format!("macro undefined: '{}'", + extnamestr.get())); // let compilation continue return e; @@ -69,7 +70,7 
@@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { fld.cx.bt_push(ExpnInfo { call_site: e.span, callee: NameAndSpan { - name: extnamestr, + name: extnamestr.get().to_str(), format: MacroBang, span: exp_span, }, @@ -94,7 +95,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { pth.span, format!( "non-expr macro in expr pos: {}", - extnamestr + extnamestr.get() ) ); return e; @@ -107,7 +108,8 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { _ => { fld.cx.span_err( pth.span, - format!("'{}' is not a tt-style macro", extnamestr) + format!("'{}' is not a tt-style macro", + extnamestr.get()) ); return e; } @@ -221,12 +223,12 @@ pub fn expand_mod_items(module_: &ast::Mod, fld: &mut MacroExpander) -> ast::Mod item.attrs.rev_iter().fold(~[*item], |items, attr| { let mname = attr.name(); - match fld.extsbox.find(&intern(mname)) { + match fld.extsbox.find(&intern(mname.get())) { Some(&ItemDecorator(dec_fn)) => { fld.cx.bt_push(ExpnInfo { call_site: attr.span, callee: NameAndSpan { - name: mname, + name: mname.get().to_str(), format: MacroAttribute, span: None } @@ -295,28 +297,31 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander) }; let extname = &pth.segments[0].identifier; - let extnamestr = ident_to_str(extname); + let extnamestr = token::get_ident(extname.name); let fm = fresh_mark(); let expanded = match fld.extsbox.find(&extname.name) { None => { fld.cx.span_err(pth.span, - format!("macro undefined: '{}!'", extnamestr)); + format!("macro undefined: '{}!'", + extnamestr.get())); // let compilation continue return SmallVector::zero(); } Some(&NormalTT(ref expander, span)) => { if it.ident.name != parse::token::special_idents::invalid.name { + let string = token::get_ident(it.ident.name); fld.cx.span_err(pth.span, format!("macro {}! expects no ident argument, \ - given '{}'", extnamestr, - ident_to_str(&it.ident))); + given '{}'", + extnamestr.get(), + string.get())); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { call_site: it.span, callee: NameAndSpan { - name: extnamestr, + name: extnamestr.get().to_str(), format: MacroBang, span: span } @@ -328,13 +333,14 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander) Some(&IdentTT(ref expander, span)) => { if it.ident.name == parse::token::special_idents::invalid.name { fld.cx.span_err(pth.span, - format!("macro {}! expects an ident argument", extnamestr)); + format!("macro {}! expects an ident argument", + extnamestr.get())); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { call_site: it.span, callee: NameAndSpan { - name: extnamestr, + name: extnamestr.get().to_str(), format: MacroBang, span: span } @@ -344,7 +350,9 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander) expander.expand(fld.cx, it.span, it.ident, marked_tts) } _ => { - fld.cx.span_err(it.span, format!("{}! is not legal in item position", extnamestr)); + fld.cx.span_err(it.span, + format!("{}! 
is not legal in item position", + extnamestr.get())); return SmallVector::zero(); } }; @@ -356,7 +364,9 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander) .collect() } MRExpr(_) => { - fld.cx.span_err(pth.span, format!("expr macro in item position: {}", extnamestr)); + fld.cx.span_err(pth.span, + format!("expr macro in item position: {}", + extnamestr.get())); return SmallVector::zero(); } MRAny(any_macro) => { @@ -385,7 +395,7 @@ pub fn expand_view_item(vi: &ast::ViewItem, fld: &mut MacroExpander) -> ast::ViewItem { let should_load = vi.attrs.iter().any(|attr| { - "phase" == attr.name() && + attr.name().get() == "phase" && attr.meta_item_list().map_or(false, |phases| { attr::contains_name(phases, "syntax") }) @@ -402,15 +412,18 @@ fn load_extern_macros(crate: &ast::ViewItem, fld: &mut MacroExpander) { let MacroCrate { lib, cnum } = fld.cx.loader.load_crate(crate); let crate_name = match crate.node { - ast::ViewItemExternMod(ref name, _, _) => token::ident_to_str(name), + ast::ViewItemExternMod(ref name, _, _) => { + let string = token::get_ident(name.name); + string.get().to_str() + }, _ => unreachable!(), }; - let name = format!("<{} macros>", crate_name).to_managed(); + let name = format!("<{} macros>", crate_name); let exported_macros = fld.cx.loader.get_exported_macros(cnum); for source in exported_macros.iter() { - let item = parse::parse_item_from_source_str(name, - source.to_managed(), + let item = parse::parse_item_from_source_str(name.clone(), + (*source).clone(), fld.cx.cfg(), fld.cx.parse_sess()) .expect("expected a serialized item"); @@ -475,10 +488,11 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> { return SmallVector::zero(); } let extname = &pth.segments[0].identifier; - let extnamestr = ident_to_str(extname); + let extnamestr = token::get_ident(extname.name); let marked_after = match fld.extsbox.find(&extname.name) { None => { - fld.cx.span_err(pth.span, format!("macro undefined: '{}'", extnamestr)); + fld.cx.span_err(pth.span, format!("macro undefined: '{}'", + extnamestr.get())); return SmallVector::zero(); } @@ -486,7 +500,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> { fld.cx.bt_push(ExpnInfo { call_site: s.span, callee: NameAndSpan { - name: extnamestr, + name: extnamestr.get().to_str(), format: MacroBang, span: exp_span, } @@ -511,7 +525,8 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> { MRAny(any_macro) => any_macro.make_stmt(), _ => { fld.cx.span_err(pth.span, - format!("non-stmt macro in stmt pos: {}", extnamestr)); + format!("non-stmt macro in stmt pos: {}", + extnamestr.get())); return SmallVector::zero(); } }; @@ -520,7 +535,8 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> { } _ => { - fld.cx.span_err(pth.span, format!("'{}' is not a tt-style macro", extnamestr)); + fld.cx.span_err(pth.span, format!("'{}' is not a tt-style macro", + extnamestr.get())); return SmallVector::zero(); } }; @@ -945,7 +961,7 @@ mod test { use fold::*; use ext::base::{CrateLoader, MacroCrate}; use parse; - use parse::token::{fresh_mark, gensym, intern, ident_to_str}; + use parse::token::{fresh_mark, gensym, intern}; use parse::token; use util::parser_testing::{string_to_crate, string_to_crate_and_sess}; use util::parser_testing::{string_to_pat, string_to_tts, strs_to_idents}; @@ -1009,11 +1025,11 @@ mod test { // make sure that macros can leave scope #[should_fail] #[test] fn macros_cant_escape_fns_test () { - let src = @"fn bogus() 
{macro_rules! z (() => (3+4))}\ + let src = ~"fn bogus() {macro_rules! z (() => (3+4))}\ fn inty() -> int { z!() }"; let sess = parse::new_parse_sess(None); let crate_ast = parse::parse_crate_from_source_str( - @"", + ~"", src, ~[],sess); // should fail: @@ -1024,11 +1040,11 @@ mod test { // make sure that macros can leave scope for modules #[should_fail] #[test] fn macros_cant_escape_mods_test () { - let src = @"mod foo {macro_rules! z (() => (3+4))}\ + let src = ~"mod foo {macro_rules! z (() => (3+4))}\ fn inty() -> int { z!() }"; let sess = parse::new_parse_sess(None); let crate_ast = parse::parse_crate_from_source_str( - @"", + ~"", src, ~[],sess); // should fail: @@ -1038,22 +1054,22 @@ mod test { // macro_escape modules shouldn't cause macros to leave scope #[test] fn macros_can_escape_flattened_mods_test () { - let src = @"#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\ + let src = ~"#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\ fn inty() -> int { z!() }"; let sess = parse::new_parse_sess(None); let crate_ast = parse::parse_crate_from_source_str( - @"", + ~"", src, ~[], sess); // should fail: let mut loader = ErrLoader; - expand_crate(sess,&mut loader,~[],crate_ast); + expand_crate(sess, &mut loader, ~[], crate_ast); } #[test] fn test_contains_flatten (){ - let attr1 = make_dummy_attr (@"foo"); - let attr2 = make_dummy_attr (@"bar"); - let escape_attr = make_dummy_attr (@"macro_escape"); + let attr1 = make_dummy_attr ("foo"); + let attr2 = make_dummy_attr ("bar"); + let escape_attr = make_dummy_attr ("macro_escape"); let attrs1 = ~[attr1, escape_attr, attr2]; assert_eq!(contains_macro_escape (attrs1),true); let attrs2 = ~[attr1,attr2]; @@ -1061,13 +1077,13 @@ mod test { } // make a MetaWord outer attribute with the given name - fn make_dummy_attr(s: @str) -> ast::Attribute { + fn make_dummy_attr(s: &str) -> ast::Attribute { Spanned { span:codemap::DUMMY_SP, node: Attribute_ { style: AttrOuter, value: @Spanned { - node: MetaWord(s), + node: MetaWord(token::intern_and_get_ident(s)), span: codemap::DUMMY_SP, }, is_sugared_doc: false, @@ -1077,7 +1093,7 @@ mod test { #[test] fn renaming () { - let item_ast = string_to_crate(@"fn f() -> int { a }"); + let item_ast = string_to_crate(~"fn f() -> int { a }"); let a_name = intern("a"); let a2_name = gensym("a2"); let mut renamer = new_rename_folder(ast::Ident{name:a_name,ctxt:EMPTY_CTXT}, @@ -1116,7 +1132,7 @@ mod test { // pprust::print_crate_(&mut s, crate); //} - fn expand_crate_str(crate_str: @str) -> ast::Crate { + fn expand_crate_str(crate_str: ~str) -> ast::Crate { let (crate_ast,ps) = string_to_crate_and_sess(crate_str); // the cfg argument actually does matter, here... let mut loader = ErrLoader; @@ -1134,7 +1150,7 @@ mod test { //} #[test] fn macro_tokens_should_match(){ - expand_crate_str(@"macro_rules! m((a)=>(13)) fn main(){m!(a);}"); + expand_crate_str(~"macro_rules! 
m((a)=>(13)) fn main(){m!(a);}"); } // renaming tests expand a crate and then check that the bindings match @@ -1208,9 +1224,9 @@ mod test { fn run_renaming_test(t: &RenamingTest, test_idx: uint) { let invalid_name = token::special_idents::invalid.name; let (teststr, bound_connections, bound_ident_check) = match *t { - (ref str,ref conns, bic) => (str.to_managed(), conns.clone(), bic) + (ref str,ref conns, bic) => (str.to_owned(), conns.clone(), bic) }; - let cr = expand_crate_str(teststr.to_managed()); + let cr = expand_crate_str(teststr.to_owned()); // find the bindings: let mut name_finder = new_name_finder(~[]); visit::walk_crate(&mut name_finder,&cr,()); @@ -1260,9 +1276,12 @@ mod test { println!("uh oh, matches but shouldn't:"); println!("varref: {:?}",varref); // good lord, you can't make a path with 0 segments, can you? + let string = token::get_ident(varref.segments[0] + .identifier + .name); println!("varref's first segment's uint: {}, and string: \"{}\"", varref.segments[0].identifier.name, - ident_to_str(&varref.segments[0].identifier)); + string.get()); println!("binding: {:?}", bindings[binding_idx]); ast_util::display_sctable(get_sctable()); } @@ -1273,7 +1292,7 @@ mod test { } #[test] fn fmt_in_macro_used_inside_module_macro() { - let crate_str = @"macro_rules! fmt_wrap(($b:expr)=>($b.to_str())) + let crate_str = ~"macro_rules! fmt_wrap(($b:expr)=>($b.to_str())) macro_rules! foo_module (() => (mod generated { fn a() { let xx = 147; fmt_wrap!(xx);}})) foo_module!() "; @@ -1284,7 +1303,10 @@ foo_module!() let bindings = name_finder.ident_accumulator; let cxbinds : ~[&ast::Ident] = - bindings.iter().filter(|b|{@"xx" == (ident_to_str(*b))}).collect(); + bindings.iter().filter(|b| { + let string = token::get_ident(b.name); + "xx" == string.get() + }).collect(); let cxbind = match cxbinds { [b] => b, _ => fail!("expected just one binding for ext_cx") @@ -1296,9 +1318,13 @@ foo_module!() let varrefs = path_finder.path_accumulator; // the xx binding should bind all of the xx varrefs: - for (idx,v) in varrefs.iter().filter(|p|{ p.segments.len() == 1 - && (@"xx" == (ident_to_str(&p.segments[0].identifier))) - }).enumerate() { + for (idx,v) in varrefs.iter().filter(|p|{ + p.segments.len() == 1 + && { + let string = token::get_ident(p.segments[0].identifier.name); + "xx" == string.get() + } + }).enumerate() { if (mtwt_resolve(v.segments[0].identifier) != resolved_binding) { println!("uh oh, xx binding didn't match xx varref:"); println!("this is xx varref \\# {:?}",idx); @@ -1323,7 +1349,7 @@ foo_module!() #[test] fn pat_idents(){ - let pat = string_to_pat(@"(a,Foo{x:c @ (b,9),y:Bar(4,d)})"); + let pat = string_to_pat(~"(a,Foo{x:c @ (b,9),y:Bar(4,d)})"); let mut pat_idents = new_name_finder(~[]); pat_idents.visit_pat(pat, ()); assert_eq!(pat_idents.ident_accumulator, diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs index bbf6f7fff7f9e..ba1d5efdd49cd 100644 --- a/src/libsyntax/ext/format.rs +++ b/src/libsyntax/ext/format.rs @@ -14,23 +14,24 @@ use codemap::{Span, respan}; use ext::base::*; use ext::base; use ext::build::AstBuilder; -use rsparse = parse; -use parse::token; use opt_vec; +use parse::token::InternedString; +use parse::token; +use rsparse = parse; use std::fmt::parse; use std::hashmap::{HashMap, HashSet}; use std::vec; #[deriving(Eq)] enum ArgumentType { - Known(@str), + Known(~str), Unsigned, String, } enum Position { Exact(uint), - Named(@str), + Named(~str), } struct Context<'a> { @@ -42,12 +43,12 @@ struct Context<'a> { args: ~[@ast::Expr], 
arg_types: ~[Option], // Parsed named expressions and the types that we've found for them so far - names: HashMap<@str, @ast::Expr>, - name_types: HashMap<@str, ArgumentType>, + names: HashMap<~str, @ast::Expr>, + name_types: HashMap<~str, ArgumentType>, // Collection of the compiled `rt::Piece` structures pieces: ~[@ast::Expr], - name_positions: HashMap<@str, uint>, + name_positions: HashMap<~str, uint>, method_statics: ~[@ast::Item], // Updated as arguments are consumed or methods are entered @@ -104,10 +105,11 @@ impl<'a> Context<'a> { return (extra, None); } }; - let name = self.ecx.str_of(ident); + let interned_name = token::get_ident(ident.name); + let name = interned_name.get(); p.expect(&token::EQ); let e = p.parse_expr(); - match self.names.find(&name) { + match self.names.find_equiv(&name) { None => {} Some(prev) => { self.ecx.span_err(e.span, format!("duplicate argument \ @@ -117,7 +119,7 @@ impl<'a> Context<'a> { continue } } - self.names.insert(name, e); + self.names.insert(name.to_str(), e); } else { self.args.push(p.parse_expr()); self.arg_types.push(None); @@ -156,13 +158,13 @@ impl<'a> Context<'a> { Exact(i) } parse::ArgumentIs(i) => Exact(i), - parse::ArgumentNamed(s) => Named(s.to_managed()), + parse::ArgumentNamed(s) => Named(s.to_str()), }; // and finally the method being applied match arg.method { None => { - let ty = Known(arg.format.ty.to_managed()); + let ty = Known(arg.format.ty.to_str()); self.verify_arg_type(pos, ty); } Some(ref method) => { self.verify_method(pos, *method); } @@ -184,7 +186,7 @@ impl<'a> Context<'a> { self.verify_arg_type(Exact(i), Unsigned); } parse::CountIsName(s) => { - self.verify_arg_type(Named(s.to_managed()), Unsigned); + self.verify_arg_type(Named(s.to_str()), Unsigned); } parse::CountIsNextParam => { if self.check_positional_ok() { @@ -259,7 +261,13 @@ impl<'a> Context<'a> { self.ecx.span_err(self.fmtsp, msg); return; } - self.verify_same(self.args[arg].span, ty, self.arg_types[arg]); + { + let arg_type = match self.arg_types[arg] { + None => None, + Some(ref x) => Some(x) + }; + self.verify_same(self.args[arg].span, &ty, arg_type); + } if self.arg_types[arg].is_none() { self.arg_types[arg] = Some(ty); } @@ -274,10 +282,9 @@ impl<'a> Context<'a> { return; } }; - self.verify_same(span, ty, - self.name_types.find(&name).map(|&x| x)); + self.verify_same(span, &ty, self.name_types.find(&name)); if !self.name_types.contains_key(&name) { - self.name_types.insert(name, ty); + self.name_types.insert(name.clone(), ty); } // Assign this named argument a slot in the arguments array if // it hasn't already been assigned a slot. 
@@ -297,30 +304,36 @@ impl<'a> Context<'a> { /// /// Obviously `Some(Some(x)) != Some(Some(y))`, but we consider it true /// that: `Some(None) == Some(Some(x))` - fn verify_same(&self, sp: Span, ty: ArgumentType, - before: Option) { + fn verify_same(&self, + sp: Span, + ty: &ArgumentType, + before: Option<&ArgumentType>) { let cur = match before { None => return, Some(t) => t, }; - if ty == cur { return } + if *ty == *cur { + return + } match (cur, ty) { - (Known(cur), Known(ty)) => { + (&Known(ref cur), &Known(ref ty)) => { self.ecx.span_err(sp, format!("argument redeclared with type `{}` when \ - it was previously `{}`", ty, cur)); + it was previously `{}`", + *ty, + *cur)); } - (Known(cur), _) => { + (&Known(ref cur), _) => { self.ecx.span_err(sp, format!("argument used to format with `{}` was \ attempted to not be used for formatting", - cur)); + *cur)); } - (_, Known(ty)) => { + (_, &Known(ref ty)) => { self.ecx.span_err(sp, format!("argument previously used as a format \ argument attempted to be used as `{}`", - ty)); + *ty)); } (_, _) => { self.ecx.span_err(sp, "argument declared with multiple formats"); @@ -333,13 +346,18 @@ impl<'a> Context<'a> { fn static_attrs(&self) -> ~[ast::Attribute] { // Flag statics as `address_insignificant` so LLVM can merge duplicate // globals as much as possible (which we're generating a whole lot of). - let unnamed = self.ecx.meta_word(self.fmtsp, @"address_insignificant"); + let unnamed = self.ecx + .meta_word(self.fmtsp, + InternedString::new( + "address_insignificant")); let unnamed = self.ecx.attribute(self.fmtsp, unnamed); // Do not warn format string as dead code - let dead_code = self.ecx.meta_word(self.fmtsp, @"dead_code"); + let dead_code = self.ecx.meta_word(self.fmtsp, + InternedString::new("dead_code")); let allow_dead_code = self.ecx.meta_list(self.fmtsp, - @"allow", ~[dead_code]); + InternedString::new("allow"), + ~[dead_code]); let allow_dead_code = self.ecx.attribute(self.fmtsp, allow_dead_code); return ~[unnamed, allow_dead_code]; } @@ -391,9 +409,8 @@ impl<'a> Context<'a> { self.ecx.expr_path(path) } parse::CountIsName(n) => { - let n = n.to_managed(); - let i = match self.name_positions.find_copy(&n) { - Some(i) => i, + let i = match self.name_positions.find_equiv(&n) { + Some(&i) => i, None => 0, // error already emitted elsewhere }; let i = i + self.args.len(); @@ -410,7 +427,7 @@ impl<'a> Context<'a> { let result = arm.result.iter().map(|p| { self.trans_piece(p) }).collect(); - let s = arm.selector.to_managed(); + let s = token::intern_and_get_ident(arm.selector); let selector = self.ecx.expr_str(sp, s); self.ecx.expr_struct(sp, p, ~[ self.ecx.field_imm(sp, @@ -486,8 +503,12 @@ impl<'a> Context<'a> { match *piece { parse::String(s) => { - self.ecx.expr_call_global(sp, rtpath("String"), - ~[self.ecx.expr_str(sp, s.to_managed())]) + let s = token::intern_and_get_ident(s); + self.ecx.expr_call_global(sp, + rtpath("String"), + ~[ + self.ecx.expr_str(sp, s) + ]) } parse::CurrentArgument => { let nil = self.ecx.expr_lit(sp, ast::LitNil); @@ -509,9 +530,8 @@ impl<'a> Context<'a> { // Named arguments are converted to positional arguments at // the end of the list of arguments parse::ArgumentNamed(n) => { - let n = n.to_managed(); - let i = match self.name_positions.find_copy(&n) { - Some(i) => i, + let i = match self.name_positions.find_equiv(&n) { + Some(&i) => i, None => 0, // error already emitted elsewhere }; let i = i + self.args.len(); @@ -623,14 +643,17 @@ impl<'a> Context<'a> { locals.push(self.format_arg(e.span, Exact(i), 
self.ecx.expr_ident(e.span, name))); } - for (&name, &e) in self.names.iter() { - if !self.name_types.contains_key(&name) { continue } + for (name, &e) in self.names.iter() { + if !self.name_types.contains_key(name) { + continue + } - let lname = self.ecx.ident_of(format!("__arg{}", name)); + let lname = self.ecx.ident_of(format!("__arg{}", *name)); let e = self.ecx.expr_addr_of(e.span, e); lets.push(self.ecx.stmt_let(e.span, false, lname, e)); - names[*self.name_positions.get(&name)] = - Some(self.format_arg(e.span, Named(name), + names[*self.name_positions.get(name)] = + Some(self.format_arg(e.span, + Named((*name).clone()), self.ecx.expr_ident(e.span, lname))); } @@ -672,16 +695,16 @@ impl<'a> Context<'a> { Some(result))) } - fn format_arg(&self, sp: Span, argno: Position, - arg: @ast::Expr) -> @ast::Expr { + fn format_arg(&self, sp: Span, argno: Position, arg: @ast::Expr) + -> @ast::Expr { let ty = match argno { - Exact(i) => self.arg_types[i].unwrap(), - Named(s) => *self.name_types.get(&s) + Exact(ref i) => self.arg_types[*i].get_ref(), + Named(ref s) => self.name_types.get(s) }; - let fmt_trait = match ty { - Known(tyname) => { - match tyname.as_slice() { + let fmt_trait = match *ty { + Known(ref tyname) => { + match (*tyname).as_slice() { "" => "Default", "?" => "Poly", "b" => "Bool", @@ -698,8 +721,9 @@ impl<'a> Context<'a> { "x" => "LowerHex", "X" => "UpperHex", _ => { - self.ecx.span_err(sp, format!("unknown format trait \ - `{}`", tyname)); + self.ecx.span_err(sp, + format!("unknown format trait `{}`", + *tyname)); "Dummy" } } @@ -757,8 +781,9 @@ pub fn expand_args(ecx: &mut ExtCtxt, sp: Span, // Be sure to recursively expand macros just in case the format string uses // a macro to build the format expression. let expr = cx.ecx.expand_expr(efmt); - let fmt = match expr_to_str(cx.ecx, expr, - "format argument must be a string literal.") { + let fmt = match expr_to_str(cx.ecx, + expr, + "format argument must be a string literal.") { Some((fmt, _)) => fmt, None => return MacResult::dummy_expr() }; @@ -770,7 +795,7 @@ pub fn expand_args(ecx: &mut ExtCtxt, sp: Span, cx.ecx.span_err(efmt.span, m); } }).inside(|| { - for piece in parse::Parser::new(fmt) { + for piece in parse::Parser::new(fmt.get()) { if !err { cx.verify_piece(&piece); let piece = cx.trans_piece(&piece); diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 6faed270875c4..bd1ac616f52d2 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -31,6 +31,7 @@ use parse; pub mod rt { use ast; use ext::base::ExtCtxt; + use parse::token; use parse; use print::pprust; @@ -65,132 +66,134 @@ pub mod rt { pub trait ToSource { // Takes a thing and generates a string containing rust code for it. 
- fn to_source(&self) -> @str; + fn to_source(&self) -> ~str; } impl ToSource for ast::Ident { - fn to_source(&self) -> @str { - ident_to_str(self) + fn to_source(&self) -> ~str { + let this = get_ident(self.name); + this.get().to_owned() } } impl ToSource for @ast::Item { - fn to_source(&self) -> @str { - pprust::item_to_str(*self, get_ident_interner()).to_managed() + fn to_source(&self) -> ~str { + pprust::item_to_str(*self, get_ident_interner()) } } impl<'a> ToSource for &'a [@ast::Item] { - fn to_source(&self) -> @str { - self.map(|i| i.to_source()).connect("\n\n").to_managed() + fn to_source(&self) -> ~str { + self.map(|i| i.to_source()).connect("\n\n") } } impl ToSource for ast::Ty { - fn to_source(&self) -> @str { - pprust::ty_to_str(self, get_ident_interner()).to_managed() + fn to_source(&self) -> ~str { + pprust::ty_to_str(self, get_ident_interner()) } } impl<'a> ToSource for &'a [ast::Ty] { - fn to_source(&self) -> @str { - self.map(|i| i.to_source()).connect(", ").to_managed() + fn to_source(&self) -> ~str { + self.map(|i| i.to_source()).connect(", ") } } impl ToSource for Generics { - fn to_source(&self) -> @str { - pprust::generics_to_str(self, get_ident_interner()).to_managed() + fn to_source(&self) -> ~str { + pprust::generics_to_str(self, get_ident_interner()) } } impl ToSource for @ast::Expr { - fn to_source(&self) -> @str { - pprust::expr_to_str(*self, get_ident_interner()).to_managed() + fn to_source(&self) -> ~str { + pprust::expr_to_str(*self, get_ident_interner()) } } impl ToSource for ast::Block { - fn to_source(&self) -> @str { - pprust::block_to_str(self, get_ident_interner()).to_managed() + fn to_source(&self) -> ~str { + pprust::block_to_str(self, get_ident_interner()) } } impl<'a> ToSource for &'a str { - fn to_source(&self) -> @str { - let lit = dummy_spanned(ast::LitStr(self.to_managed(), ast::CookedStr)); - pprust::lit_to_str(&lit).to_managed() + fn to_source(&self) -> ~str { + let lit = dummy_spanned(ast::LitStr( + token::intern_and_get_ident(*self), ast::CookedStr)); + pprust::lit_to_str(&lit) } } impl ToSource for int { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for i8 { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI8)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for i16 { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI16)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for i32 { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI32)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for i64 { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI64)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for uint { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for u8 { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU8)); - 
pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for u16 { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU16)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for u32 { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU32)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } impl ToSource for u64 { - fn to_source(&self) -> @str { + fn to_source(&self) -> ~str { let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU64)); - pprust::lit_to_str(&lit).to_managed() + pprust::lit_to_str(&lit) } } @@ -237,52 +240,49 @@ pub mod rt { impl_to_tokens!(u64) pub trait ExtParseUtils { - fn parse_item(&self, s: @str) -> @ast::Item; - fn parse_expr(&self, s: @str) -> @ast::Expr; - fn parse_stmt(&self, s: @str) -> @ast::Stmt; - fn parse_tts(&self, s: @str) -> ~[ast::TokenTree]; + fn parse_item(&self, s: ~str) -> @ast::Item; + fn parse_expr(&self, s: ~str) -> @ast::Expr; + fn parse_stmt(&self, s: ~str) -> @ast::Stmt; + fn parse_tts(&self, s: ~str) -> ~[ast::TokenTree]; } impl<'a> ExtParseUtils for ExtCtxt<'a> { - fn parse_item(&self, s: @str) -> @ast::Item { + fn parse_item(&self, s: ~str) -> @ast::Item { let res = parse::parse_item_from_source_str( - @"", + "".to_str(), s, self.cfg(), self.parse_sess()); match res { Some(ast) => ast, None => { - error!("Parse error with ```\n{}\n```", s); + error!("Parse error"); fail!() } } } - fn parse_stmt(&self, s: @str) -> @ast::Stmt { - parse::parse_stmt_from_source_str( - @"", - s, - self.cfg(), - ~[], - self.parse_sess()) + fn parse_stmt(&self, s: ~str) -> @ast::Stmt { + parse::parse_stmt_from_source_str("".to_str(), + s, + self.cfg(), + ~[], + self.parse_sess()) } - fn parse_expr(&self, s: @str) -> @ast::Expr { - parse::parse_expr_from_source_str( - @"", - s, - self.cfg(), - self.parse_sess()) + fn parse_expr(&self, s: ~str) -> @ast::Expr { + parse::parse_expr_from_source_str("".to_str(), + s, + self.cfg(), + self.parse_sess()) } - fn parse_tts(&self, s: @str) -> ~[ast::TokenTree] { - parse::parse_tts_from_source_str( - @"", - s, - self.cfg(), - self.parse_sess()) + fn parse_tts(&self, s: ~str) -> ~[ast::TokenTree] { + parse::parse_tts_from_source_str("".to_str(), + s, + self.cfg(), + self.parse_sess()) } } @@ -349,7 +349,7 @@ fn id_ext(str: &str) -> ast::Ident { // Lift an ident to the expr that evaluates to that ident. 
fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> @ast::Expr { - let e_str = cx.expr_str(sp, cx.str_of(ident)); + let e_str = cx.expr_str(sp, token::get_ident(ident.name)); cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("ext_cx")), id_ext("ident_of"), diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index a9f94da7a98cb..44f3bb379f63e 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -16,7 +16,8 @@ use ext::base::*; use ext::base; use ext::build::AstBuilder; use parse; -use parse::token::{get_ident_interner}; +use parse::token::get_ident_interner; +use parse::token; use print::pprust; use std::io; @@ -57,21 +58,26 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let topmost = topmost_expn_info(cx.backtrace().unwrap()); let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo); - let filename = loc.file.name; + let filename = token::intern_and_get_ident(loc.file.name); base::MRExpr(cx.expr_str(topmost.call_site, filename)) } pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { let s = pprust::tts_to_str(tts, get_ident_interner()); - base::MRExpr(cx.expr_str(sp, s.to_managed())) + base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(s))) } pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { base::check_zero_tts(cx, sp, tts, "module_path!"); - base::MRExpr(cx.expr_str(sp, - cx.mod_path().map(|x| cx.str_of(*x)).connect("::").to_managed())) + let string = cx.mod_path() + .map(|x| { + let interned_str = token::get_ident(x.name); + interned_str.get().to_str() + }) + .connect("::"); + base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(string))) } // include! : parse the given file as an expr @@ -113,11 +119,11 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) Some(src) => { // Add this input file to the code map to make it available as // dependency information - let src = src.to_managed(); - let filename = file.display().to_str().to_managed(); + let filename = file.display().to_str(); + let interned = token::intern_and_get_ident(src); cx.parse_sess.cm.new_filemap(filename, src); - base::MRExpr(cx.expr_str(sp, src)) + base::MRExpr(cx.expr_str(sp, interned)) } None => { cx.span_err(sp, format!("{} wasn't a utf-8 file", file.display())); diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index d5a30a7cf1186..6d1b8dd235854 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -18,7 +18,7 @@ use parse::lexer::*; //resolve bug? 
use parse::ParseSess; use parse::attr::ParserAttr; use parse::parser::{LifetimeAndTypesWithoutColons, Parser}; -use parse::token::{Token, EOF, to_str, Nonterminal, get_ident_interner, ident_to_str}; +use parse::token::{Token, EOF, to_str, Nonterminal, get_ident_interner}; use parse::token; use std::hashmap::HashMap; @@ -183,8 +183,9 @@ pub fn nameize(p_s: @ParseSess, ms: &[Matcher], res: &[@NamedMatch]) node: MatchNonterminal(ref bind_name, _, idx), span: sp } => { if ret_val.contains_key(bind_name) { - p_s.span_diagnostic.span_fatal(sp, - "Duplicated bind name: "+ ident_to_str(bind_name)) + let string = token::get_ident(bind_name.name); + p_s.span_diagnostic + .span_fatal(sp, "Duplicated bind name: " + string.get()) } ret_val.insert(*bind_name, res[idx]); } @@ -364,8 +365,11 @@ pub fn parse(sess: @ParseSess, let nts = bb_eis.map(|ei| { match ei.elts[ei.idx].node { MatchNonterminal(ref bind,ref name,_) => { - format!("{} ('{}')", ident_to_str(name), - ident_to_str(bind)) + let bind_string = token::get_ident(bind.name); + let name_string = token::get_ident(name.name); + format!("{} ('{}')", + name_string.get(), + bind_string.get()) } _ => fail!() } }).connect(" or "); @@ -388,8 +392,9 @@ pub fn parse(sess: @ParseSess, let mut ei = bb_eis.pop().unwrap(); match ei.elts[ei.idx].node { MatchNonterminal(_, ref name, idx) => { + let name_string = token::get_ident(name.name); ei.matches[idx].push(@MatchedNonterminal( - parse_nt(&mut rust_parser, ident_to_str(name)))); + parse_nt(&mut rust_parser, name_string.get()))); ei.idx += 1u; } _ => fail!() diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index aabd9c694f7c0..c179e9959e0be 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -21,8 +21,9 @@ use ext::tt::macro_parser::{parse, parse_or_else}; use parse::lexer::{new_tt_reader, Reader}; use parse::parser::Parser; use parse::attr::ParserAttr; -use parse::token::{get_ident_interner, special_idents, gensym_ident, ident_to_str}; +use parse::token::{get_ident_interner, special_idents, gensym_ident}; use parse::token::{FAT_ARROW, SEMI, NtMatchers, NtTT, EOF}; +use parse::token; use print; use std::cell::RefCell; use util::small_vector::SmallVector; @@ -112,10 +113,11 @@ fn generic_extension(cx: &ExtCtxt, rhses: &[@NamedMatch]) -> MacResult { if cx.trace_macros() { + let interned_name = token::get_ident(name.name); println!("{}! \\{ {} \\}", - cx.str_of(name), - print::pprust::tt_to_str(&TTDelim(@arg.to_owned()), - get_ident_interner())); + interned_name.get(), + print::pprust::tt_to_str(&TTDelim(@arg.to_owned()), + get_ident_interner())); } // Which arm's failure should we report? 
(the one furthest along) @@ -229,7 +231,7 @@ pub fn add_new_extension(cx: &mut ExtCtxt, }; return MRDef(MacroDef { - name: ident_to_str(&name), + name: token::get_ident(name.name).get().to_str(), ext: NormalTT(exp, Some(sp)) }); } diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 87a2f374c9005..d2fa24b1cfede 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -14,7 +14,7 @@ use codemap::{Span, DUMMY_SP}; use diagnostic::SpanHandler; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; use parse::token::{EOF, INTERPOLATED, IDENT, Token, NtIdent}; -use parse::token::{ident_to_str}; +use parse::token; use parse::lexer::TokenAndSpan; use std::cell::{Cell, RefCell}; @@ -122,9 +122,10 @@ fn lookup_cur_matched(r: &TtReader, name: Ident) -> @NamedMatch { match matched_opt { Some(s) => lookup_cur_matched_by_matched(r, s), None => { + let name_string = token::get_ident(name.name); r.sp_diag.span_fatal(r.cur_span.get(), format!("unknown macro variable `{}`", - ident_to_str(&name))); + name_string.get())); } } } @@ -145,11 +146,11 @@ fn lis_merge(lhs: LockstepIterSize, rhs: LockstepIterSize) -> LockstepIterSize { LisContradiction(_) => rhs.clone(), LisConstraint(r_len, _) if l_len == r_len => lhs.clone(), LisConstraint(r_len, ref r_id) => { - let l_n = ident_to_str(l_id); - let r_n = ident_to_str(r_id); + let l_n = token::get_ident(l_id.name); + let r_n = token::get_ident(r_id.name); LisContradiction(format!("Inconsistent lockstep iteration: \ '{}' has {} items, but '{}' has {}", - l_n, l_len, r_n, r_len)) + l_n.get(), l_len, r_n.get(), r_len)) } } } @@ -313,10 +314,11 @@ pub fn tt_next_token(r: &TtReader) -> TokenAndSpan { return ret_val; } MatchedSeq(..) => { + let string = token::get_ident(ident.name); r.sp_diag.span_fatal( r.cur_span.get(), /* blame the macro writer */ format!("variable '{}' is still repeating at this depth", - ident_to_str(&ident))); + string.get())); } } } diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 8dac13f1e31a9..8f5bbc2cdad18 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -321,15 +321,14 @@ fn fold_meta_item_(mi: @MetaItem, fld: &mut T) -> @MetaItem { @Spanned { node: match mi.node { - MetaWord(id) => MetaWord(id), - MetaList(id, ref mis) => { + MetaWord(ref id) => MetaWord((*id).clone()), + MetaList(ref id, ref mis) => { let fold_meta_item = |x| fold_meta_item_(x, fld); - MetaList( - id, - mis.map(|e| fold_meta_item(*e)) - ) + MetaList((*id).clone(), mis.map(|e| fold_meta_item(*e))) + } + MetaNameValue(ref id, ref s) => { + MetaNameValue((*id).clone(), (*s).clone()) } - MetaNameValue(id, s) => MetaNameValue(id, s) }, span: fld.new_span(mi.span) } } @@ -498,12 +497,10 @@ fn fold_variant_arg_(va: &VariantArg, folder: &mut T) -> VariantArg { pub fn noop_fold_view_item(vi: &ViewItem, folder: &mut T) -> ViewItem{ let inner_view_item = match vi.node { - ViewItemExternMod(ref ident, - string, - node_id) => { + ViewItemExternMod(ref ident, ref string, node_id) => { ViewItemExternMod(ident.clone(), - string, - folder.new_id(node_id)) + (*string).clone(), + folder.new_id(node_id)) } ViewItemUse(ref view_paths) => { ViewItemUse(folder.fold_view_paths(*view_paths)) @@ -815,8 +812,12 @@ pub fn noop_fold_expr(e: @Expr, folder: &mut T) -> @Expr { } ExprInlineAsm(ref a) => { ExprInlineAsm(InlineAsm { - inputs: a.inputs.map(|&(c, input)| (c, folder.fold_expr(input))), - outputs: a.outputs.map(|&(c, out)| (c, folder.fold_expr(out))), + inputs: 
a.inputs.map(|&(ref c, input)| { + ((*c).clone(), folder.fold_expr(input)) + }), + outputs: a.outputs.map(|&(ref c, out)| { + ((*c).clone(), folder.fold_expr(out)) + }), .. (*a).clone() }) } @@ -898,7 +899,8 @@ mod test { // make sure idents get transformed everywhere #[test] fn ident_transformation () { let mut zz_fold = ToZzIdentFolder; - let ast = string_to_crate(@"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}"); + let ast = string_to_crate( + ~"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}"); assert_pred!(matches_codepattern, "matches_codepattern", pprust::to_str(&mut zz_fold.fold_crate(ast),fake_print_crate, @@ -909,8 +911,9 @@ mod test { // even inside macro defs.... #[test] fn ident_transformation_in_defs () { let mut zz_fold = ToZzIdentFolder; - let ast = string_to_crate(@"macro_rules! a {(b $c:expr $(d $e:token)f+ -=> (g $(d $d $e)+))} "); + let ast = string_to_crate( + ~"macro_rules! a {(b $c:expr $(d $e:token)f+ => \ + (g $(d $d $e)+))} "); assert_pred!(matches_codepattern, "matches_codepattern", pprust::to_str(&mut zz_fold.fold_crate(ast),fake_print_crate, diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index e7630a668558d..c9bea78d02db5 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -45,7 +45,7 @@ impl ParserAttr for Parser { } token::DOC_COMMENT(s) => { let attr = ::attr::mk_sugared_doc_attr( - self.id_to_str(s), + self.id_to_interned_str(s), self.span.lo, self.span.hi ); @@ -133,7 +133,7 @@ impl ParserAttr for Parser { } token::DOC_COMMENT(s) => { self.bump(); - ::attr::mk_sugared_doc_attr(self.id_to_str(s), + ::attr::mk_sugared_doc_attr(self.id_to_interned_str(s), self.span.lo, self.span.hi) } @@ -157,7 +157,7 @@ impl ParserAttr for Parser { fn parse_meta_item(&mut self) -> @ast::MetaItem { let lo = self.span.lo; let ident = self.parse_ident(); - let name = self.id_to_str(ident); + let name = self.id_to_interned_str(ident); match self.token { token::EQ => { self.bump(); diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs index aa5e4e01ae0a0..7165e7b404f1c 100644 --- a/src/libsyntax/parse/comments.rs +++ b/src/libsyntax/parse/comments.rs @@ -54,7 +54,6 @@ pub fn doc_comment_style(comment: &str) -> ast::AttrStyle { } pub fn strip_doc_comment_decoration(comment: &str) -> ~str { - /// remove whitespace-only lines from the start/end of lines fn vertical_trim(lines: ~[~str]) -> ~[~str] { let mut i = 0u; @@ -348,10 +347,10 @@ pub struct Literal { // probably not a good thing. 
pub fn gather_comments_and_literals(span_diagnostic: @diagnostic::SpanHandler, - path: @str, + path: ~str, srdr: &mut io::Reader) -> (~[Comment], ~[Literal]) { - let src = str::from_utf8_owned(srdr.read_to_end()).unwrap().to_managed(); + let src = str::from_utf8_owned(srdr.read_to_end()).unwrap(); let cm = CodeMap::new(); let filemap = cm.new_filemap(path, src); let rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap); diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs index 2521bb515f769..8c55990289aa8 100644 --- a/src/libsyntax/parse/lexer.rs +++ b/src/libsyntax/parse/lexer.rs @@ -42,7 +42,6 @@ pub struct TokenAndSpan { pub struct StringReader { span_diagnostic: @SpanHandler, - src: @str, // The absolute offset within the codemap of the next character to read pos: Cell, // The absolute offset within the codemap of the last character read(curr) @@ -73,7 +72,6 @@ pub fn new_low_level_string_reader(span_diagnostic: @SpanHandler, let initial_char = '\n'; let r = @StringReader { span_diagnostic: span_diagnostic, - src: filemap.src, pos: Cell::new(filemap.start_pos), last_pos: Cell::new(filemap.start_pos), col: Cell::new(CharPos(0)), @@ -93,7 +91,6 @@ pub fn new_low_level_string_reader(span_diagnostic: @SpanHandler, fn dup_string_reader(r: @StringReader) -> @StringReader { @StringReader { span_diagnostic: r.span_diagnostic, - src: r.src, pos: Cell::new(r.pos.get()), last_pos: Cell::new(r.last_pos.get()), col: Cell::new(r.col.get()), @@ -188,7 +185,7 @@ fn fatal_span_verbose(rdr: @StringReader, -> ! { let mut m = m; m.push_str(": "); - let s = rdr.src.slice( + let s = rdr.filemap.src.slice( byte_offset(rdr, from_pos).to_uint(), byte_offset(rdr, to_pos).to_uint()); m.push_str(s); @@ -239,7 +236,7 @@ fn with_str_from_to( end: BytePos, f: |s: &str| -> T) -> T { - f(rdr.src.slice( + f(rdr.filemap.src.slice( byte_offset(rdr, start).to_uint(), byte_offset(rdr, end).to_uint())) } @@ -249,12 +246,12 @@ fn with_str_from_to( pub fn bump(rdr: &StringReader) { rdr.last_pos.set(rdr.pos.get()); let current_byte_offset = byte_offset(rdr, rdr.pos.get()).to_uint(); - if current_byte_offset < (rdr.src).len() { + if current_byte_offset < (rdr.filemap.src).len() { assert!(rdr.curr.get() != unsafe { transmute(-1u32) }); // FIXME: #8971: unsound let last_char = rdr.curr.get(); - let next = rdr.src.char_range_at(current_byte_offset); + let next = rdr.filemap.src.char_range_at(current_byte_offset); let byte_offset_diff = next.next - current_byte_offset; rdr.pos.set(rdr.pos.get() + Pos::from_uint(byte_offset_diff)); rdr.curr.set(next.ch); @@ -277,8 +274,8 @@ pub fn is_eof(rdr: @StringReader) -> bool { } pub fn nextch(rdr: @StringReader) -> char { let offset = byte_offset(rdr, rdr.pos.get()).to_uint(); - if offset < (rdr.src).len() { - return rdr.src.char_at(offset); + if offset < (rdr.filemap.src).len() { + return rdr.filemap.src.char_at(offset); } else { return unsafe { transmute(-1u32) }; } // FIXME: #8971: unsound } @@ -975,9 +972,9 @@ mod test { } // open a string reader for the given string - fn setup(teststr: @str) -> Env { + fn setup(teststr: ~str) -> Env { let cm = CodeMap::new(); - let fm = cm.new_filemap(@"zebra.rs", teststr); + let fm = cm.new_filemap(~"zebra.rs", teststr); let span_handler = diagnostic::mk_span_handler(diagnostic::mk_handler(None),@cm); Env { @@ -987,7 +984,7 @@ mod test { #[test] fn t1 () { let Env {string_reader} = - setup(@"/* my source file */ \ + setup(~"/* my source file */ \ fn main() { println!(\"zebra\"); }\n"); let id = str_to_ident("fn"); let 
tok1 = string_reader.next_token(); @@ -1023,14 +1020,14 @@ mod test { } #[test] fn doublecolonparsing () { - let env = setup (@"a b"); + let env = setup (~"a b"); check_tokenization (env, ~[mk_ident("a",false), mk_ident("b",false)]); } #[test] fn dcparsing_2 () { - let env = setup (@"a::b"); + let env = setup (~"a::b"); check_tokenization (env, ~[mk_ident("a",true), token::MOD_SEP, @@ -1038,7 +1035,7 @@ mod test { } #[test] fn dcparsing_3 () { - let env = setup (@"a ::b"); + let env = setup (~"a ::b"); check_tokenization (env, ~[mk_ident("a",false), token::MOD_SEP, @@ -1046,7 +1043,7 @@ mod test { } #[test] fn dcparsing_4 () { - let env = setup (@"a:: b"); + let env = setup (~"a:: b"); check_tokenization (env, ~[mk_ident("a",true), token::MOD_SEP, @@ -1054,28 +1051,28 @@ mod test { } #[test] fn character_a() { - let env = setup(@"'a'"); + let env = setup(~"'a'"); let TokenAndSpan {tok, sp: _} = env.string_reader.next_token(); assert_eq!(tok,token::LIT_CHAR('a' as u32)); } #[test] fn character_space() { - let env = setup(@"' '"); + let env = setup(~"' '"); let TokenAndSpan {tok, sp: _} = env.string_reader.next_token(); assert_eq!(tok, token::LIT_CHAR(' ' as u32)); } #[test] fn character_escaped() { - let env = setup(@"'\\n'"); + let env = setup(~"'\\n'"); let TokenAndSpan {tok, sp: _} = env.string_reader.next_token(); assert_eq!(tok, token::LIT_CHAR('\n' as u32)); } #[test] fn lifetime_name() { - let env = setup(@"'abc"); + let env = setup(~"'abc"); let TokenAndSpan {tok, sp: _} = env.string_reader.next_token(); let id = token::str_to_ident("abc"); @@ -1083,7 +1080,7 @@ mod test { } #[test] fn raw_string() { - let env = setup(@"r###\"\"#a\\b\x00c\"\"###"); + let env = setup(~"r###\"\"#a\\b\x00c\"\"###"); let TokenAndSpan {tok, sp: _} = env.string_reader.next_token(); let id = token::str_to_ident("\"#a\\b\x00c\""); @@ -1097,7 +1094,7 @@ mod test { } #[test] fn nested_block_comments() { - let env = setup(@"/* /* */ */'a'"); + let env = setup(~"/* /* */ */'a'"); let TokenAndSpan {tok, sp: _} = env.string_reader.next_token(); assert_eq!(tok,token::LIT_CHAR('a' as u32)); diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index e026a11cafe37..cec9f7c2d9f11 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -89,12 +89,11 @@ pub fn parse_crate_attrs_from_file( return inner; } -pub fn parse_crate_from_source_str( - name: @str, - source: @str, - cfg: ast::CrateConfig, - sess: @ParseSess -) -> ast::Crate { +pub fn parse_crate_from_source_str(name: ~str, + source: ~str, + cfg: ast::CrateConfig, + sess: @ParseSess) + -> ast::Crate { let mut p = new_parser_from_source_str(sess, /*bad*/ cfg.clone(), name, @@ -102,12 +101,11 @@ pub fn parse_crate_from_source_str( maybe_aborted(p.parse_crate_mod(),p) } -pub fn parse_crate_attrs_from_source_str( - name: @str, - source: @str, - cfg: ast::CrateConfig, - sess: @ParseSess -) -> ~[ast::Attribute] { +pub fn parse_crate_attrs_from_source_str(name: ~str, + source: ~str, + cfg: ast::CrateConfig, + sess: @ParseSess) + -> ~[ast::Attribute] { let mut p = new_parser_from_source_str(sess, /*bad*/ cfg.clone(), name, @@ -116,44 +114,40 @@ pub fn parse_crate_attrs_from_source_str( return inner; } -pub fn parse_expr_from_source_str( - name: @str, - source: @str, - cfg: ast::CrateConfig, - sess: @ParseSess -) -> @ast::Expr { +pub fn parse_expr_from_source_str(name: ~str, + source: ~str, + cfg: ast::CrateConfig, + sess: @ParseSess) + -> @ast::Expr { let mut p = new_parser_from_source_str(sess, cfg, name, source); 
maybe_aborted(p.parse_expr(), p) } -pub fn parse_item_from_source_str( - name: @str, - source: @str, - cfg: ast::CrateConfig, - sess: @ParseSess -) -> Option<@ast::Item> { +pub fn parse_item_from_source_str(name: ~str, + source: ~str, + cfg: ast::CrateConfig, + sess: @ParseSess) + -> Option<@ast::Item> { let mut p = new_parser_from_source_str(sess, cfg, name, source); let attrs = p.parse_outer_attributes(); maybe_aborted(p.parse_item(attrs),p) } -pub fn parse_meta_from_source_str( - name: @str, - source: @str, - cfg: ast::CrateConfig, - sess: @ParseSess -) -> @ast::MetaItem { +pub fn parse_meta_from_source_str(name: ~str, + source: ~str, + cfg: ast::CrateConfig, + sess: @ParseSess) + -> @ast::MetaItem { let mut p = new_parser_from_source_str(sess, cfg, name, source); maybe_aborted(p.parse_meta_item(),p) } -pub fn parse_stmt_from_source_str( - name: @str, - source: @str, - cfg: ast::CrateConfig, - attrs: ~[ast::Attribute], - sess: @ParseSess -) -> @ast::Stmt { +pub fn parse_stmt_from_source_str(name: ~str, + source: ~str, + cfg: ast::CrateConfig, + attrs: ~[ast::Attribute], + sess: @ParseSess) + -> @ast::Stmt { let mut p = new_parser_from_source_str( sess, cfg, @@ -163,12 +157,11 @@ pub fn parse_stmt_from_source_str( maybe_aborted(p.parse_stmt(attrs),p) } -pub fn parse_tts_from_source_str( - name: @str, - source: @str, - cfg: ast::CrateConfig, - sess: @ParseSess -) -> ~[ast::TokenTree] { +pub fn parse_tts_from_source_str(name: ~str, + source: ~str, + cfg: ast::CrateConfig, + sess: @ParseSess) + -> ~[ast::TokenTree] { let mut p = new_parser_from_source_str( sess, cfg, @@ -183,9 +176,9 @@ pub fn parse_tts_from_source_str( // Create a new parser from a source string pub fn new_parser_from_source_str(sess: @ParseSess, cfg: ast::CrateConfig, - name: @str, - source: @str) - -> Parser { + name: ~str, + source: ~str) + -> Parser { filemap_to_parser(sess,string_to_filemap(sess,source,name),cfg) } @@ -248,20 +241,17 @@ pub fn file_to_filemap(sess: @ParseSess, path: &Path, spanopt: Option) }; match str::from_utf8_owned(bytes) { Some(s) => { - return string_to_filemap(sess, s.to_managed(), - path.as_str().unwrap().to_managed()); - } - None => { - err(format!("{} is not UTF-8 encoded", path.display())) + return string_to_filemap(sess, s, path.as_str().unwrap().to_str()) } + None => err(format!("{} is not UTF-8 encoded", path.display())), } unreachable!() } // given a session and a string, add the string to // the session's codemap and return the new filemap -pub fn string_to_filemap(sess: @ParseSess, source: @str, path: @str) - -> @FileMap { +pub fn string_to_filemap(sess: @ParseSess, source: ~str, path: ~str) + -> @FileMap { sess.cm.new_filemap(path, source) } @@ -324,7 +314,7 @@ mod test { } #[test] fn path_exprs_1() { - assert_eq!(string_to_expr(@"a"), + assert_eq!(string_to_expr(~"a"), @ast::Expr{ id: ast::DUMMY_NODE_ID, node: ast::ExprPath(ast::Path { @@ -343,7 +333,7 @@ mod test { } #[test] fn path_exprs_2 () { - assert_eq!(string_to_expr(@"::a::b"), + assert_eq!(string_to_expr(~"::a::b"), @ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprPath(ast::Path { @@ -368,12 +358,12 @@ mod test { #[should_fail] #[test] fn bad_path_expr_1() { - string_to_expr(@"::abc::def::return"); + string_to_expr(~"::abc::def::return"); } // check the token-tree-ization of macros #[test] fn string_to_tts_macro () { - let tts = string_to_tts(@"macro_rules! zip (($a)=>($a))"); + let tts = string_to_tts(~"macro_rules! 
zip (($a)=>($a))"); match tts { [ast::TTTok(_,_), ast::TTTok(_,token::NOT), @@ -417,7 +407,7 @@ mod test { } #[test] fn string_to_tts_1 () { - let tts = string_to_tts(@"fn a (b : int) { b; }"); + let tts = string_to_tts(~"fn a (b : int) { b; }"); assert_eq!(to_json_str(&tts), ~"[\ {\ @@ -546,7 +536,7 @@ mod test { } #[test] fn ret_expr() { - assert_eq!(string_to_expr(@"return d"), + assert_eq!(string_to_expr(~"return d"), @ast::Expr{ id: ast::DUMMY_NODE_ID, node:ast::ExprRet(Some(@ast::Expr{ @@ -569,7 +559,7 @@ mod test { } #[test] fn parse_stmt_1 () { - assert_eq!(string_to_stmt(@"b;"), + assert_eq!(string_to_stmt(~"b;"), @Spanned{ node: ast::StmtExpr(@ast::Expr { id: ast::DUMMY_NODE_ID, @@ -595,7 +585,7 @@ mod test { } #[test] fn parse_ident_pat () { - let mut parser = string_to_parser(@"b"); + let mut parser = string_to_parser(~"b"); assert_eq!(parser.parse_pat(), @ast::Pat{id: ast::DUMMY_NODE_ID, node: ast::PatIdent( @@ -619,7 +609,7 @@ mod test { // check the contents of the tt manually: #[test] fn parse_fundecl () { // this test depends on the intern order of "fn" and "int" - assert_eq!(string_to_item(@"fn a (b : int) { b; }"), + assert_eq!(string_to_item(~"fn a (b : int) { b; }"), Some( @ast::Item{ident:str_to_ident("a"), attrs:~[], @@ -711,12 +701,12 @@ mod test { #[test] fn parse_exprs () { // just make sure that they parse.... - string_to_expr(@"3 + 4"); - string_to_expr(@"a::z.froob(b,@(987+3))"); + string_to_expr(~"3 + 4"); + string_to_expr(~"a::z.froob(b,@(987+3))"); } #[test] fn attrs_fix_bug () { - string_to_item(@"pub fn mk_file_writer(path: &Path, flags: &[FileFlag]) + string_to_item(~"pub fn mk_file_writer(path: &Path, flags: &[FileFlag]) -> Result<@Writer, ~str> { #[cfg(windows)] fn wb() -> c_int { diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index 6aa1afee206eb..c2a093972682e 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -22,7 +22,6 @@ use codemap::{Span, respan}; use parse::parser::Parser; use parse::token; -use std::str; use std::to_bytes; /// The specific types of unsupported syntax @@ -46,6 +45,7 @@ pub enum ObsoleteSyntax { ObsoleteMultipleImport, ObsoleteExternModAttributesInParens, ObsoleteManagedPattern, + ObsoleteManagedString, } impl to_bytes::IterBytes for ObsoleteSyntax { @@ -155,6 +155,10 @@ impl ParserObsoleteMethods for Parser { "use a nested `match` expression instead of a managed box \ pattern" ), + ObsoleteManagedString => ( + "managed string", + "use `Rc<~str>` instead of a managed string" + ), }; self.report(sp, kind, kind_str, desc); @@ -183,7 +187,8 @@ impl ParserObsoleteMethods for Parser { fn is_obsolete_ident(&mut self, ident: &str) -> bool { match self.token { token::IDENT(sid, _) => { - str::eq_slice(self.id_to_str(sid), ident) + let interned_string = token::get_ident(sid.name); + interned_string.equiv(&ident) } _ => false } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 04a984ba95d92..050735b4dfd62 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -71,10 +71,9 @@ use parse::common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed}; use parse::lexer::Reader; use parse::lexer::TokenAndSpan; use parse::obsolete::*; -use parse::token::{can_begin_expr, get_ident_interner, ident_to_str, is_ident}; -use parse::token::{is_ident_or_path}; -use parse::token::{is_plain_ident, INTERPOLATED, keywords, special_idents}; -use parse::token::{token_to_binop}; +use parse::token::{INTERPOLATED, InternedString, 
can_begin_expr, get_ident}; +use parse::token::{get_ident_interner, is_ident, is_ident_or_path}; +use parse::token::{is_plain_ident, keywords, special_idents, token_to_binop}; use parse::token; use parse::{new_sub_parser_from_file, ParseSess}; use opt_vec; @@ -345,7 +344,7 @@ pub struct Parser { /// extra detail when the same error is seen twice obsolete_set: HashSet, /// Used to determine the path to externally loaded source files - mod_path_stack: ~[@str], + mod_path_stack: ~[InternedString], /// Stack of spans of open delimiters. Used for error message. open_braces: ~[Span], /* do not copy the parser; its state is tied to outside state */ @@ -531,10 +530,11 @@ impl Parser { // otherwise, eat it. pub fn expect_keyword(&mut self, kw: keywords::Keyword) { if !self.eat_keyword(kw) { - let id_str = self.id_to_str(kw.to_ident()).to_str(); + let id_ident = kw.to_ident(); + let id_interned_str = token::get_ident(id_ident.name); let token_str = self.this_token_to_str(); self.fatal(format!("expected `{}`, found `{}`", - id_str, + id_interned_str.get(), token_str)) } } @@ -802,8 +802,8 @@ impl Parser { self.sess.span_diagnostic.handler().abort_if_errors(); } - pub fn id_to_str(&mut self, id: Ident) -> @str { - get_ident_interner().get(id.name) + pub fn id_to_interned_str(&mut self, id: Ident) -> InternedString { + get_ident(id.name) } // Is the current token one of the keywords that signals a bare function @@ -1401,11 +1401,18 @@ impl Parser { token::LIT_INT(i, it) => LitInt(i, it), token::LIT_UINT(u, ut) => LitUint(u, ut), token::LIT_INT_UNSUFFIXED(i) => LitIntUnsuffixed(i), - token::LIT_FLOAT(s, ft) => LitFloat(self.id_to_str(s), ft), - token::LIT_FLOAT_UNSUFFIXED(s) => - LitFloatUnsuffixed(self.id_to_str(s)), - token::LIT_STR(s) => LitStr(self.id_to_str(s), ast::CookedStr), - token::LIT_STR_RAW(s, n) => LitStr(self.id_to_str(s), ast::RawStr(n)), + token::LIT_FLOAT(s, ft) => { + LitFloat(self.id_to_interned_str(s), ft) + } + token::LIT_FLOAT_UNSUFFIXED(s) => { + LitFloatUnsuffixed(self.id_to_interned_str(s)) + } + token::LIT_STR(s) => { + LitStr(self.id_to_interned_str(s), ast::CookedStr) + } + token::LIT_STR_RAW(s, n) => { + LitStr(self.id_to_interned_str(s), ast::RawStr(n)) + } token::LPAREN => { self.expect(&token::RPAREN); LitNil }, _ => { self.unexpected_last(tok); } } @@ -2288,7 +2295,10 @@ impl Parser { ex = match e.node { ExprVec(..) | ExprRepeat(..) 
=> ExprVstore(e, ExprVstoreBox), - ExprLit(lit) if lit_is_str(lit) => ExprVstore(e, ExprVstoreBox), + ExprLit(lit) if lit_is_str(lit) => { + self.obsolete(self.last_span, ObsoleteManagedString); + ExprVstore(e, ExprVstoreBox) + } _ => self.mk_unary(UnBox, e) }; } @@ -3429,7 +3439,9 @@ impl Parser { loop { match self.token { token::LIFETIME(lifetime) => { - if "static" == self.id_to_str(lifetime) { + let lifetime_interned_string = + token::get_ident(lifetime.name); + if lifetime_interned_string.equiv(&("static")) { result.push(RegionTyParamBound); } else { self.span_err(self.span, @@ -3976,8 +3988,9 @@ impl Parser { fields.push(self.parse_struct_decl_field()); } if fields.len() == 0 { + let string = get_ident_interner().get(class_name.name); self.fatal(format!("Unit-like struct definition should be written as `struct {};`", - get_ident_interner().get(class_name.name))); + string.as_slice())); } self.bump(); } else if self.token == token::LPAREN { @@ -4148,11 +4161,11 @@ impl Parser { } fn push_mod_path(&mut self, id: Ident, attrs: &[Attribute]) { - let default_path = token::interner_get(id.name); + let default_path = self.id_to_interned_str(id); let file_path = match ::attr::first_attr_value_str_by_name(attrs, "path") { Some(d) => d, - None => default_path + None => default_path, }; self.mod_path_stack.push(file_path) } @@ -4175,7 +4188,8 @@ impl Parser { outer_attrs, "path") { Some(d) => dir_path.join(d), None => { - let mod_name = token::interner_get(id.name).to_owned(); + let mod_string = token::get_ident(id.name); + let mod_name = mod_string.get().to_owned(); let default_path_str = mod_name + ".rs"; let secondary_path_str = mod_name + "/mod.rs"; let default_path = dir_path.join(default_path_str.as_slice()); @@ -4530,7 +4544,8 @@ impl Parser { token::LIT_STR(s) | token::LIT_STR_RAW(s, _) => { self.bump(); - let the_string = ident_to_str(&s); + let identifier_string = token::get_ident(s.name); + let the_string = identifier_string.get(); let mut abis = AbiSet::empty(); for word in the_string.words() { match abi::lookup(word) { @@ -4866,7 +4881,6 @@ impl Parser { let first_ident = self.parse_ident(); let mut path = ~[first_ident]; - debug!("parsed view path: {}", self.id_to_str(first_ident)); match self.token { token::EQ => { // x = foo::bar @@ -5125,17 +5139,20 @@ impl Parser { } } - pub fn parse_optional_str(&mut self) -> Option<(@str, ast::StrStyle)> { + pub fn parse_optional_str(&mut self) + -> Option<(InternedString, ast::StrStyle)> { let (s, style) = match self.token { - token::LIT_STR(s) => (s, ast::CookedStr), - token::LIT_STR_RAW(s, n) => (s, ast::RawStr(n)), + token::LIT_STR(s) => (self.id_to_interned_str(s), ast::CookedStr), + token::LIT_STR_RAW(s, n) => { + (self.id_to_interned_str(s), ast::RawStr(n)) + } _ => return None }; self.bump(); - Some((ident_to_str(&s), style)) + Some((s, style)) } - pub fn parse_str(&mut self) -> (@str, StrStyle) { + pub fn parse_str(&mut self) -> (InternedString, StrStyle) { match self.parse_optional_str() { Some(s) => { s } _ => self.fatal("expected string literal") diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 68e2f44ebb184..fa53f021cdbdb 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -12,12 +12,15 @@ use ast; use ast::{P, Name, Mrk}; use ast_util; use parse::token; -use util::interner::StrInterner; +use util::interner::{RcStr, StrInterner}; use util::interner; +use extra::serialize::{Decodable, Decoder, Encodable, Encoder}; use std::cast; use std::char; +use std::fmt; use 
std::local_data; +use std::path::BytesContainer; #[allow(non_camel_case_types)] #[deriving(Clone, Encodable, Decodable, Eq, IterBytes)] @@ -185,32 +188,44 @@ pub fn to_str(input: @IdentInterner, t: &Token) -> ~str { } LIT_INT_UNSUFFIXED(i) => { i.to_str() } LIT_FLOAT(ref s, t) => { - let mut body = ident_to_str(s).to_owned(); + let body_string = get_ident(s.name); + let mut body = body_string.get().to_str(); if body.ends_with(".") { body.push_char('0'); // `10.f` is not a float literal } body + ast_util::float_ty_to_str(t) } LIT_FLOAT_UNSUFFIXED(ref s) => { - let mut body = ident_to_str(s).to_owned(); + let body_string = get_ident(s.name); + let mut body = body_string.get().to_owned(); if body.ends_with(".") { body.push_char('0'); // `10.f` is not a float literal } body } - LIT_STR(ref s) => { format!("\"{}\"", ident_to_str(s).escape_default()) } + LIT_STR(ref s) => { + let literal_string = get_ident(s.name); + format!("\"{}\"", literal_string.get().escape_default()) + } LIT_STR_RAW(ref s, n) => { + let literal_string = get_ident(s.name); format!("r{delim}\"{string}\"{delim}", - delim="#".repeat(n), string=ident_to_str(s)) + delim="#".repeat(n), string=literal_string.get()) } /* Name components */ - IDENT(s, _) => input.get(s.name).to_owned(), - LIFETIME(s) => format!("'{}", input.get(s.name)), + IDENT(s, _) => input.get(s.name).into_owned(), + LIFETIME(s) => { + let name = input.get(s.name); + format!("'{}", name.as_slice()) + } UNDERSCORE => ~"_", /* Other */ - DOC_COMMENT(ref s) => ident_to_str(s).to_owned(), + DOC_COMMENT(ref s) => { + let comment_string = get_ident(s.name); + comment_string.get().to_str() + } EOF => ~"", INTERPOLATED(ref nt) => { match nt { @@ -525,6 +540,101 @@ pub fn get_ident_interner() -> @IdentInterner { } } +/// Represents a string stored in the task-local interner. Because the +/// interner lives for the life of the task, this can be safely treated as an +/// immortal string, as long as it never crosses between tasks. +/// +/// FIXME(pcwalton): You must be careful about what you do in the destructors +/// of objects stored in TLS, because they may run after the interner is +/// destroyed. In particular, they must not access string contents. This can +/// be fixed in the future by just leaking all strings until task death +/// somehow. +#[no_send] +#[deriving(Clone, Eq, IterBytes, Ord, TotalEq, TotalOrd)] +pub struct InternedString { + priv string: RcStr, +} + +#[unsafe_destructor] +impl Drop for InternedString { + fn drop(&mut self) { + // No-op just to make this not implicitly copyable. + } +} + +impl InternedString { + #[inline] + pub fn new(string: &'static str) -> InternedString { + InternedString { + string: RcStr::new(string), + } + } + + #[inline] + fn new_from_rc_str(string: RcStr) -> InternedString { + InternedString { + string: string, + } + } + + #[inline] + pub fn get<'a>(&'a self) -> &'a str { + self.string.as_slice() + } +} + +impl BytesContainer for InternedString { + fn container_as_bytes<'a>(&'a self) -> &'a [u8] { + // FIXME(pcwalton): This is a workaround for the incorrect signature + // of `BytesContainer`, which is itself a workaround for the lack of + // DST. 
+ unsafe { + let this = self.get(); + cast::transmute(this.container_as_bytes()) + } + } +} + +impl fmt::Default for InternedString { + fn fmt(obj: &InternedString, f: &mut fmt::Formatter) { + write!(f.buf, "{}", obj.string.as_slice()); + } +} + +impl<'a> Equiv<&'a str> for InternedString { + fn equiv(&self, other: & &'a str) -> bool { + (*other) == self.string.as_slice() + } +} + +impl Decodable for InternedString { + fn decode(d: &mut D) -> InternedString { + let interner = get_ident_interner(); + get_ident(interner.intern(d.read_str())) + } +} + +impl Encodable for InternedString { + fn encode(&self, e: &mut E) { + e.emit_str(self.string.as_slice()) + } +} + +/// Returns the string contents of an identifier, using the task-local +/// interner. +#[inline] +pub fn get_ident(idx: Name) -> InternedString { + let interner = get_ident_interner(); + InternedString::new_from_rc_str(interner.get(idx)) +} + +/// Interns and returns the string contents of an identifier, using the +/// task-local interner. +#[inline] +pub fn intern_and_get_ident(s: &str) -> InternedString { + get_ident(intern(s)) +} + /* for when we don't care about the contents; doesn't interact with TLD or serialization */ pub fn mk_fake_ident_interner() -> @IdentInterner { @@ -532,6 +642,7 @@ pub fn mk_fake_ident_interner() -> @IdentInterner { } // maps a string to its interned representation +#[inline] pub fn intern(str : &str) -> Name { let interner = get_ident_interner(); interner.intern(str) @@ -543,16 +654,6 @@ pub fn gensym(str : &str) -> Name { interner.gensym(str) } -// map an interned representation back to a string -pub fn interner_get(name : Name) -> @str { - get_ident_interner().get(name) -} - -// maps an identifier to the string that it corresponds to -pub fn ident_to_str(id : &ast::Ident) -> @str { - interner_get(id.name) -} - // maps a string to an identifier with an empty syntax context pub fn str_to_ident(str : &str) -> ast::Ident { ast::Ident::new(intern(str)) @@ -576,28 +677,6 @@ pub fn fresh_name(src : &ast::Ident) -> Name { gensym(format!("{}_{}",ident_to_str(src),num))*/ } -// it looks like there oughta be a str_ptr_eq fn, but no one bothered to implement it? - -// determine whether two @str values are pointer-equal -pub fn str_ptr_eq(a : @str, b : @str) -> bool { - unsafe { - let p : uint = cast::transmute(a); - let q : uint = cast::transmute(b); - let result = p == q; - // got to transmute them back, to make sure the ref count is correct: - let _junk1 : @str = cast::transmute(p); - let _junk2 : @str = cast::transmute(q); - result - } -} - -// return true when two identifiers refer (through the intern table) to the same ptr_eq -// string. This is used to compare identifiers in places where hygienic comparison is -// not wanted (i.e. not lexical vars). -pub fn ident_spelling_eq(a : &ast::Ident, b : &ast::Ident) -> bool { - str_ptr_eq(interner_get(a.name),interner_get(b.name)) -} - // create a fresh mark. 
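// [editor's note] A minimal migration sketch, not part of the patch: callers
// that used the removed @str helpers (`interner_get`, `ident_to_str`) now go
// through `token::get_ident`, bind the returned InternedString to a local,
// and borrow its text with `get()`. The function name below is hypothetical.
fn ident_text_len(id: ast::Ident) -> uint {
    // before this patch: let s: @str = ident_to_str(&id);
    let string = token::get_ident(id.name);   // InternedString from the task-local interner
    string.get().len()                        // borrow the interned &str
}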
pub fn fresh_mark() -> Mrk { gensym("mark") @@ -669,23 +748,4 @@ mod test { let a1 = mark_ident(a,92); assert!(mtwt_token_eq(&IDENT(a,true),&IDENT(a1,false))); } - - - #[test] fn str_ptr_eq_tests(){ - let a = @"abc"; - let b = @"abc"; - let c = a; - assert!(str_ptr_eq(a,c)); - assert!(!str_ptr_eq(a,b)); - } - - #[test] fn fresh_name_pointer_sharing() { - let ghi = str_to_ident("ghi"); - assert_eq!(ident_to_str(&ghi),@"ghi"); - assert!(str_ptr_eq(ident_to_str(&ghi),ident_to_str(&ghi))) - let fresh = ast::Ident::new(fresh_name(&ghi)); - assert_eq!(ident_to_str(&fresh),@"ghi"); - assert!(str_ptr_eq(ident_to_str(&ghi),ident_to_str(&fresh))); - } - } diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 902d9e1c28468..3e1f5b4cfb353 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -84,7 +84,7 @@ pub struct BeginToken { #[deriving(Clone)] pub enum Token { - String(@str, int), + String(~str, int), Break(BreakToken), Begin(BeginToken), End, @@ -131,7 +131,7 @@ pub fn buf_str(toks: ~[Token], szs: ~[int], left: uint, right: uint, if i != left { s.push_str(", "); } - s.push_str(format!("{}={}", szs[i], tok_str(toks[i]))); + s.push_str(format!("{}={}", szs[i], tok_str(toks[i].clone()))); i += 1u; i %= n; } @@ -285,7 +285,9 @@ pub struct Printer { } impl Printer { - pub fn last_token(&mut self) -> Token { self.token[self.right] } + pub fn last_token(&mut self) -> Token { + self.token[self.right].clone() + } // be very careful with this! pub fn replace_last_token(&mut self, t: Token) { self.token[self.right] = t; @@ -296,8 +298,8 @@ impl Printer { Eof => { if !self.scan_stack_empty { self.check_stack(0); - self.advance_left(self.token[self.left], - self.size[self.left]); + let left = self.token[self.left].clone(); + self.advance_left(left, self.size[self.left]); } self.indent(0); } @@ -341,16 +343,16 @@ impl Printer { self.size[self.right] = -self.right_total; self.right_total += b.blank_space; } - String(s, len) => { + String(ref s, len) => { if self.scan_stack_empty { debug!("pp String('{}')/print ~[{},{}]", - s, self.left, self.right); - self.print(t, len); + *s, self.left, self.right); + self.print(t.clone(), len); } else { debug!("pp String('{}')/buffer ~[{},{}]", - s, self.left, self.right); + *s, self.left, self.right); self.advance_right(); - self.token[self.right] = t; + self.token[self.right] = t.clone(); self.size[self.right] = len; self.right_total += len; self.check_stream(); @@ -370,7 +372,8 @@ impl Printer { self.size[self.scan_pop_bottom()] = SIZE_INFINITY; } } - self.advance_left(self.token[self.left], self.size[self.left]); + let left = self.token[self.left].clone(); + self.advance_left(left, self.size[self.left]); if self.left != self.right { self.check_stream(); } } } @@ -414,7 +417,7 @@ impl Printer { debug!("advnce_left ~[{},{}], sizeof({})={}", self.left, self.right, self.left, L); if L >= 0 { - self.print(x, L); + self.print(x.clone(), L); match x { Break(b) => self.left_total += b.blank_space, String(_, len) => { @@ -425,8 +428,8 @@ impl Printer { if self.left != self.right { self.left += 1u; self.left %= self.buf_len; - self.advance_left(self.token[self.left], - self.size[self.left]); + let left = self.token[self.left].clone(); + self.advance_left(left, self.size[self.left]); } } } @@ -483,7 +486,7 @@ impl Printer { write!(self.out, "{}", s); } pub fn print(&mut self, x: Token, L: int) { - debug!("print {} {} (remaining line space={})", tok_str(x), L, + debug!("print {} {} (remaining line space={})", tok_str(x.clone()), L, self.space); 
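    // [editor's note] Hedged aside, not part of the patch: the clone() calls
    // added throughout pp.rs exist because Token::String now carries an owned
    // ~str instead of an @str, so Token is no longer implicitly copyable and
    // must be cloned wherever it was previously copied out of the ring buffer.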
debug!("{}", buf_str(self.token.clone(), self.size.clone(), @@ -583,15 +586,15 @@ pub fn end(p: &mut Printer) { p.pretty_print(End); } pub fn eof(p: &mut Printer) { p.pretty_print(Eof); } pub fn word(p: &mut Printer, wrd: &str) { - p.pretty_print(String(/* bad */ wrd.to_managed(), wrd.len() as int)); + p.pretty_print(String(/* bad */ wrd.to_str(), wrd.len() as int)); } pub fn huge_word(p: &mut Printer, wrd: &str) { - p.pretty_print(String(/* bad */ wrd.to_managed(), SIZE_INFINITY)); + p.pretty_print(String(/* bad */ wrd.to_str(), SIZE_INFINITY)); } pub fn zero_word(p: &mut Printer, wrd: &str) { - p.pretty_print(String(/* bad */ wrd.to_managed(), 0)); + p.pretty_print(String(/* bad */ wrd.to_str(), 0)); } pub fn spaces(p: &mut Printer, n: uint) { break_offset(p, n, 0); } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 2783284ea8b05..2e20560b9cac3 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -19,7 +19,7 @@ use codemap::{CodeMap, BytePos}; use codemap; use diagnostic; use parse::classify::expr_is_simple_block; -use parse::token::{IdentInterner, ident_to_str, interner_get}; +use parse::token::IdentInterner; use parse::{comments, token}; use parse; use print::pp::{break_offset, word, space, zerobreak, hardbreak}; @@ -117,7 +117,7 @@ pub fn print_crate(cm: @CodeMap, intr: @IdentInterner, span_diagnostic: @diagnostic::SpanHandler, crate: &ast::Crate, - filename: @str, + filename: ~str, input: &mut io::Reader, out: ~io::Writer, ann: @PpAnn, @@ -897,7 +897,7 @@ pub fn print_attribute(s: &mut State, attr: &ast::Attribute) { maybe_print_comment(s, attr.span.lo); if attr.node.is_sugared_doc { let comment = attr.value_str().unwrap(); - word(&mut s.s, comment); + word(&mut s.s, comment.get()); } else { word(&mut s.s, "#["); print_meta_item(s, attr.meta()); @@ -1466,25 +1466,25 @@ pub fn print_expr(s: &mut State, expr: &ast::Expr) { word(&mut s.s, "asm!"); } popen(s); - print_string(s, a.asm, a.asm_str_style); + print_string(s, a.asm.get(), a.asm_str_style); word_space(s, ":"); - for &(co, o) in a.outputs.iter() { - print_string(s, co, ast::CookedStr); + for &(ref co, o) in a.outputs.iter() { + print_string(s, co.get(), ast::CookedStr); popen(s); print_expr(s, o); pclose(s); word_space(s, ","); } word_space(s, ":"); - for &(co, o) in a.inputs.iter() { - print_string(s, co, ast::CookedStr); + for &(ref co, o) in a.inputs.iter() { + print_string(s, co.get(), ast::CookedStr); popen(s); print_expr(s, o); pclose(s); word_space(s, ","); } word_space(s, ":"); - print_string(s, a.clobbers, ast::CookedStr); + print_string(s, a.clobbers.get(), ast::CookedStr); pclose(s); } ast::ExprMac(ref m) => print_mac(s, m), @@ -1539,11 +1539,13 @@ pub fn print_decl(s: &mut State, decl: &ast::Decl) { } pub fn print_ident(s: &mut State, ident: ast::Ident) { - word(&mut s.s, ident_to_str(&ident)); + let string = token::get_ident(ident.name); + word(&mut s.s, string.get()); } pub fn print_name(s: &mut State, name: ast::Name) { - word(&mut s.s, interner_get(name)); + let string = token::get_ident(name); + word(&mut s.s, string.get()); } pub fn print_for_decl(s: &mut State, loc: &ast::Local, coll: &ast::Expr) { @@ -1930,14 +1932,14 @@ pub fn print_generics(s: &mut State, generics: &ast::Generics) { pub fn print_meta_item(s: &mut State, item: &ast::MetaItem) { ibox(s, indent_unit); match item.node { - ast::MetaWord(name) => word(&mut s.s, name), - ast::MetaNameValue(name, value) => { - word_space(s, name); + ast::MetaWord(ref name) => word(&mut s.s, name.get()), + 
ast::MetaNameValue(ref name, ref value) => { + word_space(s, name.get()); word_space(s, "="); - print_literal(s, &value); + print_literal(s, value); } - ast::MetaList(name, ref items) => { - word(&mut s.s, name); + ast::MetaList(ref name, ref items) => { + word(&mut s.s, name.get()); popen(s); commasep(s, Consistent, @@ -1998,7 +2000,7 @@ pub fn print_view_item(s: &mut State, item: &ast::ViewItem) { space(&mut s.s); word(&mut s.s, "="); space(&mut s.s); - print_string(s, *p, style); + print_string(s, p.get(), style); } } @@ -2172,7 +2174,7 @@ pub fn print_literal(s: &mut State, lit: &ast::Lit) { _ => () } match lit.node { - ast::LitStr(st, style) => print_string(s, st, style), + ast::LitStr(ref st, style) => print_string(s, st.get(), style), ast::LitChar(ch) => { let mut res = ~"'"; char::from_u32(ch).unwrap().escape_default(|c| res.push_char(c)); @@ -2202,10 +2204,10 @@ pub fn print_literal(s: &mut State, lit: &ast::Lit) { word(&mut s.s, (i as u64).to_str_radix(10u)); } } - ast::LitFloat(f, t) => { - word(&mut s.s, f.to_owned() + ast_util::float_ty_to_str(t)); + ast::LitFloat(ref f, t) => { + word(&mut s.s, f.get() + ast_util::float_ty_to_str(t)); } - ast::LitFloatUnsuffixed(f) => word(&mut s.s, f), + ast::LitFloatUnsuffixed(ref f) => word(&mut s.s, f.get()), ast::LitNil => word(&mut s.s, "()"), ast::LitBool(val) => { if val { word(&mut s.s, "true"); } else { word(&mut s.s, "false"); } diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index fdc54f1f140a6..fc3e55dcde256 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -14,9 +14,11 @@ use ast::Name; +use std::cast; use std::cell::RefCell; use std::cmp::Equiv; use std::hashmap::HashMap; +use std::rc::Rc; pub struct Interner { priv map: @RefCell>, @@ -82,11 +84,49 @@ impl Interner { } } +#[deriving(Clone, Eq, IterBytes, Ord)] +pub struct RcStr { + priv string: Rc<~str>, +} + +impl TotalEq for RcStr { + fn equals(&self, other: &RcStr) -> bool { + self.as_slice().equals(&other.as_slice()) + } +} + +impl TotalOrd for RcStr { + fn cmp(&self, other: &RcStr) -> Ordering { + self.as_slice().cmp(&other.as_slice()) + } +} + +impl Str for RcStr { + #[inline] + fn as_slice<'a>(&'a self) -> &'a str { + let s: &'a str = *self.string.borrow(); + s + } + + #[inline] + fn into_owned(self) -> ~str { + self.string.borrow().to_owned() + } +} + +impl RcStr { + pub fn new(string: &str) -> RcStr { + RcStr { + string: Rc::new(string.to_owned()), + } + } +} + // A StrInterner differs from Interner in that it accepts // references rather than @ ones, resulting in less allocation. 
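// [editor's note] Illustrative sketch, not part of the patch: RcStr compares
// by string contents (see the TotalEq/TotalOrd impls above) while clone()
// only bumps the Rc<~str> reference count, which is what lets the interner
// below hand out values without copying the text.
fn rc_str_demo() -> bool {
    let a = RcStr::new("zebra");
    let b = a.clone();                        // shares the same Rc<~str>
    a.equals(&b) && a.as_slice() == "zebra"   // equality is by contents
}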
pub struct StrInterner { - priv map: @RefCell>, - priv vect: @RefCell<~[@str]>, + priv map: @RefCell>, + priv vect: @RefCell<~[RcStr]>, } // when traits can extend traits, we should extend index to get [] @@ -112,8 +152,8 @@ impl StrInterner { } let new_idx = self.len() as Name; - let val = val.to_managed(); - map.get().insert(val, new_idx); + let val = RcStr::new(val); + map.get().insert(val.clone(), new_idx); let mut vect = self.vect.borrow_mut(); vect.get().push(val); new_idx @@ -123,7 +163,7 @@ impl StrInterner { let new_idx = self.len() as Name; // leave out of .map to avoid colliding let mut vect = self.vect.borrow_mut(); - vect.get().push(val.to_managed()); + vect.get().push(RcStr::new(val)); new_idx } @@ -141,14 +181,24 @@ impl StrInterner { let new_idx = self.len() as Name; // leave out of map to avoid colliding let mut vect = self.vect.borrow_mut(); - let existing = vect.get()[idx]; + let existing = vect.get()[idx].clone(); vect.get().push(existing); new_idx } - pub fn get(&self, idx: Name) -> @str { + pub fn get(&self, idx: Name) -> RcStr { let vect = self.vect.borrow(); - vect.get()[idx] + vect.get()[idx].clone() + } + + /// Returns this string with lifetime tied to the interner. Since + /// strings may never be removed from the interner, this is safe. + pub fn get_ref<'a>(&'a self, idx: Name) -> &'a str { + let vect = self.vect.borrow(); + let s: &str = vect.get()[idx].as_slice(); + unsafe { + cast::transmute(s) + } } pub fn len(&self) -> uint { @@ -156,7 +206,7 @@ impl StrInterner { vect.get().len() } - pub fn find_equiv>(&self, val: &Q) + pub fn find_equiv>(&self, val: &Q) -> Option { let map = self.map.borrow(); match map.get().find_equiv(val) { @@ -172,42 +222,46 @@ mod tests { #[test] #[should_fail] fn i1 () { - let i : Interner<@str> = Interner::new(); + let i : Interner = Interner::new(); i.get(13); } #[test] fn interner_tests () { - let i : Interner<@str> = Interner::new(); + let i : Interner = Interner::new(); // first one is zero: - assert_eq!(i.intern(@"dog"), 0); + assert_eq!(i.intern(RcStr::new("dog")), 0); // re-use gets the same entry: - assert_eq!(i.intern(@"dog"), 0); + assert_eq!(i.intern(RcStr::new("dog")), 0); // different string gets a different #: - assert_eq!(i.intern(@"cat"), 1); - assert_eq!(i.intern(@"cat"), 1); + assert_eq!(i.intern(RcStr::new("cat")), 1); + assert_eq!(i.intern(RcStr::new("cat")), 1); // dog is still at zero - assert_eq!(i.intern(@"dog"), 0); + assert_eq!(i.intern(RcStr::new("dog")), 0); // gensym gets 3 - assert_eq!(i.gensym(@"zebra" ), 2); + assert_eq!(i.gensym(RcStr::new("zebra") ), 2); // gensym of same string gets new number : - assert_eq!(i.gensym (@"zebra" ), 3); + assert_eq!(i.gensym (RcStr::new("zebra") ), 3); // gensym of *existing* string gets new number: - assert_eq!(i.gensym(@"dog"), 4); - assert_eq!(i.get(0), @"dog"); - assert_eq!(i.get(1), @"cat"); - assert_eq!(i.get(2), @"zebra"); - assert_eq!(i.get(3), @"zebra"); - assert_eq!(i.get(4), @"dog"); + assert_eq!(i.gensym(RcStr::new("dog")), 4); + assert_eq!(i.get(0), RcStr::new("dog")); + assert_eq!(i.get(1), RcStr::new("cat")); + assert_eq!(i.get(2), RcStr::new("zebra")); + assert_eq!(i.get(3), RcStr::new("zebra")); + assert_eq!(i.get(4), RcStr::new("dog")); } #[test] fn i3 () { - let i : Interner<@str> = Interner::prefill([@"Alan",@"Bob",@"Carol"]); - assert_eq!(i.get(0), @"Alan"); - assert_eq!(i.get(1), @"Bob"); - assert_eq!(i.get(2), @"Carol"); - assert_eq!(i.intern(@"Bob"), 1); + let i : Interner = Interner::prefill([ + RcStr::new("Alan"), + RcStr::new("Bob"), + 
RcStr::new("Carol") + ]); + assert_eq!(i.get(0), RcStr::new("Alan")); + assert_eq!(i.get(1), RcStr::new("Bob")); + assert_eq!(i.get(2), RcStr::new("Carol")); + assert_eq!(i.intern(RcStr::new("Bob")), 1); } #[test] @@ -230,13 +284,13 @@ mod tests { assert_eq!(i.gensym("dog"), 4); // gensym tests again with gensym_copy: assert_eq!(i.gensym_copy(2), 5); - assert_eq!(i.get(5), @"zebra"); + assert_eq!(i.get(5), RcStr::new("zebra")); assert_eq!(i.gensym_copy(2), 6); - assert_eq!(i.get(6), @"zebra"); - assert_eq!(i.get(0), @"dog"); - assert_eq!(i.get(1), @"cat"); - assert_eq!(i.get(2), @"zebra"); - assert_eq!(i.get(3), @"zebra"); - assert_eq!(i.get(4), @"dog"); + assert_eq!(i.get(6), RcStr::new("zebra")); + assert_eq!(i.get(0), RcStr::new("dog")); + assert_eq!(i.get(1), RcStr::new("cat")); + assert_eq!(i.get(2), RcStr::new("zebra")); + assert_eq!(i.get(3), RcStr::new("zebra")); + assert_eq!(i.get(4), RcStr::new("dog")); } } diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs index dd3ae168149eb..58c2bed7a45a7 100644 --- a/src/libsyntax/util/parser_testing.rs +++ b/src/libsyntax/util/parser_testing.rs @@ -17,29 +17,29 @@ use parse::token; // map a string to tts, using a made-up filename: return both the TokenTree's // and the ParseSess -pub fn string_to_tts_and_sess (source_str : @str) -> (~[ast::TokenTree], @ParseSess) { +pub fn string_to_tts_and_sess (source_str : ~str) -> (~[ast::TokenTree], @ParseSess) { let ps = new_parse_sess(None); - (filemap_to_tts(ps,string_to_filemap(ps,source_str,@"bogofile")),ps) + (filemap_to_tts(ps,string_to_filemap(ps,source_str,~"bogofile")),ps) } // map a string to tts, using a made-up filename: -pub fn string_to_tts(source_str : @str) -> ~[ast::TokenTree] { +pub fn string_to_tts(source_str : ~str) -> ~[ast::TokenTree] { let (tts,_) = string_to_tts_and_sess(source_str); tts } -pub fn string_to_parser_and_sess(source_str: @str) -> (Parser,@ParseSess) { +pub fn string_to_parser_and_sess(source_str: ~str) -> (Parser,@ParseSess) { let ps = new_parse_sess(None); - (new_parser_from_source_str(ps,~[],@"bogofile",source_str),ps) + (new_parser_from_source_str(ps,~[],~"bogofile",source_str),ps) } // map string to parser (via tts) -pub fn string_to_parser(source_str: @str) -> Parser { +pub fn string_to_parser(source_str: ~str) -> Parser { let (p,_) = string_to_parser_and_sess(source_str); p } -fn with_error_checking_parse(s: @str, f: |&mut Parser| -> T) -> T { +fn with_error_checking_parse(s: ~str, f: |&mut Parser| -> T) -> T { let mut p = string_to_parser(s); let x = f(&mut p); p.abort_if_errors(); @@ -47,34 +47,34 @@ fn with_error_checking_parse(s: @str, f: |&mut Parser| -> T) -> T { } // parse a string, return a crate. 
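// [editor's note] Illustrative call sites, not part of the patch: the test
// helpers above now take owned ~str source, so the @"..." literals formerly
// used at call sites become ~"..." (the function below is hypothetical).
fn parser_testing_demo() {
    let _tts = string_to_tts(~"fn a() {}");   // owned source in, token trees out
    let _expr = string_to_expr(~"1 + 1");     // or parse a single expression
}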
-pub fn string_to_crate (source_str : @str) -> ast::Crate { +pub fn string_to_crate (source_str : ~str) -> ast::Crate { with_error_checking_parse(source_str, |p| { p.parse_crate_mod() }) } // parse a string, return a crate and the ParseSess -pub fn string_to_crate_and_sess (source_str : @str) -> (ast::Crate,@ParseSess) { +pub fn string_to_crate_and_sess (source_str : ~str) -> (ast::Crate,@ParseSess) { let (mut p,ps) = string_to_parser_and_sess(source_str); (p.parse_crate_mod(),ps) } // parse a string, return an expr -pub fn string_to_expr (source_str : @str) -> @ast::Expr { +pub fn string_to_expr (source_str : ~str) -> @ast::Expr { with_error_checking_parse(source_str, |p| { p.parse_expr() }) } // parse a string, return an item -pub fn string_to_item (source_str : @str) -> Option<@ast::Item> { +pub fn string_to_item (source_str : ~str) -> Option<@ast::Item> { with_error_checking_parse(source_str, |p| { p.parse_item(~[]) }) } // parse a string, return a stmt -pub fn string_to_stmt(source_str : @str) -> @ast::Stmt { +pub fn string_to_stmt(source_str : ~str) -> @ast::Stmt { with_error_checking_parse(source_str, |p| { p.parse_stmt(~[]) }) @@ -82,7 +82,7 @@ pub fn string_to_stmt(source_str : @str) -> @ast::Stmt { // parse a string, return a pat. Uses "irrefutable"... which doesn't // (currently) affect parsing. -pub fn string_to_pat(source_str : @str) -> @ast::Pat { +pub fn string_to_pat(source_str : ~str) -> @ast::Pat { string_to_parser(source_str).parse_pat() } diff --git a/src/test/compile-fail/estr-subtyping.rs b/src/test/compile-fail/estr-subtyping.rs index 7dc99074f72f1..d99d29fb81080 100644 --- a/src/test/compile-fail/estr-subtyping.rs +++ b/src/test/compile-fail/estr-subtyping.rs @@ -8,26 +8,15 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
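// [editor's note] Hedged sketch, not part of the patch: with the @str cases
// deleted from the test below, the storage coercions it still exercises are
// ~str and &str; an owned string reaches a &str parameter via an explicit
// slice before being moved (the function below is illustrative only).
fn pass_along(x: ~str) {
    wants_slice(x.as_slice());   // explicit &str view of the owned string
    wants_uniq(x);               // then move the ~str itself
}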
-#[feature(managed_boxes)]; - -fn wants_box(x: @str) { } fn wants_uniq(x: ~str) { } fn wants_slice(x: &str) { } -fn has_box(x: @str) { - wants_box(x); - wants_uniq(x); //~ ERROR str storage differs: expected `~` but found `@` - wants_slice(x); -} - fn has_uniq(x: ~str) { - wants_box(x); //~ ERROR str storage differs: expected `@` but found `~` wants_uniq(x); wants_slice(x); } fn has_slice(x: &str) { - wants_box(x); //~ ERROR str storage differs: expected `@` but found `&` wants_uniq(x); //~ ERROR str storage differs: expected `~` but found `&` wants_slice(x); } diff --git a/src/test/compile-fail/lint-heap-memory.rs b/src/test/compile-fail/lint-heap-memory.rs index c02da1beeb732..5e8c83558fb9e 100644 --- a/src/test/compile-fail/lint-heap-memory.rs +++ b/src/test/compile-fail/lint-heap-memory.rs @@ -25,8 +25,6 @@ fn main() { @[1]; //~ ERROR type uses managed //~^ ERROR type uses managed fn f(_: @Clone) {} //~ ERROR type uses managed - @""; //~ ERROR type uses managed - //~^ ERROR type uses managed ~2; //~ ERROR type uses owned ~[1]; //~ ERROR type uses owned diff --git a/src/test/run-pass/auto-ref-slice-plus-ref.rs b/src/test/run-pass/auto-ref-slice-plus-ref.rs index eae791b6b08a6..7bcf98c3f41bc 100644 --- a/src/test/run-pass/auto-ref-slice-plus-ref.rs +++ b/src/test/run-pass/auto-ref-slice-plus-ref.rs @@ -34,7 +34,6 @@ pub fn main() { (&[1]).test_imm(); ("test").test_imm(); (~"test").test_imm(); - (@"test").test_imm(); (&"test").test_imm(); // FIXME: Other types of mutable vecs don't currently exist diff --git a/src/test/run-pass/borrowed-ptr-pattern-infallible.rs b/src/test/run-pass/borrowed-ptr-pattern-infallible.rs index 77484b8da4a7d..07a13e5395fc3 100644 --- a/src/test/run-pass/borrowed-ptr-pattern-infallible.rs +++ b/src/test/run-pass/borrowed-ptr-pattern-infallible.rs @@ -11,8 +11,7 @@ #[feature(managed_boxes)]; pub fn main() { - let (&x, &y, &z) = (&3, &'a', &@"No pets!"); + let (&x, &y) = (&3, &'a'); assert_eq!(x, 3); assert_eq!(y, 'a'); - assert_eq!(z, @"No pets!"); } diff --git a/src/test/run-pass/borrowed-ptr-pattern.rs b/src/test/run-pass/borrowed-ptr-pattern.rs index 11751ed6ade3b..7ccb40c8e7b37 100644 --- a/src/test/run-pass/borrowed-ptr-pattern.rs +++ b/src/test/run-pass/borrowed-ptr-pattern.rs @@ -17,5 +17,4 @@ fn foo(x: &T) -> T{ pub fn main() { assert_eq!(foo(&3), 3); assert_eq!(foo(&'a'), 'a'); - assert_eq!(foo(&@"Dogs rule, cats drool"), @"Dogs rule, cats drool"); } diff --git a/src/test/run-pass/estr-shared.rs b/src/test/run-pass/estr-shared.rs deleted file mode 100644 index 73837a46df707..0000000000000 --- a/src/test/run-pass/estr-shared.rs +++ /dev/null @@ -1,15 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
- -#[feature(managed_boxes)]; - -pub fn main() { - let _x : @str = @"hello"; -} diff --git a/src/test/run-pass/ifmt.rs b/src/test/run-pass/ifmt.rs index 610cba1eb1fa6..cc59ce5d8b245 100644 --- a/src/test/run-pass/ifmt.rs +++ b/src/test/run-pass/ifmt.rs @@ -58,7 +58,6 @@ pub fn main() { t!(format!("{}", 1.0f64), "1"); t!(format!("{}", "a"), "a"); t!(format!("{}", ~"a"), "a"); - t!(format!("{}", @"a"), "a"); t!(format!("{}", false), "false"); t!(format!("{}", 'a'), "a"); @@ -73,7 +72,6 @@ pub fn main() { t!(format!("{:X}", 10u), "A"); t!(format!("{:s}", "foo"), "foo"); t!(format!("{:s}", ~"foo"), "foo"); - t!(format!("{:s}", @"foo"), "foo"); t!(format!("{:p}", 0x1234 as *int), "0x1234"); t!(format!("{:p}", 0x1234 as *mut int), "0x1234"); t!(format!("{:d}", A), "aloha"); diff --git a/src/test/run-pass/issue-3574.rs b/src/test/run-pass/issue-3574.rs index eb59b3e12b610..ace27c5ea11f6 100644 --- a/src/test/run-pass/issue-3574.rs +++ b/src/test/run-pass/issue-3574.rs @@ -26,5 +26,4 @@ pub fn main() { assert!(compare("foo", "foo")); assert!(compare(~"foo", ~"foo")); - assert!(compare(@"foo", @"foo")); } diff --git a/src/test/run-pass/issue-4092.rs b/src/test/run-pass/issue-4092.rs deleted file mode 100644 index 62174a70d07fe..0000000000000 --- a/src/test/run-pass/issue-4092.rs +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -#[feature(managed_boxes)]; - -use std::hashmap::HashMap; - -pub fn main() { - let mut x = HashMap::new(); - x.insert((@"abc", 0), 0); -} diff --git a/src/test/run-pass/match-borrowed_str.rs b/src/test/run-pass/match-borrowed_str.rs index acff2de548eed..b0f31f70f53c0 100644 --- a/src/test/run-pass/match-borrowed_str.rs +++ b/src/test/run-pass/match-borrowed_str.rs @@ -43,19 +43,15 @@ fn g2(ref_1: &str, ref_2: &str) -> ~str { } pub fn main() { - assert_eq!(f1(@"a"), ~"found a"); assert_eq!(f1(~"b"), ~"found b"); assert_eq!(f1(&"c"), ~"not found"); assert_eq!(f1("d"), ~"not found"); - assert_eq!(f2(@"a"), ~"found a"); assert_eq!(f2(~"b"), ~"found b"); assert_eq!(f2(&"c"), ~"not found (c)"); assert_eq!(f2("d"), ~"not found (d)"); - assert_eq!(g1(@"a", @"b"), ~"found a,b"); assert_eq!(g1(~"b", ~"c"), ~"found b,c"); assert_eq!(g1(&"c", &"d"), ~"not found"); assert_eq!(g1("d", "e"), ~"not found"); - assert_eq!(g2(@"a", @"b"), ~"found a,b"); assert_eq!(g2(~"b", ~"c"), ~"found b,c"); assert_eq!(g2(&"c", &"d"), ~"not found (c, d)"); assert_eq!(g2("d", "e"), ~"not found (d, e)"); diff --git a/src/test/run-pass/nullable-pointer-iotareduction.rs b/src/test/run-pass/nullable-pointer-iotareduction.rs index acb7fe12360e1..32803283bf940 100644 --- a/src/test/run-pass/nullable-pointer-iotareduction.rs +++ b/src/test/run-pass/nullable-pointer-iotareduction.rs @@ -79,7 +79,6 @@ pub fn main() { check_type!(~18: ~int); check_type!(@19: @int); check_type!(~"foo": ~str); - check_type!(@"bar": @str); check_type!(~[20, 22]: ~[int]); check_type!(@[]: @[int]); check_type!(@[24, 26]: @[int]); diff --git a/src/test/run-pass/nullable-pointer-size.rs b/src/test/run-pass/nullable-pointer-size.rs index 228b91a4532cd..5117d0849fb97 100644 --- a/src/test/run-pass/nullable-pointer-size.rs +++ b/src/test/run-pass/nullable-pointer-size.rs @@ -41,7 +41,6 @@ pub fn main() { 
check_type!(~int); check_type!(@int); check_type!(~str); - check_type!(@str); check_type!(~[int]); check_type!(@[int]); check_type!(extern fn()); diff --git a/src/test/run-pass/reflect-visit-data.rs b/src/test/run-pass/reflect-visit-data.rs index 6dec5fdaa1cba..38000a43518b4 100644 --- a/src/test/run-pass/reflect-visit-data.rs +++ b/src/test/run-pass/reflect-visit-data.rs @@ -180,9 +180,6 @@ impl TyVisitor for ptr_visit_adaptor { } fn visit_estr_box(&mut self) -> bool { - self.align_to::<@str>(); - if ! self.inner().visit_estr_box() { return false; } - self.bump_past::<@str>(); true } diff --git a/src/test/run-pass/send_str_hashmap.rs b/src/test/run-pass/send_str_hashmap.rs index 1e3bd5897a9d5..dc7e51c3c2364 100644 --- a/src/test/run-pass/send_str_hashmap.rs +++ b/src/test/run-pass/send_str_hashmap.rs @@ -63,11 +63,6 @@ pub fn main() { assert_eq!(map.find_equiv(&(~"cde")), Some(&c)); assert_eq!(map.find_equiv(&(~"def")), Some(&d)); - assert_eq!(map.find_equiv(&(@"abc")), Some(&a)); - assert_eq!(map.find_equiv(&(@"bcd")), Some(&b)); - assert_eq!(map.find_equiv(&(@"cde")), Some(&c)); - assert_eq!(map.find_equiv(&(@"def")), Some(&d)); - assert_eq!(map.find_equiv(&SendStrStatic("abc")), Some(&a)); assert_eq!(map.find_equiv(&SendStrStatic("bcd")), Some(&b)); assert_eq!(map.find_equiv(&SendStrStatic("cde")), Some(&c));
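// [editor's note] Closing sketch, illustrative only: after dropping the @str
// probes, send_str_hashmap still covers contents-based lookup via Equiv for
// both owned and static keys. The map type and value type are assumptions
// based on the test above, and its imports are assumed to be in scope.
fn probe(map: &HashMap<SendStr, uint>) -> bool {
    map.find_equiv(&(~"abc")).is_some() &&
        map.find_equiv(&SendStrStatic("abc")).is_some()
}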