libsyntax: Remove uses of ~str from libsyntax, and fix fallout

Patrick Walton 2014-05-07 16:33:43 -07:00
parent e454851813
commit 7f8f3dcf17
50 changed files with 773 additions and 629 deletions
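
The pattern applied throughout the diff below is mechanical: owned `~str` values become `StrBuf` (the growable string buffer that was later renamed `String`), and call sites convert explicitly with `.as_slice()` where a `&str` is needed or `.to_strbuf()` where an owned buffer is needed. For readers unfamiliar with the pre-1.0 types, here is a minimal sketch of the same owned-versus-borrowed split in modern Rust; the `Label` type and `hash_input` function are illustrative inventions, not code from this patch.

    // Hypothetical sketch, not from the patch: `StrBuf` corresponds to
    // today's `String`, and `.as_slice()` corresponds to `.as_str()`.
    struct Label {
        name: String, // was `name: ~str`; the patch makes it `name: StrBuf`
    }

    // A function that only reads the string takes a borrowed slice...
    fn hash_input(s: &str) -> usize {
        s.len() // stand-in for a real hasher such as Sha256's input_str
    }

    fn main() {
        let label = Label { name: "test".to_string() }; // was "test".to_owned()
        // ...so the owner converts at the call boundary, the modern
        // equivalent of the `.as_slice()` calls added below.
        let h = hash_input(label.name.as_str());
        println!("{} hashes to {}", label.name, h);
    }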

View file

@@ -536,7 +536,7 @@ pub fn crate_id_hash(crate_id: &CrateId) -> ~str {
     // the crate id in the hash because lookups are only done by (name/vers),
     // not by path.
     let mut s = Sha256::new();
-    s.input_str(crate_id.short_name_with_version());
+    s.input_str(crate_id.short_name_with_version().as_slice());
     truncated_hash_result(&mut s).slice_to(8).to_owned()
 }
@@ -566,7 +566,7 @@ fn symbol_hash(tcx: &ty::ctxt,
     // to be independent of one another in the crate.
     symbol_hasher.reset();
-    symbol_hasher.input_str(link_meta.crateid.name);
+    symbol_hasher.input_str(link_meta.crateid.name.as_slice());
     symbol_hasher.input_str("-");
     symbol_hasher.input_str(link_meta.crate_hash.as_str());
     symbol_hasher.input_str("-");

View file

@@ -143,8 +143,8 @@ pub fn build_configuration(sess: &Session) -> ast::CrateConfig {
 fn parse_cfgspecs(cfgspecs: Vec<~str> )
                   -> ast::CrateConfig {
     cfgspecs.move_iter().map(|s| {
-        parse::parse_meta_from_source_str("cfgspec".to_str(),
-                                          s,
+        parse::parse_meta_from_source_str("cfgspec".to_strbuf(),
+                                          s.to_strbuf(),
                                           Vec::new(),
                                           &parse::new_parse_sess())
     }).collect::<ast::CrateConfig>()
@@ -175,8 +175,8 @@ pub fn phase_1_parse_input(sess: &Session, cfg: ast::CrateConfig, input: &Input)
            parse::parse_crate_from_file(&(*file), cfg.clone(), &sess.parse_sess)
        }
        StrInput(ref src) => {
-           parse::parse_crate_from_source_str(anon_src(),
-                                              (*src).clone(),
+           parse::parse_crate_from_source_str(anon_src().to_strbuf(),
+                                              src.to_strbuf(),
                                               cfg.clone(),
                                               &sess.parse_sess)
        }
@@ -528,7 +528,7 @@ fn write_out_deps(sess: &Session,
        // write Makefile-compatible dependency rules
        let files: Vec<~str> = sess.codemap().files.borrow()
                                   .iter().filter(|fmap| fmap.is_real_file())
-                                  .map(|fmap| fmap.name.clone())
+                                  .map(|fmap| fmap.name.to_owned())
                                   .collect();
        let mut file = try!(io::File::create(&deps_filename));
        for path in out_filenames.iter() {
@@ -604,20 +604,20 @@ impl pprust::PpAnn for IdentifiedAnnotation {
        match node {
            pprust::NodeItem(item) => {
                try!(pp::space(&mut s.s));
-               s.synth_comment(item.id.to_str())
+               s.synth_comment(item.id.to_str().to_strbuf())
            }
            pprust::NodeBlock(blk) => {
                try!(pp::space(&mut s.s));
-               s.synth_comment("block ".to_owned() + blk.id.to_str())
+               s.synth_comment((format!("block {}", blk.id)).to_strbuf())
            }
            pprust::NodeExpr(expr) => {
                try!(pp::space(&mut s.s));
-               try!(s.synth_comment(expr.id.to_str()));
+               try!(s.synth_comment(expr.id.to_str().to_strbuf()));
                s.pclose()
            }
            pprust::NodePat(pat) => {
                try!(pp::space(&mut s.s));
-               s.synth_comment("pat ".to_owned() + pat.id.to_str())
+               s.synth_comment((format!("pat {}", pat.id)).to_strbuf())
            }
        }
    }
@@ -692,7 +692,7 @@ pub fn pretty_print_input(sess: Session,
            pprust::print_crate(sess.codemap(),
                                sess.diagnostic(),
                                &krate,
-                               src_name,
+                               src_name.to_strbuf(),
                                &mut rdr,
                                out,
                                &IdentifiedAnnotation,
@@ -707,7 +707,7 @@ pub fn pretty_print_input(sess: Session,
            pprust::print_crate(annotation.analysis.ty_cx.sess.codemap(),
                                annotation.analysis.ty_cx.sess.diagnostic(),
                                &krate,
-                               src_name,
+                               src_name.to_strbuf(),
                                &mut rdr,
                                out,
                                &annotation,
@@ -717,7 +717,7 @@ pub fn pretty_print_input(sess: Session,
            pprust::print_crate(sess.codemap(),
                                sess.diagnostic(),
                                &krate,
-                               src_name,
+                               src_name.to_strbuf(),
                                &mut rdr,
                                out,
                                &pprust::NoAnn,

View file

@@ -471,7 +471,8 @@ cgoptions!(
 )
 // Seems out of place, but it uses session, so I'm putting it here
-pub fn expect<T:Clone>(sess: &Session, opt: Option<T>, msg: || -> ~str) -> T {
+pub fn expect<T:Clone>(sess: &Session, opt: Option<T>, msg: || -> StrBuf)
+              -> T {
     diagnostic::expect(sess.diagnostic(), opt, msg)
 }

View file

@@ -168,7 +168,7 @@ fn generate_test_harness(sess: &Session, krate: ast::Crate)
     cx.ext_cx.bt_push(ExpnInfo {
         call_site: DUMMY_SP,
         callee: NameAndSpan {
-            name: "test".to_owned(),
+            name: "test".to_strbuf(),
             format: MacroAttribute,
             span: None
         }
@@ -398,7 +398,7 @@ fn mk_tests(cx: &TestCtxt) -> @ast::Item {
 fn is_test_crate(krate: &ast::Crate) -> bool {
     match attr::find_crateid(krate.attrs.as_slice()) {
-        Some(ref s) if "test" == s.name => true,
+        Some(ref s) if "test" == s.name.as_slice() => true,
         _ => false
     }
 }
@@ -427,7 +427,7 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::Expr {
     let name_lit: ast::Lit =
         nospan(ast::LitStr(token::intern_and_get_ident(
-            ast_util::path_name_i(path.as_slice())),
+            ast_util::path_name_i(path.as_slice()).as_slice()),
             ast::CookedStr));
     let name_expr = @ast::Expr {

View file

@@ -352,8 +352,9 @@ fn parse_crate_attrs(sess: &session::Session, input: &d::Input) ->
                                                &sess.parse_sess)
        }
        d::StrInput(ref src) => {
-           parse::parse_crate_attrs_from_source_str(d::anon_src(),
-                                                    (*src).clone(),
+           parse::parse_crate_attrs_from_source_str(
+               d::anon_src().to_strbuf(),
+               src.to_strbuf(),
                                                     Vec::new(),
                                                     &sess.parse_sess)
        }

View file

@@ -428,11 +428,11 @@ impl<'a> CrateLoader for Loader<'a> {
        };
        let macros = decoder::get_exported_macros(library.metadata.as_slice());
        let registrar = decoder::get_macro_registrar_fn(library.metadata.as_slice()).map(|id| {
-           decoder::get_symbol(library.metadata.as_slice(), id)
+           decoder::get_symbol(library.metadata.as_slice(), id).to_strbuf()
        });
        let mc = MacroCrate {
            lib: library.dylib.clone(),
-           macros: macros.move_iter().collect(),
+           macros: macros.move_iter().map(|x| x.to_strbuf()).collect(),
            registrar_symbol: registrar,
        };
        if should_link {

View file

@@ -207,12 +207,17 @@ pub fn get_field_type(tcx: &ty::ctxt, class_id: ast::DefId,
     let all_items = reader::get_doc(reader::Doc(cdata.data()), tag_items);
     let class_doc = expect(tcx.sess.diagnostic(),
                            decoder::maybe_find_item(class_id.node, all_items),
-                           || format!("get_field_type: class ID {:?} not found",
-                                      class_id) );
+                           || {
+                               (format!("get_field_type: class ID {:?} not found",
+                                        class_id)).to_strbuf()
+                           });
     let the_field = expect(tcx.sess.diagnostic(),
                            decoder::maybe_find_item(def.node, class_doc),
-                           || format!("get_field_type: in class {:?}, field ID {:?} not found",
-                                      class_id, def) );
+                           || {
+                               (format!("get_field_type: in class {:?}, field ID {:?} not found",
+                                        class_id,
+                                        def)).to_strbuf()
+                           });
     let ty = decoder::item_type(def, the_field, tcx, &*cdata);
     ty::ty_param_bounds_and_ty {
         generics: ty::Generics {type_param_defs: Rc::new(Vec::new()),

View file

@@ -1563,7 +1563,7 @@ impl<'a, 'b, 'c> Visitor<()> for MacroDefVisitor<'a, 'b, 'c> {
                let def = self.ecx.tcx.sess.codemap().span_to_snippet(item.span)
                    .expect("Unable to find source for macro");
                self.ebml_w.start_tag(tag_macro_def);
-               self.ebml_w.wr_str(def);
+               self.ebml_w.wr_str(def.as_slice());
                self.ebml_w.end_tag();
            }
            _ => {}

View file

@@ -112,8 +112,8 @@ impl<'a, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, O> {
                "".to_owned()
            };
-           try!(ps.synth_comment(format!("id {}: {}{}{}", id, entry_str,
-                                         gens_str, kills_str)));
+           try!(ps.synth_comment((format!("id {}: {}{}{}", id, entry_str,
+                                          gens_str, kills_str)).to_strbuf()));
            try!(pp::space(&mut ps.s));
        }
        Ok(())

View file

@@ -569,7 +569,7 @@ fn check_sized(tcx: &ty::ctxt, ty: ty::t, name: ~str, sp: Span) {
 fn check_pat(cx: &mut Context, pat: &Pat) {
     let var_name = match pat.node {
         PatWild => Some("_".to_owned()),
-        PatIdent(_, ref path, _) => Some(path_to_str(path)),
+        PatIdent(_, ref path, _) => Some(path_to_str(path).to_owned()),
         _ => None
     };

View file

@@ -357,7 +357,7 @@ enum FieldName {
 impl<'a> PrivacyVisitor<'a> {
     // used when debugging
     fn nodestr(&self, id: ast::NodeId) -> ~str {
-        self.tcx.map.node_to_str(id)
+        self.tcx.map.node_to_str(id).to_owned()
     }
     // Determines whether the given definition is public from the point of view

View file

@@ -3167,12 +3167,12 @@ impl<'a> Resolver<'a> {
                              .codemap()
                              .span_to_snippet(imports.get(index).span)
                              .unwrap();
-               if sn.contains("::") {
+               if sn.as_slice().contains("::") {
                    self.resolve_error(imports.get(index).span,
                                       "unresolved import");
                } else {
                    let err = format!("unresolved import (maybe you meant `{}::*`?)",
-                                     sn.slice(0, sn.len()));
+                                     sn.as_slice().slice(0, sn.len()));
                    self.resolve_error(imports.get(index).span, err);
                }
            }

View file

@@ -1145,7 +1145,11 @@ pub fn new_fn_ctxt<'a>(ccx: &'a CrateContext,
     for p in param_substs.iter() { p.validate(); }
     debug!("new_fn_ctxt(path={}, id={}, param_substs={})",
-           if id == -1 { "".to_owned() } else { ccx.tcx.map.path_to_str(id) },
+           if id == -1 {
+               "".to_owned()
+           } else {
+               ccx.tcx.map.path_to_str(id).to_owned()
+           },
            id, param_substs.map(|s| s.repr(ccx.tcx())));
     let substd_output_type = match param_substs {
@@ -1458,7 +1462,7 @@ pub fn trans_fn(ccx: &CrateContext,
                 param_substs: Option<&param_substs>,
                 id: ast::NodeId,
                 attrs: &[ast::Attribute]) {
-    let _s = StatRecorder::new(ccx, ccx.tcx.map.path_to_str(id));
+    let _s = StatRecorder::new(ccx, ccx.tcx.map.path_to_str(id).to_owned());
     debug!("trans_fn(param_substs={})", param_substs.map(|s| s.repr(ccx.tcx())));
     let _icx = push_ctxt("trans_fn");
     let output_type = ty::ty_fn_ret(ty::node_id_to_type(ccx.tcx(), id));
@@ -2161,9 +2165,10 @@ pub fn trans_crate(krate: ast::Crate,
     //    crashes if the module identifer is same as other symbols
     //    such as a function name in the module.
     // 1. http://llvm.org/bugs/show_bug.cgi?id=11479
-    let llmod_id = link_meta.crateid.name + ".rs";
+    let mut llmod_id = link_meta.crateid.name.clone();
+    llmod_id.push_str(".rs");
-    let ccx = CrateContext::new(llmod_id, tcx, exp_map2,
+    let ccx = CrateContext::new(llmod_id.as_slice(), tcx, exp_map2,
                                 Sha256::new(), link_meta, reachable);
     {
         let _icx = push_ctxt("text");

View file

@@ -364,7 +364,7 @@ pub fn trans_fn_ref_with_vtables(
     let map_node = session::expect(
         ccx.sess(),
         tcx.map.find(def_id.node),
-        || format!("local item should be in ast map"));
+        || "local item should be in ast map".to_strbuf());
     match map_node {
         ast_map::NodeForeignItem(_) => {

View file

@@ -426,7 +426,7 @@ impl<'a> Block<'a> {
     }
     pub fn node_id_to_str(&self, id: ast::NodeId) -> ~str {
-        self.tcx().map.node_to_str(id)
+        self.tcx().map.node_to_str(id).to_owned()
     }
     pub fn expr_to_str(&self, e: &ast::Expr) -> ~str {
@@ -839,7 +839,10 @@ pub fn filename_and_line_num_from_span(bcx: &Block, span: Span)
                                        -> (ValueRef, ValueRef) {
     let loc = bcx.sess().codemap().lookup_char_pos(span.lo);
     let filename_cstr = C_cstr(bcx.ccx(),
-                               token::intern_and_get_ident(loc.file.name), true);
+                               token::intern_and_get_ident(loc.file
+                                                              .name
+                                                              .as_slice()),
+                               true);
     let filename = build::PointerCast(bcx, filename_cstr, Type::i8p(bcx.ccx()));
     let line = C_int(bcx.ccx(), loc.line as int);
     (filename, line)

View file

@@ -345,7 +345,11 @@ pub fn trans_fail<'a>(
     let v_fail_str = C_cstr(ccx, fail_str, true);
     let _icx = push_ctxt("trans_fail_value");
     let loc = bcx.sess().codemap().lookup_char_pos(sp.lo);
-    let v_filename = C_cstr(ccx, token::intern_and_get_ident(loc.file.name), true);
+    let v_filename = C_cstr(ccx,
+                            token::intern_and_get_ident(loc.file
+                                                           .name
+                                                           .as_slice()),
+                            true);
     let v_line = loc.line as int;
     let v_str = PointerCast(bcx, v_fail_str, Type::i8p(ccx));
     let v_filename = PointerCast(bcx, v_filename, Type::i8p(ccx));

View file

@@ -330,7 +330,7 @@ pub fn create_global_var_metadata(cx: &CrateContext,
     };
     let filename = span_start(cx, span).file.name.clone();
-    let file_metadata = file_metadata(cx, filename);
+    let file_metadata = file_metadata(cx, filename.as_slice());
     let is_local_to_unit = is_node_local_to_unit(cx, node_id);
     let loc = span_start(cx, span);
@@ -700,7 +700,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
     }
     let loc = span_start(cx, span);
-    let file_metadata = file_metadata(cx, loc.file.name);
+    let file_metadata = file_metadata(cx, loc.file.name.as_slice());
     let function_type_metadata = unsafe {
         let fn_signature = get_function_signature(cx, fn_ast_id, fn_decl, param_substs, span);
@@ -1011,7 +1011,7 @@ fn compile_unit_metadata(cx: &CrateContext) {
     });
     fn fallback_path(cx: &CrateContext) -> CString {
-        cx.link_meta.crateid.name.to_c_str()
+        cx.link_meta.crateid.name.as_slice().to_c_str()
     }
 }
@@ -1025,7 +1025,7 @@ fn declare_local(bcx: &Block,
     let cx: &CrateContext = bcx.ccx();
     let filename = span_start(cx, span).file.name.clone();
-    let file_metadata = file_metadata(cx, filename);
+    let file_metadata = file_metadata(cx, filename.as_slice());
     let name = token::get_ident(variable_ident);
     let loc = span_start(cx, span);
@@ -1277,7 +1277,7 @@ fn prepare_struct_metadata(cx: &CrateContext,
     let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id);
     let file_name = span_start(cx, definition_span).file.name.clone();
-    let file_metadata = file_metadata(cx, file_name);
+    let file_metadata = file_metadata(cx, file_name.as_slice());
     let struct_metadata_stub = create_struct_stub(cx,
                                                   struct_llvm_type,
@@ -1371,7 +1371,7 @@ fn prepare_tuple_metadata(cx: &CrateContext,
     let tuple_llvm_type = type_of::type_of(cx, tuple_type);
     let loc = span_start(cx, span);
-    let file_metadata = file_metadata(cx, loc.file.name);
+    let file_metadata = file_metadata(cx, loc.file.name.as_slice());
     UnfinishedMetadata {
         cache_id: cache_id_for_type(tuple_type),
@@ -1533,7 +1533,7 @@ fn prepare_enum_metadata(cx: &CrateContext,
     let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, enum_def_id);
     let loc = span_start(cx, definition_span);
-    let file_metadata = file_metadata(cx, loc.file.name);
+    let file_metadata = file_metadata(cx, loc.file.name.as_slice());
     // For empty enums there is an early exit. Just describe it as an empty struct with the
     // appropriate type name
@@ -1903,7 +1903,7 @@ fn boxed_type_metadata(cx: &CrateContext,
     ];
     let loc = span_start(cx, span);
-    let file_metadata = file_metadata(cx, loc.file.name);
+    let file_metadata = file_metadata(cx, loc.file.name.as_slice());
     return composite_type_metadata(
         cx,
@@ -2004,7 +2004,7 @@ fn vec_metadata(cx: &CrateContext,
     assert!(member_descriptions.len() == member_llvm_types.len());
     let loc = span_start(cx, span);
-    let file_metadata = file_metadata(cx, loc.file.name);
+    let file_metadata = file_metadata(cx, loc.file.name.as_slice());
     composite_type_metadata(
         cx,
@@ -2055,7 +2055,7 @@ fn vec_slice_metadata(cx: &CrateContext,
     assert!(member_descriptions.len() == member_llvm_types.len());
     let loc = span_start(cx, span);
-    let file_metadata = file_metadata(cx, loc.file.name);
+    let file_metadata = file_metadata(cx, loc.file.name.as_slice());
     return composite_type_metadata(
         cx,
@@ -2081,7 +2081,7 @@ fn subroutine_type_metadata(cx: &CrateContext,
                             span: Span)
                             -> DICompositeType {
     let loc = span_start(cx, span);
-    let file_metadata = file_metadata(cx, loc.file.name);
+    let file_metadata = file_metadata(cx, loc.file.name.as_slice());
     let mut signature_metadata: Vec<DIType> =
         Vec::with_capacity(signature.inputs.len() + 1);
@@ -2126,7 +2126,7 @@ fn trait_metadata(cx: &CrateContext,
     let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id);
     let file_name = span_start(cx, definition_span).file.name.clone();
-    let file_metadata = file_metadata(cx, file_name);
+    let file_metadata = file_metadata(cx, file_name.as_slice());
     let trait_llvm_type = type_of::type_of(cx, trait_type);
@@ -2420,7 +2420,7 @@ fn populate_scope_map(cx: &CrateContext,
                                  &mut HashMap<ast::NodeId, DIScope>|) {
     // Create a new lexical scope and push it onto the stack
     let loc = cx.sess().codemap().lookup_char_pos(scope_span.lo);
-    let file_metadata = file_metadata(cx, loc.file.name);
+    let file_metadata = file_metadata(cx, loc.file.name.as_slice());
     let parent_scope = scope_stack.last().unwrap().scope_metadata;
     let scope_metadata = unsafe {
@@ -2538,7 +2538,10 @@ fn populate_scope_map(cx: &CrateContext,
            if need_new_scope {
                // Create a new lexical scope and push it onto the stack
                let loc = cx.sess().codemap().lookup_char_pos(pat.span.lo);
-               let file_metadata = file_metadata(cx, loc.file.name);
+               let file_metadata = file_metadata(cx,
+                                                 loc.file
+                                                    .name
+                                                    .as_slice());
                let parent_scope = scope_stack.last().unwrap().scope_metadata;
                let scope_metadata = unsafe {
@@ -2860,7 +2863,10 @@ fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc<NamespaceTree
     ty::with_path(cx.tcx(), def_id, |path| {
         // prepend crate name if not already present
         let krate = if def_id.krate == ast::LOCAL_CRATE {
-            let crate_namespace_ident = token::str_to_ident(cx.link_meta.crateid.name);
+            let crate_namespace_ident = token::str_to_ident(cx.link_meta
+                                                              .crateid
+                                                              .name
+                                                              .as_slice());
             Some(ast_map::PathMod(crate_namespace_ident.name))
         } else {
             None

View file

@@ -109,9 +109,11 @@ pub fn monomorphic_fn(ccx: &CrateContext,
     let map_node = session::expect(
         ccx.sess(),
         ccx.tcx.map.find(fn_id.node),
-        || format!("while monomorphizing {:?}, couldn't find it in the \
-                    item map (may have attempted to monomorphize an item \
-                    defined in a different crate?)", fn_id));
+        || {
+            (format!("while monomorphizing {:?}, couldn't find it in the \
+                      item map (may have attempted to monomorphize an item \
+                      defined in a different crate?)", fn_id)).to_strbuf()
+        });
     match map_node {
         ast_map::NodeForeignItem(_) => {

View file

@@ -3709,7 +3709,7 @@ pub fn substd_enum_variants(cx: &ctxt,
 }
 pub fn item_path_str(cx: &ctxt, id: ast::DefId) -> ~str {
-    with_path(cx, id, |path| ast_map::path_to_str(path))
+    with_path(cx, id, |path| ast_map::path_to_str(path)).to_owned()
 }
 pub enum DtorKind {

View file

@@ -341,9 +341,9 @@ pub fn ty_to_str(cx: &ctxt, typ: t) -> ~str {
        ty_bot => "!".to_owned(),
        ty_bool => "bool".to_owned(),
        ty_char => "char".to_owned(),
-       ty_int(t) => ast_util::int_ty_to_str(t, None),
-       ty_uint(t) => ast_util::uint_ty_to_str(t, None),
-       ty_float(t) => ast_util::float_ty_to_str(t),
+       ty_int(t) => ast_util::int_ty_to_str(t, None).to_owned(),
+       ty_uint(t) => ast_util::uint_ty_to_str(t, None).to_owned(),
+       ty_float(t) => ast_util::float_ty_to_str(t).to_owned(),
        ty_box(typ) => "@".to_owned() + ty_to_str(cx, typ),
        ty_uniq(typ) => "~".to_owned() + ty_to_str(cx, typ),
        ty_ptr(ref tm) => "*".to_owned() + mt_to_str(cx, tm),
@@ -870,7 +870,7 @@ impl Repr for ty::BuiltinBounds {
 impl Repr for Span {
     fn repr(&self, tcx: &ctxt) -> ~str {
-        tcx.sess.codemap().span_to_str(*self)
+        tcx.sess.codemap().span_to_str(*self).to_owned()
     }
 }

View file

@@ -92,7 +92,7 @@ impl<'a> Clean<Crate> for visit_ast::RustdocVisitor<'a> {
        let id = link::find_crate_id(self.attrs.as_slice(),
                                     t_outputs.out_filestem);
        Crate {
-           name: id.name,
+           name: id.name.to_owned(),
            module: Some(self.module.clean()),
            externs: externs,
        }
@@ -1239,7 +1239,7 @@ impl ToSource for syntax::codemap::Span {
        let ctxt = super::ctxtkey.get().unwrap();
        let cm = ctxt.sess().codemap().clone();
        let sn = match cm.span_to_snippet(*self) {
-           Some(x) => x,
+           Some(x) => x.to_owned(),
            None => "".to_owned()
        };
        debug!("got snippet {}", sn);

View file

@@ -27,7 +27,9 @@ use t = syntax::parse::token;
 /// Highlights some source code, returning the HTML output.
 pub fn highlight(src: &str, class: Option<&str>) -> ~str {
     let sess = parse::new_parse_sess();
-    let fm = parse::string_to_filemap(&sess, src.to_owned(), "<stdin>".to_owned());
+    let fm = parse::string_to_filemap(&sess,
+                                      src.to_strbuf(),
+                                      "<stdin>".to_strbuf());
     let mut out = io::MemWriter::new();
     doit(&sess,
@@ -70,11 +72,11 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader, class: Option<&
                    hi: test,
                    expn_info: None,
                }).unwrap();
-               if snip.contains("/") {
+               if snip.as_slice().contains("/") {
                    try!(write!(out, "<span class='comment'>{}</span>",
-                               Escape(snip)));
+                               Escape(snip.as_slice())));
                } else {
-                   try!(write!(out, "{}", Escape(snip)));
+                   try!(write!(out, "{}", Escape(snip.as_slice())));
                }
            }
            last = next.sp.hi;
@@ -171,10 +173,10 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader, class: Option<&
        // stringifying this token
        let snip = sess.span_diagnostic.cm.span_to_snippet(next.sp).unwrap();
        if klass == "" {
-           try!(write!(out, "{}", Escape(snip)));
+           try!(write!(out, "{}", Escape(snip.as_slice())));
        } else {
            try!(write!(out, "<span class='{}'>{}</span>", klass,
-                       Escape(snip)));
+                       Escape(snip.as_slice())));
        }
    }

View file

@@ -79,7 +79,7 @@ impl<'a, T: Copy> Iterator<T> for Values<'a, T> {
 /// The type of the iterator used by with_path.
 pub type PathElems<'a, 'b> = iter::Chain<Values<'a, PathElem>, LinkedPath<'b>>;
-pub fn path_to_str<PI: Iterator<PathElem>>(mut path: PI) -> ~str {
+pub fn path_to_str<PI: Iterator<PathElem>>(mut path: PI) -> StrBuf {
     let itr = token::get_ident_interner();
     path.fold(StrBuf::new(), |mut s, e| {
@@ -89,7 +89,7 @@ pub fn path_to_str<PI: Iterator<PathElem>>(mut path: PI) -> ~str {
        }
        s.push_str(e.as_slice());
        s
-    }).into_owned()
+    }).to_strbuf()
 }
 #[deriving(Clone)]
@@ -322,11 +322,11 @@ impl Map {
        self.with_path_next(id, None, f)
    }
-   pub fn path_to_str(&self, id: NodeId) -> ~str {
+   pub fn path_to_str(&self, id: NodeId) -> StrBuf {
        self.with_path(id, |path| path_to_str(path))
    }
-   fn path_to_str_with_ident(&self, id: NodeId, i: Ident) -> ~str {
+   fn path_to_str_with_ident(&self, id: NodeId, i: Ident) -> StrBuf {
        self.with_path(id, |path| {
            path_to_str(path.chain(Some(PathName(i.name)).move_iter()))
        })
@@ -405,7 +405,7 @@ impl Map {
        }
    }
-   pub fn node_to_str(&self, id: NodeId) -> ~str {
+   pub fn node_to_str(&self, id: NodeId) -> StrBuf {
        node_id_to_str(self, id)
    }
 }
@@ -650,7 +650,7 @@ pub fn map_decoded_item<F: FoldOps>(map: &Map,
     ii
 }
-fn node_id_to_str(map: &Map, id: NodeId) -> ~str {
+fn node_id_to_str(map: &Map, id: NodeId) -> StrBuf {
     match map.find(id) {
         Some(NodeItem(item)) => {
             let path_str = map.path_to_str_with_ident(id, item.ident);
@@ -666,51 +666,58 @@ fn node_id_to_str(map: &Map, id: NodeId) -> ~str {
                ItemImpl(..) => "impl",
                ItemMac(..) => "macro"
            };
-           format!("{} {} (id={})", item_str, path_str, id)
+           (format!("{} {} (id={})", item_str, path_str, id)).to_strbuf()
        }
        Some(NodeForeignItem(item)) => {
            let path_str = map.path_to_str_with_ident(id, item.ident);
-           format!("foreign item {} (id={})", path_str, id)
+           (format!("foreign item {} (id={})", path_str, id)).to_strbuf()
        }
        Some(NodeMethod(m)) => {
-           format!("method {} in {} (id={})",
-                   token::get_ident(m.ident),
-                   map.path_to_str(id), id)
+           (format!("method {} in {} (id={})",
                    token::get_ident(m.ident),
+                   map.path_to_str(id), id)).to_strbuf()
        }
        Some(NodeTraitMethod(ref tm)) => {
            let m = ast_util::trait_method_to_ty_method(&**tm);
-           format!("method {} in {} (id={})",
-                   token::get_ident(m.ident),
-                   map.path_to_str(id), id)
+           (format!("method {} in {} (id={})",
                    token::get_ident(m.ident),
+                   map.path_to_str(id), id)).to_strbuf()
        }
        Some(NodeVariant(ref variant)) => {
-           format!("variant {} in {} (id={})",
-                   token::get_ident(variant.node.name),
-                   map.path_to_str(id), id)
+           (format!("variant {} in {} (id={})",
                    token::get_ident(variant.node.name),
+                   map.path_to_str(id), id)).to_strbuf()
        }
        Some(NodeExpr(expr)) => {
-           format!("expr {} (id={})", pprust::expr_to_str(expr), id)
+           (format!("expr {} (id={})",
                    pprust::expr_to_str(expr), id)).to_strbuf()
        }
        Some(NodeStmt(stmt)) => {
-           format!("stmt {} (id={})", pprust::stmt_to_str(stmt), id)
+           (format!("stmt {} (id={})",
                    pprust::stmt_to_str(stmt), id)).to_strbuf()
        }
        Some(NodeArg(pat)) => {
-           format!("arg {} (id={})", pprust::pat_to_str(pat), id)
+           (format!("arg {} (id={})",
                    pprust::pat_to_str(pat), id)).to_strbuf()
        }
        Some(NodeLocal(pat)) => {
-           format!("local {} (id={})", pprust::pat_to_str(pat), id)
+           (format!("local {} (id={})",
                    pprust::pat_to_str(pat), id)).to_strbuf()
        }
        Some(NodeBlock(block)) => {
-           format!("block {} (id={})", pprust::block_to_str(block), id)
+           (format!("block {} (id={})",
                    pprust::block_to_str(block), id)).to_strbuf()
        }
        Some(NodeStructCtor(_)) => {
-           format!("struct_ctor {} (id={})", map.path_to_str(id), id)
+           (format!("struct_ctor {} (id={})",
                    map.path_to_str(id), id)).to_strbuf()
        }
        Some(NodeLifetime(ref l)) => {
-           format!("lifetime {} (id={})", pprust::lifetime_to_str(*l), id)
+           (format!("lifetime {} (id={})",
                    pprust::lifetime_to_str(*l), id)).to_strbuf()
        }
        None => {
-           format!("unknown node (id={})", id)
+           (format!("unknown node (id={})", id)).to_strbuf()
        }
    }
 }

View file

@@ -24,11 +24,11 @@ use std::cmp;
 use std::strbuf::StrBuf;
 use std::u32;
-pub fn path_name_i(idents: &[Ident]) -> ~str {
+pub fn path_name_i(idents: &[Ident]) -> StrBuf {
     // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
     idents.iter().map(|i| {
-        token::get_ident(*i).get().to_str()
-    }).collect::<Vec<~str>>().connect("::")
+        token::get_ident(*i).get().to_strbuf()
+    }).collect::<Vec<StrBuf>>().connect("::").to_strbuf()
 }
 // totally scary function: ignores all but the last element, should have
@@ -134,7 +134,7 @@ pub fn is_path(e: @Expr) -> bool {
 // Get a string representation of a signed int type, with its value.
 // We want to avoid "45int" and "-3int" in favor of "45" and "-3"
-pub fn int_ty_to_str(t: IntTy, val: Option<i64>) -> ~str {
+pub fn int_ty_to_str(t: IntTy, val: Option<i64>) -> StrBuf {
     let s = match t {
         TyI if val.is_some() => "",
         TyI => "int",
@@ -145,8 +145,8 @@ pub fn int_ty_to_str(t: IntTy, val: Option<i64>) -> ~str {
     };
     match val {
-        Some(n) => format!("{}{}", n, s),
-        None => s.to_owned()
+        Some(n) => format!("{}{}", n, s).to_strbuf(),
+        None => s.to_strbuf()
     }
 }
@@ -161,7 +161,7 @@ pub fn int_ty_max(t: IntTy) -> u64 {
 // Get a string representation of an unsigned int type, with its value.
 // We want to avoid "42uint" in favor of "42u"
-pub fn uint_ty_to_str(t: UintTy, val: Option<u64>) -> ~str {
+pub fn uint_ty_to_str(t: UintTy, val: Option<u64>) -> StrBuf {
     let s = match t {
         TyU if val.is_some() => "u",
         TyU => "uint",
@@ -172,8 +172,8 @@ pub fn uint_ty_to_str(t: UintTy, val: Option<u64>) -> ~str {
     };
     match val {
-        Some(n) => format!("{}{}", n, s),
-        None => s.to_owned()
+        Some(n) => format!("{}{}", n, s).to_strbuf(),
+        None => s.to_strbuf()
     }
 }
@@ -186,8 +186,12 @@ pub fn uint_ty_max(t: UintTy) -> u64 {
     }
 }
-pub fn float_ty_to_str(t: FloatTy) -> ~str {
-    match t { TyF32 => "f32".to_owned(), TyF64 => "f64".to_owned(), TyF128 => "f128".to_owned() }
+pub fn float_ty_to_str(t: FloatTy) -> StrBuf {
+    match t {
+        TyF32 => "f32".to_strbuf(),
+        TyF64 => "f64".to_strbuf(),
+        TyF128 => "f128".to_strbuf(),
+    }
 }
 pub fn is_call_expr(e: @Expr) -> bool {
@@ -252,11 +256,11 @@ pub fn unguarded_pat(a: &Arm) -> Option<Vec<@Pat> > {
 /// listed as `__extensions__::method_name::hash`, with no indication
 /// of the type).
 pub fn impl_pretty_name(trait_ref: &Option<TraitRef>, ty: &Ty) -> Ident {
-    let mut pretty = StrBuf::from_owned_str(pprust::ty_to_str(ty));
+    let mut pretty = pprust::ty_to_str(ty);
     match *trait_ref {
         Some(ref trait_ref) => {
             pretty.push_char('.');
-            pretty.push_str(pprust::path_to_str(&trait_ref.path));
+            pretty.push_str(pprust::path_to_str(&trait_ref.path).as_slice());
         }
         None => {}
     }

View file

@@ -125,7 +125,7 @@ impl AttributeMethods for Attribute {
            let meta = mk_name_value_item_str(
                InternedString::new("doc"),
                token::intern_and_get_ident(strip_doc_comment_decoration(
-                   comment.get())));
+                   comment.get()).as_slice()));
            mk_attr(meta)
        } else {
            *self

View file

@@ -189,7 +189,7 @@ pub enum MacroFormat {
 pub struct NameAndSpan {
     /// The name of the macro that was invoked to create the thing
     /// with this Span.
-    pub name: ~str,
+    pub name: StrBuf,
     /// The format with which the macro was invoked.
     pub format: MacroFormat,
     /// The span of the macro definition itself. The macro may not
@@ -220,7 +220,7 @@ pub struct ExpnInfo {
     pub callee: NameAndSpan
 }
-pub type FileName = ~str;
+pub type FileName = StrBuf;
 pub struct FileLines {
     pub file: Rc<FileMap>,
@@ -242,7 +242,7 @@ pub struct FileMap {
     /// e.g. `<anon>`
     pub name: FileName,
     /// The complete source code
-    pub src: ~str,
+    pub src: StrBuf,
     /// The start position of this source in the CodeMap
     pub start_pos: BytePos,
     /// Locations of lines beginnings in the source code
@@ -270,14 +270,14 @@ impl FileMap {
     }
     // get a line from the list of pre-computed line-beginnings
-    pub fn get_line(&self, line: int) -> ~str {
+    pub fn get_line(&self, line: int) -> StrBuf {
        let mut lines = self.lines.borrow_mut();
        let begin: BytePos = *lines.get(line as uint) - self.start_pos;
        let begin = begin.to_uint();
-       let slice = self.src.slice_from(begin);
+       let slice = self.src.as_slice().slice_from(begin);
        match slice.find('\n') {
-           Some(e) => slice.slice_to(e).to_owned(),
-           None => slice.to_owned()
+           Some(e) => slice.slice_to(e).to_strbuf(),
+           None => slice.to_strbuf()
        }
    }
@@ -291,7 +291,8 @@ impl FileMap {
    }
    pub fn is_real_file(&self) -> bool {
-       !(self.name.starts_with("<") && self.name.ends_with(">"))
+       !(self.name.as_slice().starts_with("<") &&
+         self.name.as_slice().ends_with(">"))
    }
 }
@@ -306,7 +307,7 @@ impl CodeMap {
        }
    }
-   pub fn new_filemap(&self, filename: FileName, src: ~str) -> Rc<FileMap> {
+   pub fn new_filemap(&self, filename: FileName, src: StrBuf) -> Rc<FileMap> {
        let mut files = self.files.borrow_mut();
        let start_pos = match files.last() {
            None => 0,
@@ -316,10 +317,10 @@ impl CodeMap {
        // Remove utf-8 BOM if any.
        // FIXME #12884: no efficient/safe way to remove from the start of a string
        // and reuse the allocation.
-       let mut src = if src.starts_with("\ufeff") {
+       let mut src = if src.as_slice().starts_with("\ufeff") {
            StrBuf::from_str(src.as_slice().slice_from(3))
        } else {
-           StrBuf::from_owned_str(src)
+           StrBuf::from_str(src.as_slice())
        };
        // Append '\n' in case it's not already there.
@@ -332,7 +333,7 @@ impl CodeMap {
        let filemap = Rc::new(FileMap {
            name: filename,
-           src: src.into_owned(),
+           src: src.to_strbuf(),
            start_pos: Pos::from_uint(start_pos),
            lines: RefCell::new(Vec::new()),
            multibyte_chars: RefCell::new(Vec::new()),
@@ -343,9 +344,12 @@ impl CodeMap {
        filemap
    }
-   pub fn mk_substr_filename(&self, sp: Span) -> ~str {
+   pub fn mk_substr_filename(&self, sp: Span) -> StrBuf {
        let pos = self.lookup_char_pos(sp.lo);
-       format!("<{}:{}:{}>", pos.file.name, pos.line, pos.col.to_uint() + 1)
+       (format!("<{}:{}:{}>",
                pos.file.name,
                pos.line,
                pos.col.to_uint() + 1)).to_strbuf()
    }
    /// Lookup source information about a BytePos
@@ -356,26 +360,30 @@ impl CodeMap {
    pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt {
        let loc = self.lookup_char_pos(pos);
        LocWithOpt {
-           filename: loc.file.name.to_str(),
+           filename: loc.file.name.to_strbuf(),
            line: loc.line,
            col: loc.col,
            file: Some(loc.file)
        }
    }
-   pub fn span_to_str(&self, sp: Span) -> ~str {
+   pub fn span_to_str(&self, sp: Span) -> StrBuf {
        if self.files.borrow().len() == 0 && sp == DUMMY_SP {
-           return "no-location".to_owned();
+           return "no-location".to_strbuf();
        }
        let lo = self.lookup_char_pos_adj(sp.lo);
        let hi = self.lookup_char_pos_adj(sp.hi);
-       return format!("{}:{}:{}: {}:{}", lo.filename,
-                      lo.line, lo.col.to_uint() + 1, hi.line, hi.col.to_uint() + 1)
+       return (format!("{}:{}:{}: {}:{}",
                       lo.filename,
                       lo.line,
                       lo.col.to_uint() + 1,
                       hi.line,
                       hi.col.to_uint() + 1)).to_strbuf()
    }
    pub fn span_to_filename(&self, sp: Span) -> FileName {
-       self.lookup_char_pos(sp.lo).file.name.to_str()
+       self.lookup_char_pos(sp.lo).file.name.to_strbuf()
    }
    pub fn span_to_lines(&self, sp: Span) -> FileLines {
@@ -388,7 +396,7 @@ impl CodeMap {
        FileLines {file: lo.file, lines: lines}
    }
-   pub fn span_to_snippet(&self, sp: Span) -> Option<~str> {
+   pub fn span_to_snippet(&self, sp: Span) -> Option<StrBuf> {
        let begin = self.lookup_byte_offset(sp.lo);
        let end = self.lookup_byte_offset(sp.hi);
@@ -399,13 +407,14 @@ impl CodeMap {
        if begin.fm.start_pos != end.fm.start_pos {
            None
        } else {
-           Some(begin.fm.src.slice( begin.pos.to_uint(), end.pos.to_uint()).to_owned())
+           Some(begin.fm.src.as_slice().slice(begin.pos.to_uint(),
+                                              end.pos.to_uint()).to_strbuf())
        }
    }
    pub fn get_filemap(&self, filename: &str) -> Rc<FileMap> {
        for fm in self.files.borrow().iter() {
-           if filename == fm.name {
+           if filename == fm.name.as_slice() {
                return fm.clone();
            }
        }
@@ -526,19 +535,21 @@ mod test {
    #[test]
    fn t1 () {
        let cm = CodeMap::new();
-       let fm = cm.new_filemap("blork.rs".to_owned(),"first line.\nsecond line".to_owned());
+       let fm = cm.new_filemap("blork.rs".to_strbuf(),
+                               "first line.\nsecond line".to_strbuf());
        fm.next_line(BytePos(0));
-       assert_eq!(&fm.get_line(0),&"first line.".to_owned());
+       assert_eq!(&fm.get_line(0),&"first line.".to_strbuf());
        // TESTING BROKEN BEHAVIOR:
        fm.next_line(BytePos(10));
-       assert_eq!(&fm.get_line(1),&".".to_owned());
+       assert_eq!(&fm.get_line(1), &".".to_strbuf());
    }
    #[test]
    #[should_fail]
    fn t2 () {
        let cm = CodeMap::new();
-       let fm = cm.new_filemap("blork.rs".to_owned(),"first line.\nsecond line".to_owned());
+       let fm = cm.new_filemap("blork.rs".to_strbuf(),
+                               "first line.\nsecond line".to_strbuf());
        // TESTING *REALLY* BROKEN BEHAVIOR:
        fm.next_line(BytePos(0));
        fm.next_line(BytePos(10));
@@ -547,9 +558,12 @@ mod test {
    fn init_code_map() -> CodeMap {
        let cm = CodeMap::new();
-       let fm1 = cm.new_filemap("blork.rs".to_owned(),"first line.\nsecond line".to_owned());
-       let fm2 = cm.new_filemap("empty.rs".to_owned(),"".to_owned());
-       let fm3 = cm.new_filemap("blork2.rs".to_owned(),"first line.\nsecond line".to_owned());
+       let fm1 = cm.new_filemap("blork.rs".to_strbuf(),
+                                "first line.\nsecond line".to_strbuf());
+       let fm2 = cm.new_filemap("empty.rs".to_strbuf(),
+                                "".to_strbuf());
+       let fm3 = cm.new_filemap("blork2.rs".to_strbuf(),
+                                "first line.\nsecond line".to_strbuf());
        fm1.next_line(BytePos(0));
        fm1.next_line(BytePos(12));
@@ -566,11 +580,11 @@ mod test {
        let cm = init_code_map();
        let fmabp1 = cm.lookup_byte_offset(BytePos(22));
-       assert_eq!(fmabp1.fm.name, "blork.rs".to_owned());
+       assert_eq!(fmabp1.fm.name, "blork.rs".to_strbuf());
        assert_eq!(fmabp1.pos, BytePos(22));
        let fmabp2 = cm.lookup_byte_offset(BytePos(24));
-       assert_eq!(fmabp2.fm.name, "blork2.rs".to_owned());
+       assert_eq!(fmabp2.fm.name, "blork2.rs".to_strbuf());
        assert_eq!(fmabp2.pos, BytePos(0));
    }
@@ -592,12 +606,12 @@ mod test {
        let cm = init_code_map();
        let loc1 = cm.lookup_char_pos(BytePos(22));
-       assert_eq!(loc1.file.name, "blork.rs".to_owned());
+       assert_eq!(loc1.file.name, "blork.rs".to_strbuf());
        assert_eq!(loc1.line, 2);
        assert_eq!(loc1.col, CharPos(10));
        let loc2 = cm.lookup_char_pos(BytePos(24));
-       assert_eq!(loc2.file.name, "blork2.rs".to_owned());
+       assert_eq!(loc2.file.name, "blork2.rs".to_strbuf());
        assert_eq!(loc2.line, 1);
        assert_eq!(loc2.col, CharPos(0));
    }
@@ -605,8 +619,11 @@ mod test {
    fn init_code_map_mbc() -> CodeMap {
        let cm = CodeMap::new();
        // € is a three byte utf8 char.
-       let fm1 = cm.new_filemap("blork.rs".to_owned(),"fir€st €€€€ line.\nsecond line".to_owned());
-       let fm2 = cm.new_filemap("blork2.rs".to_owned(),"first line€€.\n€ second line".to_owned());
+       let fm1 =
+           cm.new_filemap("blork.rs".to_strbuf(),
+                          "fir€st €€€€ line.\nsecond line".to_strbuf());
+       let fm2 = cm.new_filemap("blork2.rs".to_strbuf(),
+                                "first line€€.\n€ second line".to_strbuf());
        fm1.next_line(BytePos(0));
        fm1.next_line(BytePos(22));
@@ -650,7 +667,7 @@ mod test {
        let span = Span {lo: BytePos(12), hi: BytePos(23), expn_info: None};
        let file_lines = cm.span_to_lines(span);
-       assert_eq!(file_lines.file.name, "blork.rs".to_owned());
+       assert_eq!(file_lines.file.name, "blork.rs".to_strbuf());
        assert_eq!(file_lines.lines.len(), 1);
        assert_eq!(*file_lines.lines.get(0), 1u);
    }
@@ -662,7 +679,7 @@ mod test {
        let span = Span {lo: BytePos(12), hi: BytePos(23), expn_info: None};
        let snippet = cm.span_to_snippet(span);
-       assert_eq!(snippet, Some("second line".to_owned()));
+       assert_eq!(snippet, Some("second line".to_strbuf()));
    }
    #[test]
@@ -672,6 +689,6 @@ mod test {
        let span = Span {lo: BytePos(12), hi: BytePos(23), expn_info: None};
        let sstr = cm.span_to_str(span);
-       assert_eq!(sstr, "blork.rs:2:1: 2:12".to_owned());
+       assert_eq!(sstr, "blork.rs:2:1: 2:12".to_strbuf());
    }
 }

View file

@@ -24,11 +24,11 @@ use std::from_str::FromStr;
 pub struct CrateId {
     /// A path which represents the codes origin. By convention this is the
     /// URL, without `http://` or `https://` prefix, to the crate's repository
-    pub path: ~str,
+    pub path: StrBuf,
     /// The name of the crate.
-    pub name: ~str,
+    pub name: StrBuf,
     /// The version of the crate.
-    pub version: Option<~str>,
+    pub version: Option<StrBuf>,
 }
 impl fmt::Show for CrateId {
@@ -38,7 +38,8 @@ impl fmt::Show for CrateId {
            None => "0.0",
            Some(ref version) => version.as_slice(),
        };
-       if self.path == self.name || self.path.ends_with(format!("/{}", self.name)) {
+       if self.path == self.name ||
+               self.path.as_slice().ends_with(format!("/{}", self.name)) {
            write!(f.buf, "\\#{}", version)
        } else {
            write!(f.buf, "\\#{}:{}", self.name, version)
@@ -60,7 +61,7 @@ impl FromStr for CrateId {
        let inferred_name = *path_pieces.get(0);
        let (name, version) = if pieces.len() == 1 {
-           (inferred_name.to_owned(), None)
+           (inferred_name.to_strbuf(), None)
        } else {
            let hash_pieces: Vec<&str> = pieces.get(1)
                                               .splitn(':', 1)
@@ -72,16 +73,16 @@ impl FromStr for CrateId {
            };
            let name = if !hash_name.is_empty() {
-               hash_name.to_owned()
+               hash_name.to_strbuf()
            } else {
-               inferred_name.to_owned()
+               inferred_name.to_strbuf()
            };
            let version = if !hash_version.is_empty() {
                if hash_version == "0.0" {
                    None
                } else {
-                   Some(hash_version.to_owned())
+                   Some(hash_version.to_strbuf())
                }
            } else {
                None
@@ -91,7 +92,7 @@ impl FromStr for CrateId {
        };
        Some(CrateId {
-           path: path.clone(),
+           path: path.to_strbuf(),
            name: name,
            version: version,
        })
@@ -106,8 +107,8 @@ impl CrateId {
        }
    }
-   pub fn short_name_with_version(&self) -> ~str {
-       format!("{}-{}", self.name, self.version_or_default())
+   pub fn short_name_with_version(&self) -> StrBuf {
+       (format!("{}-{}", self.name, self.version_or_default())).to_strbuf()
    }
    pub fn matches(&self, other: &CrateId) -> bool {
@@ -123,17 +124,17 @@ impl CrateId {
 #[test]
 fn bare_name() {
     let crateid: CrateId = from_str("foo").expect("valid crateid");
-    assert_eq!(crateid.name, "foo".to_owned());
+    assert_eq!(crateid.name, "foo".to_strbuf());
     assert_eq!(crateid.version, None);
-    assert_eq!(crateid.path, "foo".to_owned());
+    assert_eq!(crateid.path, "foo".to_strbuf());
 }
 #[test]
 fn bare_name_single_char() {
     let crateid: CrateId = from_str("f").expect("valid crateid");
-    assert_eq!(crateid.name, "f".to_owned());
+    assert_eq!(crateid.name, "f".to_strbuf());
     assert_eq!(crateid.version, None);
-    assert_eq!(crateid.path, "f".to_owned());
+    assert_eq!(crateid.path, "f".to_strbuf());
 }
 #[test]
@@ -145,17 +146,17 @@ fn empty_crateid() {
 #[test]
 fn simple_path() {
     let crateid: CrateId = from_str("example.com/foo/bar").expect("valid crateid");
-    assert_eq!(crateid.name, "bar".to_owned());
+    assert_eq!(crateid.name, "bar".to_strbuf());
     assert_eq!(crateid.version, None);
-    assert_eq!(crateid.path, "example.com/foo/bar".to_owned());
+    assert_eq!(crateid.path, "example.com/foo/bar".to_strbuf());
 }
 #[test]
 fn simple_version() {
     let crateid: CrateId = from_str("foo#1.0").expect("valid crateid");
-    assert_eq!(crateid.name, "foo".to_owned());
-    assert_eq!(crateid.version, Some("1.0".to_owned()));
-    assert_eq!(crateid.path, "foo".to_owned());
+    assert_eq!(crateid.name, "foo".to_strbuf());
+    assert_eq!(crateid.version, Some("1.0".to_strbuf()));
+    assert_eq!(crateid.path, "foo".to_strbuf());
 }
 #[test]
@@ -173,39 +174,39 @@ fn path_ends_with_slash() {
 #[test]
 fn path_and_version() {
     let crateid: CrateId = from_str("example.com/foo/bar#1.0").expect("valid crateid");
-    assert_eq!(crateid.name, "bar".to_owned());
-    assert_eq!(crateid.version, Some("1.0".to_owned()));
-    assert_eq!(crateid.path, "example.com/foo/bar".to_owned());
+    assert_eq!(crateid.name, "bar".to_strbuf());
+    assert_eq!(crateid.version, Some("1.0".to_strbuf()));
+    assert_eq!(crateid.path, "example.com/foo/bar".to_strbuf());
 }
 #[test]
 fn single_chars() {
     let crateid: CrateId = from_str("a/b#1").expect("valid crateid");
-    assert_eq!(crateid.name, "b".to_owned());
-    assert_eq!(crateid.version, Some("1".to_owned()));
-    assert_eq!(crateid.path, "a/b".to_owned());
+    assert_eq!(crateid.name, "b".to_strbuf());
+    assert_eq!(crateid.version, Some("1".to_strbuf()));
+    assert_eq!(crateid.path, "a/b".to_strbuf());
 }
 #[test]
 fn missing_version() {
     let crateid: CrateId = from_str("foo#").expect("valid crateid");
-    assert_eq!(crateid.name, "foo".to_owned());
+    assert_eq!(crateid.name, "foo".to_strbuf());
     assert_eq!(crateid.version, None);
-    assert_eq!(crateid.path, "foo".to_owned());
+    assert_eq!(crateid.path, "foo".to_strbuf());
 }
 #[test]
 fn path_and_name() {
     let crateid: CrateId = from_str("foo/rust-bar#bar:1.0").expect("valid crateid");
-    assert_eq!(crateid.name, "bar".to_owned());
-    assert_eq!(crateid.version, Some("1.0".to_owned()));
-    assert_eq!(crateid.path, "foo/rust-bar".to_owned());
+    assert_eq!(crateid.name, "bar".to_strbuf());
+    assert_eq!(crateid.version, Some("1.0".to_strbuf()));
+    assert_eq!(crateid.path, "foo/rust-bar".to_strbuf());
 }
 #[test]
 fn empty_name() {
     let crateid: CrateId = from_str("foo/bar#:1.0").expect("valid crateid");
-    assert_eq!(crateid.name, "bar".to_owned());
-    assert_eq!(crateid.version, Some("1.0".to_owned()));
-    assert_eq!(crateid.path, "foo/bar".to_owned());
+    assert_eq!(crateid.name, "bar".to_strbuf());
+    assert_eq!(crateid.version, Some("1.0".to_strbuf()));
+    assert_eq!(crateid.path, "foo/bar".to_strbuf());
 }

View file

@ -320,12 +320,12 @@ fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan,
// the span) // the span)
let span_end = Span { lo: sp.hi, hi: sp.hi, expn_info: sp.expn_info}; let span_end = Span { lo: sp.hi, hi: sp.hi, expn_info: sp.expn_info};
let ses = cm.span_to_str(span_end); let ses = cm.span_to_str(span_end);
try!(print_diagnostic(dst, ses, lvl, msg)); try!(print_diagnostic(dst, ses.as_slice(), lvl, msg));
if rsp.is_full_span() { if rsp.is_full_span() {
try!(custom_highlight_lines(dst, cm, sp, lvl, lines)); try!(custom_highlight_lines(dst, cm, sp, lvl, lines));
} }
} else { } else {
try!(print_diagnostic(dst, ss, lvl, msg)); try!(print_diagnostic(dst, ss.as_slice(), lvl, msg));
if rsp.is_full_span() { if rsp.is_full_span() {
try!(highlight_lines(dst, cm, sp, lvl, lines)); try!(highlight_lines(dst, cm, sp, lvl, lines));
} }
@ -378,7 +378,7 @@ fn highlight_lines(err: &mut EmitterWriter,
} }
let orig = fm.get_line(*lines.lines.get(0) as int); let orig = fm.get_line(*lines.lines.get(0) as int);
for pos in range(0u, left-skip) { for pos in range(0u, left-skip) {
let cur_char = orig[pos] as char; let cur_char = orig.as_slice()[pos] as char;
// Whenever a tab occurs on the previous line, we insert one on // Whenever a tab occurs on the previous line, we insert one on
// the error-point-squiggly-line as well (instead of a space). // the error-point-squiggly-line as well (instead of a space).
// That way the squiggly line will usually appear in the correct // That way the squiggly line will usually appear in the correct
@ -452,24 +452,28 @@ fn print_macro_backtrace(w: &mut EmitterWriter,
sp: Span) sp: Span)
-> io::IoResult<()> { -> io::IoResult<()> {
for ei in sp.expn_info.iter() { for ei in sp.expn_info.iter() {
let ss = ei.callee.span.as_ref().map_or("".to_owned(), |span| cm.span_to_str(*span)); let ss = ei.callee
.span
.as_ref()
.map_or("".to_strbuf(), |span| cm.span_to_str(*span));
let (pre, post) = match ei.callee.format { let (pre, post) = match ei.callee.format {
codemap::MacroAttribute => ("#[", "]"), codemap::MacroAttribute => ("#[", "]"),
codemap::MacroBang => ("", "!") codemap::MacroBang => ("", "!")
}; };
try!(print_diagnostic(w, ss, Note, try!(print_diagnostic(w, ss.as_slice(), Note,
format!("in expansion of {}{}{}", pre, format!("in expansion of {}{}{}", pre,
ei.callee.name, post))); ei.callee.name, post)));
let ss = cm.span_to_str(ei.call_site); let ss = cm.span_to_str(ei.call_site);
try!(print_diagnostic(w, ss, Note, "expansion site")); try!(print_diagnostic(w, ss.as_slice(), Note, "expansion site"));
try!(print_macro_backtrace(w, cm, ei.call_site)); try!(print_macro_backtrace(w, cm, ei.call_site));
} }
Ok(()) Ok(())
} }
pub fn expect<T:Clone>(diag: &SpanHandler, opt: Option<T>, msg: || -> ~str) -> T { pub fn expect<T:Clone>(diag: &SpanHandler, opt: Option<T>, msg: || -> StrBuf)
-> T {
match opt { match opt {
Some(ref t) => (*t).clone(), Some(ref t) => (*t).clone(),
None => diag.handler().bug(msg()), None => diag.handler().bug(msg().as_slice()),
} }
} }
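The recurring pattern in this file: functions like `span_to_str` now return an owned `StrBuf`, and callers borrow with `.as_slice()` wherever the downstream API still wants `&str`; `expect`'s lazy message closure changes the same way. A minimal modern-Rust sketch of that borrow-at-the-call-site pattern follows; `print_diagnostic` here is a hypothetical stand-in, not the real diagnostics API.

```rust
// Owned messages, &str at the API boundary (as_slice in 2014, as_str today).
fn print_diagnostic(out: &mut String, span: &str, msg: &str) {
    out.push_str(span);
    out.push_str(": ");
    out.push_str(msg);
    out.push('\n');
}

// Mirrors the new expect() signature: the message closure yields an owned
// string, and the failure path borrows it, like bug(msg().as_slice()).
fn expect<T: Clone>(opt: Option<&T>, msg: impl FnOnce() -> String) -> T {
    match opt {
        Some(t) => t.clone(),
        None => panic!("{}", msg()),
    }
}

fn main() {
    let mut out = String::new();
    let ses = format!("src/lib.rs:{}:{}", 3, 14); // owned, like span_to_str's result
    print_diagnostic(&mut out, ses.as_str(), "expansion site");
    assert_eq!(expect(Some(&7u32), || "missing".to_string()), 7);
    print!("{out}");
}
```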

View file

@ -30,7 +30,7 @@ use collections::HashMap;
// ast::MacInvocTT. // ast::MacInvocTT.
pub struct MacroDef { pub struct MacroDef {
pub name: ~str, pub name: StrBuf,
pub ext: SyntaxExtension pub ext: SyntaxExtension
} }
@ -361,8 +361,8 @@ pub fn syntax_expander_table() -> SyntaxEnv {
pub struct MacroCrate { pub struct MacroCrate {
pub lib: Option<Path>, pub lib: Option<Path>,
pub macros: Vec<~str>, pub macros: Vec<StrBuf>,
pub registrar_symbol: Option<~str>, pub registrar_symbol: Option<StrBuf>,
} }
pub trait CrateLoader { pub trait CrateLoader {
@ -425,7 +425,7 @@ impl<'a> ExtCtxt<'a> {
pub fn mod_pop(&mut self) { self.mod_path.pop().unwrap(); } pub fn mod_pop(&mut self) { self.mod_path.pop().unwrap(); }
pub fn mod_path(&self) -> Vec<ast::Ident> { pub fn mod_path(&self) -> Vec<ast::Ident> {
let mut v = Vec::new(); let mut v = Vec::new();
v.push(token::str_to_ident(self.ecfg.crate_id.name)); v.push(token::str_to_ident(self.ecfg.crate_id.name.as_slice()));
v.extend(self.mod_path.iter().map(|a| *a)); v.extend(self.mod_path.iter().map(|a| *a));
return v; return v;
} }
@ -540,14 +540,14 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt,
sp: Span, sp: Span,
tts: &[ast::TokenTree], tts: &[ast::TokenTree],
name: &str) name: &str)
-> Option<~str> { -> Option<StrBuf> {
if tts.len() != 1 { if tts.len() != 1 {
cx.span_err(sp, format!("{} takes 1 argument.", name)); cx.span_err(sp, format!("{} takes 1 argument.", name));
} else { } else {
match tts[0] { match tts[0] {
ast::TTTok(_, token::LIT_STR(ident)) ast::TTTok(_, token::LIT_STR(ident))
| ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => { | ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => {
return Some(token::get_ident(ident).get().to_str()) return Some(token::get_ident(ident).get().to_strbuf())
} }
_ => cx.span_err(sp, format!("{} requires a string.", name)), _ => cx.span_err(sp, format!("{} requires a string.", name)),
} }
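`MacroDef` and `MacroCrate` now own their names as `StrBuf`, while the interner keeps its `&str` interface, which is why `intern(name.as_slice())` shows up later in this commit. A small sketch under those assumptions; the `Interner` type below is invented for illustration and is not the real `token::intern` machinery.

```rust
use std::collections::HashMap;

struct Interner {
    map: HashMap<String, u32>,
}

impl Interner {
    fn intern(&mut self, s: &str) -> u32 {
        let next = self.map.len() as u32;
        *self.map.entry(s.to_string()).or_insert(next)
    }
}

struct MacroDef {
    name: String, // was ~str, now owned StrBuf/String
}

fn main() {
    let mut interner = Interner { map: HashMap::new() };
    let def = MacroDef { name: "vec".to_string() };
    // The diff's intern(name.as_slice()) becomes a plain borrow:
    assert_eq!(interner.intern(def.name.as_str()), 0);
    assert_eq!(interner.intern("vec"), 0); // same symbol on re-intern
}
```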

View file

@ -639,7 +639,9 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
vec!( vec!(
self.expr_str(span, msg), self.expr_str(span, msg),
self.expr_str(span, self.expr_str(span,
token::intern_and_get_ident(loc.file.name)), token::intern_and_get_ident(loc.file
.name
.as_slice())),
self.expr_uint(span, loc.line))) self.expr_uint(span, loc.line)))
} }

View file

@ -987,7 +987,7 @@ impl<'a> TraitDef<'a> {
to_set.expn_info = Some(@codemap::ExpnInfo { to_set.expn_info = Some(@codemap::ExpnInfo {
call_site: to_set, call_site: to_set,
callee: codemap::NameAndSpan { callee: codemap::NameAndSpan {
name: format!("deriving({})", trait_name), name: format!("deriving({})", trait_name).to_strbuf(),
format: codemap::MacroAttribute, format: codemap::MacroAttribute,
span: Some(self.span) span: Some(self.span)
} }
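`NameAndSpan.name` becomes an owned buffer, so a `format!` result can be stored directly (modulo the era's trailing `.to_strbuf()`). A toy sketch, with `is_attribute` standing in for the `MacroAttribute`/`MacroBang` distinction:

```rust
// Hypothetical simplification of the NameAndSpan change.
struct NameAndSpan {
    name: String, // was ~str
    is_attribute: bool,
}

fn deriving_info(trait_name: &str) -> NameAndSpan {
    NameAndSpan {
        name: format!("deriving({})", trait_name), // format! yields an owned string
        is_attribute: true,
    }
}

fn main() {
    let info = deriving_info("Eq");
    assert!(info.is_attribute);
    assert_eq!(info.name, "deriving(Eq)");
}
```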

View file

@ -30,7 +30,7 @@ pub fn expand_option_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
Some(v) => v Some(v) => v
}; };
let e = match os::getenv(var) { let e = match os::getenv(var.as_slice()) {
None => { None => {
cx.expr_path(cx.path_all(sp, cx.expr_path(cx.path_all(sp,
true, true,

View file

@ -71,7 +71,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr {
fld.cx.bt_push(ExpnInfo { fld.cx.bt_push(ExpnInfo {
call_site: e.span, call_site: e.span,
callee: NameAndSpan { callee: NameAndSpan {
name: extnamestr.get().to_str(), name: extnamestr.get().to_strbuf(),
format: MacroBang, format: MacroBang,
span: exp_span, span: exp_span,
}, },
@ -270,7 +270,7 @@ pub fn expand_item(it: @ast::Item, fld: &mut MacroExpander)
fld.cx.bt_push(ExpnInfo { fld.cx.bt_push(ExpnInfo {
call_site: attr.span, call_site: attr.span,
callee: NameAndSpan { callee: NameAndSpan {
name: mname.get().to_str(), name: mname.get().to_strbuf(),
format: MacroAttribute, format: MacroAttribute,
span: None span: None
} }
@ -334,7 +334,7 @@ fn expand_item_modifiers(mut it: @ast::Item, fld: &mut MacroExpander)
fld.cx.bt_push(ExpnInfo { fld.cx.bt_push(ExpnInfo {
call_site: attr.span, call_site: attr.span,
callee: NameAndSpan { callee: NameAndSpan {
name: mname.get().to_str(), name: mname.get().to_strbuf(),
format: MacroAttribute, format: MacroAttribute,
span: None, span: None,
} }
@ -393,7 +393,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
fld.cx.bt_push(ExpnInfo { fld.cx.bt_push(ExpnInfo {
call_site: it.span, call_site: it.span,
callee: NameAndSpan { callee: NameAndSpan {
name: extnamestr.get().to_str(), name: extnamestr.get().to_strbuf(),
format: MacroBang, format: MacroBang,
span: span span: span
} }
@ -412,7 +412,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
fld.cx.bt_push(ExpnInfo { fld.cx.bt_push(ExpnInfo {
call_site: it.span, call_site: it.span,
callee: NameAndSpan { callee: NameAndSpan {
name: extnamestr.get().to_str(), name: extnamestr.get().to_strbuf(),
format: MacroBang, format: MacroBang,
span: span span: span
} }
@ -433,7 +433,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander)
Some(MacroDef { name, ext }) => { Some(MacroDef { name, ext }) => {
// yikes... no idea how to apply the mark to this. I'm afraid // yikes... no idea how to apply the mark to this. I'm afraid
// we're going to have to wait-and-see on this one. // we're going to have to wait-and-see on this one.
fld.extsbox.insert(intern(name), ext); fld.extsbox.insert(intern(name.as_slice()), ext);
if attr::contains_name(it.attrs.as_slice(), "macro_export") { if attr::contains_name(it.attrs.as_slice(), "macro_export") {
SmallVector::one(it) SmallVector::one(it)
} else { } else {
@ -493,6 +493,7 @@ fn load_extern_macros(krate: &ast::ViewItem, fld: &mut MacroExpander) {
_ => unreachable!() _ => unreachable!()
}; };
let name = format!("<{} macros>", token::get_ident(crate_name)); let name = format!("<{} macros>", token::get_ident(crate_name));
let name = name.to_strbuf();
for source in macros.iter() { for source in macros.iter() {
let item = parse::parse_item_from_source_str(name.clone(), let item = parse::parse_item_from_source_str(name.clone(),
@ -524,7 +525,8 @@ fn load_extern_macros(krate: &ast::ViewItem, fld: &mut MacroExpander) {
}; };
unsafe { unsafe {
let registrar: MacroCrateRegistrationFun = match lib.symbol(registrar) { let registrar: MacroCrateRegistrationFun =
match lib.symbol(registrar.as_slice()) {
Ok(registrar) => registrar, Ok(registrar) => registrar,
// again fatal if we can't register macros // again fatal if we can't register macros
Err(err) => fld.cx.span_fatal(krate.span, err) Err(err) => fld.cx.span_fatal(krate.span, err)
@ -576,7 +578,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> {
fld.cx.bt_push(ExpnInfo { fld.cx.bt_push(ExpnInfo {
call_site: s.span, call_site: s.span,
callee: NameAndSpan { callee: NameAndSpan {
name: extnamestr.get().to_str(), name: extnamestr.get().to_strbuf(),
format: MacroBang, format: MacroBang,
span: exp_span, span: exp_span,
} }
@ -1020,10 +1022,10 @@ mod test {
#[should_fail] #[should_fail]
#[test] fn macros_cant_escape_fns_test () { #[test] fn macros_cant_escape_fns_test () {
let src = "fn bogus() {macro_rules! z (() => (3+4))}\ let src = "fn bogus() {macro_rules! z (() => (3+4))}\
fn inty() -> int { z!() }".to_owned(); fn inty() -> int { z!() }".to_strbuf();
let sess = parse::new_parse_sess(); let sess = parse::new_parse_sess();
let crate_ast = parse::parse_crate_from_source_str( let crate_ast = parse::parse_crate_from_source_str(
"<test>".to_owned(), "<test>".to_strbuf(),
src, src,
Vec::new(), &sess); Vec::new(), &sess);
// should fail: // should fail:
@ -1040,10 +1042,10 @@ mod test {
#[should_fail] #[should_fail]
#[test] fn macros_cant_escape_mods_test () { #[test] fn macros_cant_escape_mods_test () {
let src = "mod foo {macro_rules! z (() => (3+4))}\ let src = "mod foo {macro_rules! z (() => (3+4))}\
fn inty() -> int { z!() }".to_owned(); fn inty() -> int { z!() }".to_strbuf();
let sess = parse::new_parse_sess(); let sess = parse::new_parse_sess();
let crate_ast = parse::parse_crate_from_source_str( let crate_ast = parse::parse_crate_from_source_str(
"<test>".to_owned(), "<test>".to_strbuf(),
src, src,
Vec::new(), &sess); Vec::new(), &sess);
// should fail: // should fail:
@ -1059,10 +1061,10 @@ mod test {
// macro_escape modules shouldn't cause macros to leave scope // macro_escape modules shouldn't cause macros to leave scope
#[test] fn macros_can_escape_flattened_mods_test () { #[test] fn macros_can_escape_flattened_mods_test () {
let src = "#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\ let src = "#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\
fn inty() -> int { z!() }".to_owned(); fn inty() -> int { z!() }".to_strbuf();
let sess = parse::new_parse_sess(); let sess = parse::new_parse_sess();
let crate_ast = parse::parse_crate_from_source_str( let crate_ast = parse::parse_crate_from_source_str(
"<test>".to_owned(), "<test>".to_strbuf(),
src, src,
Vec::new(), &sess); Vec::new(), &sess);
// should fail: // should fail:
@ -1100,7 +1102,7 @@ mod test {
} }
} }
fn expand_crate_str(crate_str: ~str) -> ast::Crate { fn expand_crate_str(crate_str: StrBuf) -> ast::Crate {
let ps = parse::new_parse_sess(); let ps = parse::new_parse_sess();
let crate_ast = string_to_parser(&ps, crate_str).parse_crate_mod(); let crate_ast = string_to_parser(&ps, crate_str).parse_crate_mod();
// the cfg argument actually does matter, here... // the cfg argument actually does matter, here...
@ -1118,13 +1120,14 @@ mod test {
// println!("expanded: {:?}\n",expanded_ast); // println!("expanded: {:?}\n",expanded_ast);
//mtwt_resolve_crate(expanded_ast) //mtwt_resolve_crate(expanded_ast)
//} //}
//fn expand_and_resolve_and_pretty_print (crate_str: @str) -> ~str { //fn expand_and_resolve_and_pretty_print (crate_str: @str) -> StrBuf {
//let resolved_ast = expand_and_resolve(crate_str); //let resolved_ast = expand_and_resolve(crate_str);
//pprust::to_str(&resolved_ast,fake_print_crate,get_ident_interner()) //pprust::to_str(&resolved_ast,fake_print_crate,get_ident_interner())
//} //}
#[test] fn macro_tokens_should_match(){ #[test] fn macro_tokens_should_match(){
expand_crate_str("macro_rules! m((a)=>(13)) fn main(){m!(a);}".to_owned()); expand_crate_str(
"macro_rules! m((a)=>(13)) fn main(){m!(a);}".to_strbuf());
} }
// renaming tests expand a crate and then check that the bindings match // renaming tests expand a crate and then check that the bindings match
@ -1182,7 +1185,7 @@ mod test {
let (teststr, bound_connections, bound_ident_check) = match *t { let (teststr, bound_connections, bound_ident_check) = match *t {
(ref str,ref conns, bic) => (str.to_owned(), conns.clone(), bic) (ref str,ref conns, bic) => (str.to_owned(), conns.clone(), bic)
}; };
let cr = expand_crate_str(teststr.to_owned()); let cr = expand_crate_str(teststr.to_strbuf());
// find the bindings: // find the bindings:
let mut name_finder = new_name_finder(Vec::new()); let mut name_finder = new_name_finder(Vec::new());
visit::walk_crate(&mut name_finder,&cr,()); visit::walk_crate(&mut name_finder,&cr,());
@ -1257,7 +1260,7 @@ mod test {
let crate_str = "macro_rules! fmt_wrap(($b:expr)=>($b.to_str())) let crate_str = "macro_rules! fmt_wrap(($b:expr)=>($b.to_str()))
macro_rules! foo_module (() => (mod generated { fn a() { let xx = 147; fmt_wrap!(xx);}})) macro_rules! foo_module (() => (mod generated { fn a() { let xx = 147; fmt_wrap!(xx);}}))
foo_module!() foo_module!()
".to_owned(); ".to_strbuf();
let cr = expand_crate_str(crate_str); let cr = expand_crate_str(crate_str);
// find the xx binding // find the xx binding
let mut name_finder = new_name_finder(Vec::new()); let mut name_finder = new_name_finder(Vec::new());
@ -1303,7 +1306,8 @@ foo_module!()
#[test] #[test]
fn pat_idents(){ fn pat_idents(){
let pat = string_to_pat("(a,Foo{x:c @ (b,9),y:Bar(4,d)})".to_owned()); let pat = string_to_pat(
"(a,Foo{x:c @ (b,9),y:Bar(4,d)})".to_strbuf());
let mut pat_idents = new_name_finder(Vec::new()); let mut pat_idents = new_name_finder(Vec::new());
pat_idents.visit_pat(pat, ()); pat_idents.visit_pat(pat, ());
assert_eq!(pat_idents.ident_accumulator, assert_eq!(pat_idents.ident_accumulator,

View file

@ -23,14 +23,14 @@ use collections::{HashMap, HashSet};
#[deriving(Eq)] #[deriving(Eq)]
enum ArgumentType { enum ArgumentType {
Known(~str), Known(StrBuf),
Unsigned, Unsigned,
String, String,
} }
enum Position { enum Position {
Exact(uint), Exact(uint),
Named(~str), Named(StrBuf),
} }
struct Context<'a, 'b> { struct Context<'a, 'b> {
@ -45,13 +45,13 @@ struct Context<'a, 'b> {
// Note that we keep a side-array of the ordering of the named arguments // Note that we keep a side-array of the ordering of the named arguments
// found to be sure that we can translate them in the same order that they // found to be sure that we can translate them in the same order that they
// were declared in. // were declared in.
names: HashMap<~str, @ast::Expr>, names: HashMap<StrBuf, @ast::Expr>,
name_types: HashMap<~str, ArgumentType>, name_types: HashMap<StrBuf, ArgumentType>,
name_ordering: Vec<~str>, name_ordering: Vec<StrBuf>,
// Collection of the compiled `rt::Piece` structures // Collection of the compiled `rt::Piece` structures
pieces: Vec<@ast::Expr> , pieces: Vec<@ast::Expr> ,
name_positions: HashMap<~str, uint>, name_positions: HashMap<StrBuf, uint>,
method_statics: Vec<@ast::Item> , method_statics: Vec<@ast::Item> ,
// Updated as arguments are consumed or methods are entered // Updated as arguments are consumed or methods are entered
@ -68,10 +68,10 @@ struct Context<'a, 'b> {
/// Some((fmtstr, unnamed arguments, ordering of named arguments, /// Some((fmtstr, unnamed arguments, ordering of named arguments,
/// named arguments)) /// named arguments))
fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> (@ast::Expr, Option<(@ast::Expr, Vec<@ast::Expr>, Vec<~str>, -> (@ast::Expr, Option<(@ast::Expr, Vec<@ast::Expr>, Vec<StrBuf>,
HashMap<~str, @ast::Expr>)>) { HashMap<StrBuf, @ast::Expr>)>) {
let mut args = Vec::new(); let mut args = Vec::new();
let mut names = HashMap::<~str, @ast::Expr>::new(); let mut names = HashMap::<StrBuf, @ast::Expr>::new();
let mut order = Vec::new(); let mut order = Vec::new();
let mut p = rsparse::new_parser_from_tts(ecx.parse_sess(), let mut p = rsparse::new_parser_from_tts(ecx.parse_sess(),
@ -131,8 +131,8 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
continue continue
} }
} }
order.push(name.to_str()); order.push(name.to_strbuf());
names.insert(name.to_str(), e); names.insert(name.to_strbuf(), e);
} else { } else {
args.push(p.parse_expr()); args.push(p.parse_expr());
} }
@ -171,13 +171,13 @@ impl<'a, 'b> Context<'a, 'b> {
Exact(i) Exact(i)
} }
parse::ArgumentIs(i) => Exact(i), parse::ArgumentIs(i) => Exact(i),
parse::ArgumentNamed(s) => Named(s.to_str()), parse::ArgumentNamed(s) => Named(s.to_strbuf()),
}; };
// and finally the method being applied // and finally the method being applied
match arg.method { match arg.method {
None => { None => {
let ty = Known(arg.format.ty.to_str()); let ty = Known(arg.format.ty.to_strbuf());
self.verify_arg_type(pos, ty); self.verify_arg_type(pos, ty);
} }
Some(ref method) => { self.verify_method(pos, *method); } Some(ref method) => { self.verify_method(pos, *method); }
@ -199,7 +199,7 @@ impl<'a, 'b> Context<'a, 'b> {
self.verify_arg_type(Exact(i), Unsigned); self.verify_arg_type(Exact(i), Unsigned);
} }
parse::CountIsName(s) => { parse::CountIsName(s) => {
self.verify_arg_type(Named(s.to_str()), Unsigned); self.verify_arg_type(Named(s.to_strbuf()), Unsigned);
} }
parse::CountIsNextParam => { parse::CountIsNextParam => {
if self.check_positional_ok() { if self.check_positional_ok() {
@ -822,8 +822,8 @@ pub fn expand_args(ecx: &mut ExtCtxt, sp: Span,
pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
extra: @ast::Expr, extra: @ast::Expr,
efmt: @ast::Expr, args: Vec<@ast::Expr>, efmt: @ast::Expr, args: Vec<@ast::Expr>,
name_ordering: Vec<~str>, name_ordering: Vec<StrBuf>,
names: HashMap<~str, @ast::Expr>) -> @ast::Expr { names: HashMap<StrBuf, @ast::Expr>) -> @ast::Expr {
let arg_types = Vec::from_fn(args.len(), |_| None); let arg_types = Vec::from_fn(args.len(), |_| None);
let mut cx = Context { let mut cx = Context {
ecx: ecx, ecx: ecx,

View file

@ -55,7 +55,7 @@ pub mod rt {
trait ToSource : ToTokens { trait ToSource : ToTokens {
// Takes a thing and generates a string containing rust code for it. // Takes a thing and generates a string containing rust code for it.
pub fn to_source() -> ~str; pub fn to_source() -> StrBuf;
// If you can make source, you can definitely make tokens. // If you can make source, you can definitely make tokens.
pub fn to_tokens(cx: &ExtCtxt) -> ~[TokenTree] { pub fn to_tokens(cx: &ExtCtxt) -> ~[TokenTree] {
@ -67,59 +67,67 @@ pub mod rt {
pub trait ToSource { pub trait ToSource {
// Takes a thing and generates a string containing rust code for it. // Takes a thing and generates a string containing rust code for it.
fn to_source(&self) -> ~str; fn to_source(&self) -> StrBuf;
} }
impl ToSource for ast::Ident { impl ToSource for ast::Ident {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
get_ident(*self).get().to_str() get_ident(*self).get().to_strbuf()
} }
} }
impl ToSource for @ast::Item { impl ToSource for @ast::Item {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
pprust::item_to_str(*self) pprust::item_to_str(*self)
} }
} }
impl<'a> ToSource for &'a [@ast::Item] { impl<'a> ToSource for &'a [@ast::Item] {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
self.iter().map(|i| i.to_source()).collect::<Vec<~str>>().connect("\n\n") self.iter()
.map(|i| i.to_source())
.collect::<Vec<StrBuf>>()
.connect("\n\n")
.to_strbuf()
} }
} }
impl ToSource for ast::Ty { impl ToSource for ast::Ty {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
pprust::ty_to_str(self) pprust::ty_to_str(self)
} }
} }
impl<'a> ToSource for &'a [ast::Ty] { impl<'a> ToSource for &'a [ast::Ty] {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
self.iter().map(|i| i.to_source()).collect::<Vec<~str>>().connect(", ") self.iter()
.map(|i| i.to_source())
.collect::<Vec<StrBuf>>()
.connect(", ")
.to_strbuf()
} }
} }
impl ToSource for Generics { impl ToSource for Generics {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
pprust::generics_to_str(self) pprust::generics_to_str(self)
} }
} }
impl ToSource for @ast::Expr { impl ToSource for @ast::Expr {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
pprust::expr_to_str(*self) pprust::expr_to_str(*self)
} }
} }
impl ToSource for ast::Block { impl ToSource for ast::Block {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
pprust::block_to_str(self) pprust::block_to_str(self)
} }
} }
impl<'a> ToSource for &'a str { impl<'a> ToSource for &'a str {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitStr( let lit = dummy_spanned(ast::LitStr(
token::intern_and_get_ident(*self), ast::CookedStr)); token::intern_and_get_ident(*self), ast::CookedStr));
pprust::lit_to_str(&lit) pprust::lit_to_str(&lit)
@ -127,41 +135,41 @@ pub mod rt {
} }
impl ToSource for () { impl ToSource for () {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
"()".to_owned() "()".to_strbuf()
} }
} }
impl ToSource for bool { impl ToSource for bool {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitBool(*self)); let lit = dummy_spanned(ast::LitBool(*self));
pprust::lit_to_str(&lit) pprust::lit_to_str(&lit)
} }
} }
impl ToSource for char { impl ToSource for char {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitChar(*self)); let lit = dummy_spanned(ast::LitChar(*self));
pprust::lit_to_str(&lit) pprust::lit_to_str(&lit)
} }
} }
impl ToSource for int { impl ToSource for int {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI)); let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI));
pprust::lit_to_str(&lit) pprust::lit_to_str(&lit)
} }
} }
impl ToSource for i8 { impl ToSource for i8 {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI8)); let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI8));
pprust::lit_to_str(&lit) pprust::lit_to_str(&lit)
} }
} }
impl ToSource for i16 { impl ToSource for i16 {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI16)); let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI16));
pprust::lit_to_str(&lit) pprust::lit_to_str(&lit)
} }
@ -169,49 +177,49 @@ pub mod rt {
impl ToSource for i32 { impl ToSource for i32 {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI32)); let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI32));
pprust::lit_to_str(&lit) pprust::lit_to_str(&lit)
} }
} }
impl ToSource for i64 { impl ToSource for i64 {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI64)); let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI64));
pprust::lit_to_str(&lit) pprust::lit_to_str(&lit)
} }
} }
impl ToSource for uint { impl ToSource for uint {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU)); let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU));
pprust::lit_to_str(&lit) pprust::lit_to_str(&lit)
} }
} }
impl ToSource for u8 { impl ToSource for u8 {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU8)); let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU8));
pprust::lit_to_str(&lit) pprust::lit_to_str(&lit)
} }
} }
impl ToSource for u16 { impl ToSource for u16 {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU16)); let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU16));
pprust::lit_to_str(&lit) pprust::lit_to_str(&lit)
} }
} }
impl ToSource for u32 { impl ToSource for u32 {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU32)); let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU32));
pprust::lit_to_str(&lit) pprust::lit_to_str(&lit)
} }
} }
impl ToSource for u64 { impl ToSource for u64 {
fn to_source(&self) -> ~str { fn to_source(&self) -> StrBuf {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU64)); let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU64));
pprust::lit_to_str(&lit) pprust::lit_to_str(&lit)
} }
@ -263,17 +271,17 @@ pub mod rt {
impl_to_tokens!(u64) impl_to_tokens!(u64)
pub trait ExtParseUtils { pub trait ExtParseUtils {
fn parse_item(&self, s: ~str) -> @ast::Item; fn parse_item(&self, s: StrBuf) -> @ast::Item;
fn parse_expr(&self, s: ~str) -> @ast::Expr; fn parse_expr(&self, s: StrBuf) -> @ast::Expr;
fn parse_stmt(&self, s: ~str) -> @ast::Stmt; fn parse_stmt(&self, s: StrBuf) -> @ast::Stmt;
fn parse_tts(&self, s: ~str) -> Vec<ast::TokenTree> ; fn parse_tts(&self, s: StrBuf) -> Vec<ast::TokenTree> ;
} }
impl<'a> ExtParseUtils for ExtCtxt<'a> { impl<'a> ExtParseUtils for ExtCtxt<'a> {
fn parse_item(&self, s: ~str) -> @ast::Item { fn parse_item(&self, s: StrBuf) -> @ast::Item {
let res = parse::parse_item_from_source_str( let res = parse::parse_item_from_source_str(
"<quote expansion>".to_str(), "<quote expansion>".to_strbuf(),
s, s,
self.cfg(), self.cfg(),
self.parse_sess()); self.parse_sess());
@ -286,23 +294,23 @@ pub mod rt {
} }
} }
fn parse_stmt(&self, s: ~str) -> @ast::Stmt { fn parse_stmt(&self, s: StrBuf) -> @ast::Stmt {
parse::parse_stmt_from_source_str("<quote expansion>".to_str(), parse::parse_stmt_from_source_str("<quote expansion>".to_strbuf(),
s, s,
self.cfg(), self.cfg(),
Vec::new(), Vec::new(),
self.parse_sess()) self.parse_sess())
} }
fn parse_expr(&self, s: ~str) -> @ast::Expr { fn parse_expr(&self, s: StrBuf) -> @ast::Expr {
parse::parse_expr_from_source_str("<quote expansion>".to_str(), parse::parse_expr_from_source_str("<quote expansion>".to_strbuf(),
s, s,
self.cfg(), self.cfg(),
self.parse_sess()) self.parse_sess())
} }
fn parse_tts(&self, s: ~str) -> Vec<ast::TokenTree> { fn parse_tts(&self, s: StrBuf) -> Vec<ast::TokenTree> {
parse::parse_tts_from_source_str("<quote expansion>".to_str(), parse::parse_tts_from_source_str("<quote expansion>".to_strbuf(),
s, s,
self.cfg(), self.cfg(),
self.parse_sess()) self.parse_sess())
@ -367,8 +375,8 @@ pub fn expand_quote_stmt(cx: &mut ExtCtxt,
base::MacExpr::new(expanded) base::MacExpr::new(expanded)
} }
fn ids_ext(strs: Vec<~str> ) -> Vec<ast::Ident> { fn ids_ext(strs: Vec<StrBuf> ) -> Vec<ast::Ident> {
strs.iter().map(|str| str_to_ident(*str)).collect() strs.iter().map(|str| str_to_ident((*str).as_slice())).collect()
} }
fn id_ext(str: &str) -> ast::Ident { fn id_ext(str: &str) -> ast::Ident {
@ -678,11 +686,11 @@ fn expand_wrapper(cx: &ExtCtxt,
sp: Span, sp: Span,
cx_expr: @ast::Expr, cx_expr: @ast::Expr,
expr: @ast::Expr) -> @ast::Expr { expr: @ast::Expr) -> @ast::Expr {
let uses = vec!( cx.view_use_glob(sp, ast::Inherited, let uses = vec![ cx.view_use_glob(sp, ast::Inherited,
ids_ext(vec!("syntax".to_owned(), ids_ext(vec!["syntax".to_strbuf(),
"ext".to_owned(), "ext".to_strbuf(),
"quote".to_owned(), "quote".to_strbuf(),
"rt".to_owned()))) ); "rt".to_strbuf()])) ];
let stmt_let_ext_cx = cx.stmt_let(sp, false, id_ext("ext_cx"), cx_expr); let stmt_let_ext_cx = cx.stmt_let(sp, false, id_ext("ext_cx"), cx_expr);
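Every `ToSource` impl in this file now returns an owned string, and the slice impls collect into `Vec<StrBuf>` before joining. The sketch below reproduces that trait shape in modern Rust; `connect` is today's `join`, and the primitive impls are stand-ins for the `pprust::lit_to_str` pretty-printing the real code uses.

```rust
trait ToSource {
    fn to_source(&self) -> String;
}

impl ToSource for bool {
    fn to_source(&self) -> String {
        self.to_string() // stand-in for pretty-printing a LitBool
    }
}

impl ToSource for i32 {
    fn to_source(&self) -> String {
        self.to_string() // likewise a stand-in for pprust::lit_to_str
    }
}

impl<'a, T: ToSource> ToSource for &'a [T] {
    fn to_source(&self) -> String {
        self.iter()
            .map(|i| i.to_source())
            .collect::<Vec<String>>()
            .join(", ") // 2014 spelling: .connect(", ").to_strbuf()
    }
}

fn main() {
    let tys: &[i32] = &[1, 2, 3];
    assert_eq!(tys.to_source(), "1, 2, 3");
    assert_eq!(true.to_source(), "true");
}
```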

View file

@ -57,14 +57,15 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
let topmost = topmost_expn_info(cx.backtrace().unwrap()); let topmost = topmost_expn_info(cx.backtrace().unwrap());
let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo); let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo);
let filename = token::intern_and_get_ident(loc.file.name); let filename = token::intern_and_get_ident(loc.file.name.as_slice());
base::MacExpr::new(cx.expr_str(topmost.call_site, filename)) base::MacExpr::new(cx.expr_str(topmost.call_site, filename))
} }
pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> Box<base::MacResult> { -> Box<base::MacResult> {
let s = pprust::tts_to_str(tts); let s = pprust::tts_to_str(tts);
base::MacExpr::new(cx.expr_str(sp, token::intern_and_get_ident(s))) base::MacExpr::new(cx.expr_str(sp,
token::intern_and_get_ident(s.as_slice())))
} }
pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
@ -72,8 +73,8 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
base::check_zero_tts(cx, sp, tts, "module_path!"); base::check_zero_tts(cx, sp, tts, "module_path!");
let string = cx.mod_path() let string = cx.mod_path()
.iter() .iter()
.map(|x| token::get_ident(*x).get().to_str()) .map(|x| token::get_ident(*x).get().to_strbuf())
.collect::<Vec<~str>>() .collect::<Vec<StrBuf>>()
.connect("::"); .connect("::");
base::MacExpr::new(cx.expr_str(sp, token::intern_and_get_ident(string))) base::MacExpr::new(cx.expr_str(sp, token::intern_and_get_ident(string)))
} }
@ -117,9 +118,9 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
Some(src) => { Some(src) => {
// Add this input file to the code map to make it available as // Add this input file to the code map to make it available as
// dependency information // dependency information
let filename = file.display().to_str(); let filename = file.display().to_str().to_strbuf();
let interned = token::intern_and_get_ident(src); let interned = token::intern_and_get_ident(src);
cx.codemap().new_filemap(filename, src.to_owned()); cx.codemap().new_filemap(filename, src.to_strbuf());
base::MacExpr::new(cx.expr_str(sp, interned)) base::MacExpr::new(cx.expr_str(sp, interned))
} }
@ -161,7 +162,7 @@ fn topmost_expn_info(expn_info: @codemap::ExpnInfo) -> @codemap::ExpnInfo {
.. ..
} => { } => {
// Don't recurse into file using "include!" // Don't recurse into file using "include!"
if "include" == *name { if "include" == name.as_slice() {
expn_info expn_info
} else { } else {
topmost_expn_info(next_expn_info) topmost_expn_info(next_expn_info)

View file

@ -201,8 +201,8 @@ pub fn nameize(p_s: &ParseSess, ms: &[Matcher], res: &[Rc<NamedMatch>])
pub enum ParseResult { pub enum ParseResult {
Success(HashMap<Ident, Rc<NamedMatch>>), Success(HashMap<Ident, Rc<NamedMatch>>),
Failure(codemap::Span, ~str), Failure(codemap::Span, StrBuf),
Error(codemap::Span, ~str) Error(codemap::Span, StrBuf)
} }
pub fn parse_or_else(sess: &ParseSess, pub fn parse_or_else(sess: &ParseSess,
@ -212,8 +212,12 @@ pub fn parse_or_else(sess: &ParseSess,
-> HashMap<Ident, Rc<NamedMatch>> { -> HashMap<Ident, Rc<NamedMatch>> {
match parse(sess, cfg, rdr, ms.as_slice()) { match parse(sess, cfg, rdr, ms.as_slice()) {
Success(m) => m, Success(m) => m,
Failure(sp, str) => sess.span_diagnostic.span_fatal(sp, str), Failure(sp, str) => {
Error(sp, str) => sess.span_diagnostic.span_fatal(sp, str) sess.span_diagnostic.span_fatal(sp, str.as_slice())
}
Error(sp, str) => {
sess.span_diagnostic.span_fatal(sp, str.as_slice())
}
} }
} }
@ -366,9 +370,9 @@ pub fn parse(sess: &ParseSess,
} }
return Success(nameize(sess, ms, v.as_slice())); return Success(nameize(sess, ms, v.as_slice()));
} else if eof_eis.len() > 1u { } else if eof_eis.len() > 1u {
return Error(sp, "ambiguity: multiple successful parses".to_owned()); return Error(sp, "ambiguity: multiple successful parses".to_strbuf());
} else { } else {
return Failure(sp, "unexpected end of macro invocation".to_owned()); return Failure(sp, "unexpected end of macro invocation".to_strbuf());
} }
} else { } else {
if (bb_eis.len() > 0u && next_eis.len() > 0u) if (bb_eis.len() > 0u && next_eis.len() > 0u)
@ -376,19 +380,19 @@ pub fn parse(sess: &ParseSess,
let nts = bb_eis.iter().map(|ei| { let nts = bb_eis.iter().map(|ei| {
match ei.elts.get(ei.idx).node { match ei.elts.get(ei.idx).node {
MatchNonterminal(bind, name, _) => { MatchNonterminal(bind, name, _) => {
format!("{} ('{}')", (format!("{} ('{}')",
token::get_ident(name), token::get_ident(name),
token::get_ident(bind)) token::get_ident(bind))).to_strbuf()
} }
_ => fail!() _ => fail!()
} }).collect::<Vec<~str>>().connect(" or "); } }).collect::<Vec<StrBuf>>().connect(" or ");
return Error(sp, format!( return Error(sp, format!(
"local ambiguity: multiple parsing options: \ "local ambiguity: multiple parsing options: \
built-in NTs {} or {} other options.", built-in NTs {} or {} other options.",
nts, next_eis.len())); nts, next_eis.len()).to_strbuf());
} else if bb_eis.len() == 0u && next_eis.len() == 0u { } else if bb_eis.len() == 0u && next_eis.len() == 0u {
return Failure(sp, format!("no rules expected the token `{}`", return Failure(sp, format!("no rules expected the token `{}`",
token::to_str(&tok))); token::to_str(&tok)).to_strbuf());
} else if next_eis.len() > 0u { } else if next_eis.len() > 0u {
/* Now process the next token */ /* Now process the next token */
while next_eis.len() > 0u { while next_eis.len() > 0u {
@ -436,7 +440,8 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal {
token::IDENT(sn,b) => { p.bump(); token::NtIdent(box sn,b) } token::IDENT(sn,b) => { p.bump(); token::NtIdent(box sn,b) }
_ => { _ => {
let token_str = token::to_str(&p.token); let token_str = token::to_str(&p.token);
p.fatal("expected ident, found ".to_owned() + token_str) p.fatal((format!("expected ident, found {}",
token_str.as_slice())).as_slice())
} }
}, },
"path" => { "path" => {

View file

@ -132,7 +132,7 @@ fn generic_extension(cx: &ExtCtxt,
// Which arm's failure should we report? (the one furthest along) // Which arm's failure should we report? (the one furthest along)
let mut best_fail_spot = DUMMY_SP; let mut best_fail_spot = DUMMY_SP;
let mut best_fail_msg = "internal error: ran no matchers".to_owned(); let mut best_fail_msg = "internal error: ran no matchers".to_strbuf();
for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
match **lhs { match **lhs {
@ -177,13 +177,13 @@ fn generic_extension(cx: &ExtCtxt,
best_fail_spot = sp; best_fail_spot = sp;
best_fail_msg = (*msg).clone(); best_fail_msg = (*msg).clone();
}, },
Error(sp, ref msg) => cx.span_fatal(sp, (*msg)) Error(sp, ref msg) => cx.span_fatal(sp, msg.as_slice())
} }
} }
_ => cx.bug("non-matcher found in parsed lhses") _ => cx.bug("non-matcher found in parsed lhses")
} }
} }
cx.span_fatal(best_fail_spot, best_fail_msg); cx.span_fatal(best_fail_spot, best_fail_msg.as_slice());
} }
// this procedure performs the expansion of the // this procedure performs the expansion of the
@ -247,7 +247,7 @@ pub fn add_new_extension(cx: &mut ExtCtxt,
box MacroRulesDefiner { box MacroRulesDefiner {
def: RefCell::new(Some(MacroDef { def: RefCell::new(Some(MacroDef {
name: token::get_ident(name).to_str(), name: token::get_ident(name).to_str().to_strbuf(),
ext: NormalTT(exp, Some(sp)) ext: NormalTT(exp, Some(sp))
})) }))
} as Box<MacResult> } as Box<MacResult>
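`ParseResult`'s `Failure` and `Error` payloads are now owned strings, while the diagnostics handler still takes `&str`, hence the `msg.as_slice()` at each fatal call above. A compact sketch of that plumbing; `Span` and `span_fatal` are hypothetical stand-ins for `codemap::Span` and the `span_diagnostic` handler.

```rust
type Span = (u32, u32);

#[allow(dead_code)]
enum ParseResult {
    Success(u32),
    Failure(Span, String), // was ~str
    Error(Span, String),
}

fn span_fatal(sp: Span, msg: &str) -> ! {
    panic!("{}:{}: {}", sp.0, sp.1, msg)
}

fn parse_or_else(r: ParseResult) -> u32 {
    match r {
        ParseResult::Success(m) => m,
        // The owned message is borrowed at the boundary:
        // str.as_slice() in 2014, .as_str() today.
        ParseResult::Failure(sp, s) | ParseResult::Error(sp, s) => {
            span_fatal(sp, s.as_str())
        }
    }
}

fn main() {
    assert_eq!(parse_or_else(ParseResult::Success(7)), 7);
}
```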

View file

@ -100,7 +100,7 @@ fn lookup_cur_matched(r: &TtReader, name: Ident) -> Rc<NamedMatch> {
enum LockstepIterSize { enum LockstepIterSize {
LisUnconstrained, LisUnconstrained,
LisConstraint(uint, Ident), LisConstraint(uint, Ident),
LisContradiction(~str), LisContradiction(StrBuf),
} }
fn lis_merge(lhs: LockstepIterSize, rhs: LockstepIterSize) -> LockstepIterSize { fn lis_merge(lhs: LockstepIterSize, rhs: LockstepIterSize) -> LockstepIterSize {
@ -116,7 +116,7 @@ fn lis_merge(lhs: LockstepIterSize, rhs: LockstepIterSize) -> LockstepIterSize {
let r_n = token::get_ident(r_id); let r_n = token::get_ident(r_id);
LisContradiction(format!("inconsistent lockstep iteration: \ LisContradiction(format!("inconsistent lockstep iteration: \
'{}' has {} items, but '{}' has {}", '{}' has {} items, but '{}' has {}",
l_n, l_len, r_n, r_len)) l_n, l_len, r_n, r_len).to_strbuf())
} }
} }
} }
@ -223,7 +223,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
} }
LisContradiction(ref msg) => { LisContradiction(ref msg) => {
// FIXME #2887 blame macro invoker instead // FIXME #2887 blame macro invoker instead
r.sp_diag.span_fatal(sp.clone(), *msg); r.sp_diag.span_fatal(sp.clone(), msg.as_slice());
} }
LisConstraint(len, _) => { LisConstraint(len, _) => {
if len == 0 { if len == 0 {

View file

@ -949,7 +949,7 @@ mod test {
let pred_val = $pred; let pred_val = $pred;
let a_val = $a; let a_val = $a;
let b_val = $b; let b_val = $b;
if !(pred_val(a_val,b_val)) { if !(pred_val(a_val.as_slice(),b_val.as_slice())) {
fail!("expected args satisfying {}, got {:?} and {:?}", fail!("expected args satisfying {}, got {:?} and {:?}",
$predname, a_val, b_val); $predname, a_val, b_val);
} }
@ -961,12 +961,13 @@ mod test {
#[test] fn ident_transformation () { #[test] fn ident_transformation () {
let mut zz_fold = ToZzIdentFolder; let mut zz_fold = ToZzIdentFolder;
let ast = string_to_crate( let ast = string_to_crate(
"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_owned()); "#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_strbuf());
let folded_crate = zz_fold.fold_crate(ast); let folded_crate = zz_fold.fold_crate(ast);
assert_pred!(matches_codepattern, assert_pred!(
matches_codepattern,
"matches_codepattern", "matches_codepattern",
pprust::to_str(|s| fake_print_crate(s, &folded_crate)), pprust::to_str(|s| fake_print_crate(s, &folded_crate)),
"#[a]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_owned()); "#[a]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_strbuf());
} }
// even inside macro defs.... // even inside macro defs....
@ -974,11 +975,12 @@ mod test {
let mut zz_fold = ToZzIdentFolder; let mut zz_fold = ToZzIdentFolder;
let ast = string_to_crate( let ast = string_to_crate(
"macro_rules! a {(b $c:expr $(d $e:token)f+ => \ "macro_rules! a {(b $c:expr $(d $e:token)f+ => \
(g $(d $d $e)+))} ".to_owned()); (g $(d $d $e)+))} ".to_strbuf());
let folded_crate = zz_fold.fold_crate(ast); let folded_crate = zz_fold.fold_crate(ast);
assert_pred!(matches_codepattern, assert_pred!(
matches_codepattern,
"matches_codepattern", "matches_codepattern",
pprust::to_str(|s| fake_print_crate(s, &folded_crate)), pprust::to_str(|s| fake_print_crate(s, &folded_crate)),
"zz!zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)))".to_owned()); "zz!zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)))".to_strbuf());
} }
} }
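The reworked `assert_pred!` above now holds both values as owned strings and hands the predicate borrowed slices. A self-contained modern sketch of that macro shape; the case-insensitive predicate is only a placeholder for `matches_codepattern`.

```rust
// as_slice() in the 2014 code corresponds to as_str() here.
macro_rules! assert_pred {
    ($pred:expr, $predname:expr, $a:expr, $b:expr) => {{
        let pred_val = $pred;
        let a_val = $a;
        let b_val = $b;
        if !(pred_val(a_val.as_str(), b_val.as_str())) {
            panic!("expected args satisfying {}, got {:?} and {:?}",
                   $predname, a_val, b_val);
        }
    }};
}

fn main() {
    let close_enough = |a: &str, b: &str| a.eq_ignore_ascii_case(b);
    assert_pred!(close_enough,
                 "case-insensitive equality",
                 "#[a]mod ZZ{}".to_string(),
                 "#[a]mod zz{}".to_string());
}
```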

View file

@ -33,7 +33,7 @@ pub enum CommentStyle {
#[deriving(Clone)] #[deriving(Clone)]
pub struct Comment { pub struct Comment {
pub style: CommentStyle, pub style: CommentStyle,
pub lines: Vec<~str>, pub lines: Vec<StrBuf>,
pub pos: BytePos, pub pos: BytePos,
} }
@ -53,35 +53,40 @@ pub fn doc_comment_style(comment: &str) -> ast::AttrStyle {
} }
} }
pub fn strip_doc_comment_decoration(comment: &str) -> ~str { pub fn strip_doc_comment_decoration(comment: &str) -> StrBuf {
/// remove whitespace-only lines from the start/end of lines /// remove whitespace-only lines from the start/end of lines
fn vertical_trim(lines: Vec<~str> ) -> Vec<~str> { fn vertical_trim(lines: Vec<StrBuf> ) -> Vec<StrBuf> {
let mut i = 0u; let mut i = 0u;
let mut j = lines.len(); let mut j = lines.len();
// first line of all-stars should be omitted // first line of all-stars should be omitted
if lines.len() > 0 && lines.get(0).chars().all(|c| c == '*') { if lines.len() > 0 &&
lines.get(0).as_slice().chars().all(|c| c == '*') {
i += 1; i += 1;
} }
while i < j && lines.get(i).trim().is_empty() { while i < j && lines.get(i).as_slice().trim().is_empty() {
i += 1; i += 1;
} }
// like the first, a last line of all stars should be omitted // like the first, a last line of all stars should be omitted
if j > i && lines.get(j - 1).chars().skip(1).all(|c| c == '*') { if j > i && lines.get(j - 1)
.as_slice()
.chars()
.skip(1)
.all(|c| c == '*') {
j -= 1; j -= 1;
} }
while j > i && lines.get(j - 1).trim().is_empty() { while j > i && lines.get(j - 1).as_slice().trim().is_empty() {
j -= 1; j -= 1;
} }
return lines.slice(i, j).iter().map(|x| (*x).clone()).collect(); return lines.slice(i, j).iter().map(|x| (*x).clone()).collect();
} }
/// remove a "[ \t]*\*" block from each line, if possible /// remove a "[ \t]*\*" block from each line, if possible
fn horizontal_trim(lines: Vec<~str> ) -> Vec<~str> { fn horizontal_trim(lines: Vec<StrBuf> ) -> Vec<StrBuf> {
let mut i = uint::MAX; let mut i = uint::MAX;
let mut can_trim = true; let mut can_trim = true;
let mut first = true; let mut first = true;
for line in lines.iter() { for line in lines.iter() {
for (j, c) in line.chars().enumerate() { for (j, c) in line.as_slice().chars().enumerate() {
if j > i || !"* \t".contains_char(c) { if j > i || !"* \t".contains_char(c) {
can_trim = false; can_trim = false;
break; break;
@ -105,7 +110,9 @@ pub fn strip_doc_comment_decoration(comment: &str) -> ~str {
} }
if can_trim { if can_trim {
lines.iter().map(|line| line.slice(i + 1, line.len()).to_owned()).collect() lines.iter().map(|line| {
line.as_slice().slice(i + 1, line.len()).to_strbuf()
}).collect()
} else { } else {
lines lines
} }
@ -115,39 +122,41 @@ pub fn strip_doc_comment_decoration(comment: &str) -> ~str {
static ONLINERS: &'static [&'static str] = &["///!", "///", "//!", "//"]; static ONLINERS: &'static [&'static str] = &["///!", "///", "//!", "//"];
for prefix in ONLINERS.iter() { for prefix in ONLINERS.iter() {
if comment.starts_with(*prefix) { if comment.starts_with(*prefix) {
return comment.slice_from(prefix.len()).to_owned(); return comment.slice_from(prefix.len()).to_strbuf();
} }
} }
if comment.starts_with("/*") { if comment.starts_with("/*") {
let lines = comment.slice(3u, comment.len() - 2u) let lines = comment.slice(3u, comment.len() - 2u)
.lines_any() .lines_any()
.map(|s| s.to_owned()) .map(|s| s.to_strbuf())
.collect::<Vec<~str> >(); .collect::<Vec<StrBuf> >();
let lines = vertical_trim(lines); let lines = vertical_trim(lines);
let lines = horizontal_trim(lines); let lines = horizontal_trim(lines);
return lines.connect("\n"); return lines.connect("\n").to_strbuf();
} }
fail!("not a doc-comment: {}", comment); fail!("not a doc-comment: {}", comment);
} }
fn read_to_eol(rdr: &mut StringReader) -> ~str { fn read_to_eol(rdr: &mut StringReader) -> StrBuf {
let mut val = StrBuf::new(); let mut val = StrBuf::new();
while !rdr.curr_is('\n') && !is_eof(rdr) { while !rdr.curr_is('\n') && !is_eof(rdr) {
val.push_char(rdr.curr.unwrap()); val.push_char(rdr.curr.unwrap());
bump(rdr); bump(rdr);
} }
if rdr.curr_is('\n') { bump(rdr); } if rdr.curr_is('\n') { bump(rdr); }
return val.into_owned(); return val
} }
fn read_one_line_comment(rdr: &mut StringReader) -> ~str { fn read_one_line_comment(rdr: &mut StringReader) -> StrBuf {
let val = read_to_eol(rdr); let val = read_to_eol(rdr);
assert!((val[0] == '/' as u8 && val[1] == '/' as u8) || assert!((val.as_slice()[0] == '/' as u8 &&
(val[0] == '#' as u8 && val[1] == '!' as u8)); val.as_slice()[1] == '/' as u8) ||
(val.as_slice()[0] == '#' as u8 &&
val.as_slice()[1] == '!' as u8));
return val; return val;
} }
@ -193,11 +202,12 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool,
comments: &mut Vec<Comment>) { comments: &mut Vec<Comment>) {
debug!(">>> line comments"); debug!(">>> line comments");
let p = rdr.last_pos; let p = rdr.last_pos;
let mut lines: Vec<~str> = Vec::new(); let mut lines: Vec<StrBuf> = Vec::new();
while rdr.curr_is('/') && nextch_is(rdr, '/') { while rdr.curr_is('/') && nextch_is(rdr, '/') {
let line = read_one_line_comment(rdr); let line = read_one_line_comment(rdr);
debug!("{}", line); debug!("{}", line);
if is_doc_comment(line) { // doc-comments are not put in comments // Doc comments are not put in comments.
if is_doc_comment(line.as_slice()) {
break; break;
} }
lines.push(line); lines.push(line);
@ -231,14 +241,16 @@ fn all_whitespace(s: &str, col: CharPos) -> Option<uint> {
return Some(cursor); return Some(cursor);
} }
fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<~str> , fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<StrBuf> ,
s: ~str, col: CharPos) { s: StrBuf, col: CharPos) {
let len = s.len(); let len = s.len();
let s1 = match all_whitespace(s, col) { let s1 = match all_whitespace(s.as_slice(), col) {
Some(col) => { Some(col) => {
if col < len { if col < len {
s.slice(col, len).to_owned() s.as_slice().slice(col, len).to_strbuf()
} else { "".to_owned() } } else {
"".to_strbuf()
}
} }
None => s, None => s,
}; };
@ -251,7 +263,7 @@ fn read_block_comment(rdr: &mut StringReader,
comments: &mut Vec<Comment> ) { comments: &mut Vec<Comment> ) {
debug!(">>> block comment"); debug!(">>> block comment");
let p = rdr.last_pos; let p = rdr.last_pos;
let mut lines: Vec<~str> = Vec::new(); let mut lines: Vec<StrBuf> = Vec::new();
let col = rdr.col; let col = rdr.col;
bump(rdr); bump(rdr);
bump(rdr); bump(rdr);
@ -273,17 +285,17 @@ fn read_block_comment(rdr: &mut StringReader,
return return
} }
assert!(!curr_line.as_slice().contains_char('\n')); assert!(!curr_line.as_slice().contains_char('\n'));
lines.push(curr_line.into_owned()); lines.push(curr_line);
} else { } else {
let mut level: int = 1; let mut level: int = 1;
while level > 0 { while level > 0 {
debug!("=== block comment level {}", level); debug!("=== block comment level {}", level);
if is_eof(rdr) { if is_eof(rdr) {
rdr.fatal("unterminated block comment".to_owned()); rdr.fatal("unterminated block comment".to_strbuf());
} }
if rdr.curr_is('\n') { if rdr.curr_is('\n') {
trim_whitespace_prefix_and_push_line(&mut lines, trim_whitespace_prefix_and_push_line(&mut lines,
curr_line.into_owned(), curr_line,
col); col);
curr_line = StrBuf::new(); curr_line = StrBuf::new();
bump(rdr); bump(rdr);
@ -306,7 +318,7 @@ fn read_block_comment(rdr: &mut StringReader,
} }
if curr_line.len() != 0 { if curr_line.len() != 0 {
trim_whitespace_prefix_and_push_line(&mut lines, trim_whitespace_prefix_and_push_line(&mut lines,
curr_line.into_owned(), curr_line,
col); col);
} }
} }
@ -344,7 +356,7 @@ fn consume_comment(rdr: &mut StringReader,
#[deriving(Clone)] #[deriving(Clone)]
pub struct Literal { pub struct Literal {
pub lit: ~str, pub lit: StrBuf,
pub pos: BytePos, pub pos: BytePos,
} }
@ -352,11 +364,11 @@ pub struct Literal {
// probably not a good thing. // probably not a good thing.
pub fn gather_comments_and_literals(span_diagnostic: pub fn gather_comments_and_literals(span_diagnostic:
&diagnostic::SpanHandler, &diagnostic::SpanHandler,
path: ~str, path: StrBuf,
srdr: &mut io::Reader) srdr: &mut io::Reader)
-> (Vec<Comment>, Vec<Literal>) { -> (Vec<Comment>, Vec<Literal>) {
let src = srdr.read_to_end().unwrap(); let src = srdr.read_to_end().unwrap();
let src = str::from_utf8(src.as_slice()).unwrap().to_owned(); let src = str::from_utf8(src.as_slice()).unwrap().to_strbuf();
let cm = CodeMap::new(); let cm = CodeMap::new();
let filemap = cm.new_filemap(path, src); let filemap = cm.new_filemap(path, src);
let mut rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap); let mut rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap);
@ -387,7 +399,7 @@ pub fn gather_comments_and_literals(span_diagnostic:
if token::is_lit(&tok) { if token::is_lit(&tok) {
with_str_from(&rdr, bstart, |s| { with_str_from(&rdr, bstart, |s| {
debug!("tok lit: {}", s); debug!("tok lit: {}", s);
literals.push(Literal {lit: s.to_owned(), pos: sp.lo}); literals.push(Literal {lit: s.to_strbuf(), pos: sp.lo});
}) })
} else { } else {
debug!("tok: {}", token::to_str(&tok)); debug!("tok: {}", token::to_str(&tok));
@ -405,41 +417,41 @@ mod test {
#[test] fn test_block_doc_comment_1() { #[test] fn test_block_doc_comment_1() {
let comment = "/**\n * Test \n ** Test\n * Test\n*/"; let comment = "/**\n * Test \n ** Test\n * Test\n*/";
let stripped = strip_doc_comment_decoration(comment); let stripped = strip_doc_comment_decoration(comment);
assert_eq!(stripped, " Test \n* Test\n Test".to_owned()); assert_eq!(stripped, " Test \n* Test\n Test".to_strbuf());
} }
#[test] fn test_block_doc_comment_2() { #[test] fn test_block_doc_comment_2() {
let comment = "/**\n * Test\n * Test\n*/"; let comment = "/**\n * Test\n * Test\n*/";
let stripped = strip_doc_comment_decoration(comment); let stripped = strip_doc_comment_decoration(comment);
assert_eq!(stripped, " Test\n Test".to_owned()); assert_eq!(stripped, " Test\n Test".to_strbuf());
} }
#[test] fn test_block_doc_comment_3() { #[test] fn test_block_doc_comment_3() {
let comment = "/**\n let a: *int;\n *a = 5;\n*/"; let comment = "/**\n let a: *int;\n *a = 5;\n*/";
let stripped = strip_doc_comment_decoration(comment); let stripped = strip_doc_comment_decoration(comment);
assert_eq!(stripped, " let a: *int;\n *a = 5;".to_owned()); assert_eq!(stripped, " let a: *int;\n *a = 5;".to_strbuf());
} }
#[test] fn test_block_doc_comment_4() { #[test] fn test_block_doc_comment_4() {
let comment = "/*******************\n test\n *********************/"; let comment = "/*******************\n test\n *********************/";
let stripped = strip_doc_comment_decoration(comment); let stripped = strip_doc_comment_decoration(comment);
assert_eq!(stripped, " test".to_owned()); assert_eq!(stripped, " test".to_strbuf());
} }
#[test] fn test_line_doc_comment() { #[test] fn test_line_doc_comment() {
let stripped = strip_doc_comment_decoration("/// test"); let stripped = strip_doc_comment_decoration("/// test");
assert_eq!(stripped, " test".to_owned()); assert_eq!(stripped, " test".to_strbuf());
let stripped = strip_doc_comment_decoration("///! test"); let stripped = strip_doc_comment_decoration("///! test");
assert_eq!(stripped, " test".to_owned()); assert_eq!(stripped, " test".to_strbuf());
let stripped = strip_doc_comment_decoration("// test"); let stripped = strip_doc_comment_decoration("// test");
assert_eq!(stripped, " test".to_owned()); assert_eq!(stripped, " test".to_strbuf());
let stripped = strip_doc_comment_decoration("// test"); let stripped = strip_doc_comment_decoration("// test");
assert_eq!(stripped, " test".to_owned()); assert_eq!(stripped, " test".to_strbuf());
let stripped = strip_doc_comment_decoration("///test"); let stripped = strip_doc_comment_decoration("///test");
assert_eq!(stripped, "test".to_owned()); assert_eq!(stripped, "test".to_strbuf());
let stripped = strip_doc_comment_decoration("///!test"); let stripped = strip_doc_comment_decoration("///!test");
assert_eq!(stripped, "test".to_owned()); assert_eq!(stripped, "test".to_strbuf());
let stripped = strip_doc_comment_decoration("//test"); let stripped = strip_doc_comment_decoration("//test");
assert_eq!(stripped, "test".to_owned()); assert_eq!(stripped, "test".to_strbuf());
} }
} }
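`strip_doc_comment_decoration` now returns `StrBuf`, so the tests above compare against `to_strbuf()` values. The toy below reimplements only the line-comment branch that `test_line_doc_comment` exercises, returning an owned `String`; the block-comment branch (the vertical/horizontal trimming) is omitted.

```rust
fn strip_doc_comment_decoration(comment: &str) -> String {
    static ONLINERS: &[&str] = &["///!", "///", "//!", "//"];
    for prefix in ONLINERS {
        if let Some(rest) = comment.strip_prefix(prefix) {
            return rest.to_string(); // slice_from(prefix.len()).to_strbuf()
        }
    }
    panic!("not a doc-comment: {}", comment);
}

fn main() {
    assert_eq!(strip_doc_comment_decoration("/// test"), " test".to_string());
    assert_eq!(strip_doc_comment_decoration("//!doc"), "doc".to_string());
}
```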

View file

@ -28,7 +28,7 @@ pub use ext::tt::transcribe::{TtReader, new_tt_reader};
pub trait Reader { pub trait Reader {
fn is_eof(&self) -> bool; fn is_eof(&self) -> bool;
fn next_token(&mut self) -> TokenAndSpan; fn next_token(&mut self) -> TokenAndSpan;
fn fatal(&self, ~str) -> !; fn fatal(&self, StrBuf) -> !;
fn span_diag<'a>(&'a self) -> &'a SpanHandler; fn span_diag<'a>(&'a self) -> &'a SpanHandler;
fn peek(&self) -> TokenAndSpan; fn peek(&self) -> TokenAndSpan;
} }
@ -101,8 +101,8 @@ impl<'a> Reader for StringReader<'a> {
string_advance_token(self); string_advance_token(self);
ret_val ret_val
} }
fn fatal(&self, m: ~str) -> ! { fn fatal(&self, m: StrBuf) -> ! {
self.span_diagnostic.span_fatal(self.peek_span, m) self.span_diagnostic.span_fatal(self.peek_span, m.as_slice())
} }
fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.span_diagnostic } fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.span_diagnostic }
fn peek(&self) -> TokenAndSpan { fn peek(&self) -> TokenAndSpan {
@ -123,8 +123,8 @@ impl<'a> Reader for TtReader<'a> {
debug!("TtReader: r={:?}", r); debug!("TtReader: r={:?}", r);
r r
} }
fn fatal(&self, m: ~str) -> ! { fn fatal(&self, m: StrBuf) -> ! {
self.sp_diag.span_fatal(self.cur_span, m); self.sp_diag.span_fatal(self.cur_span, m.as_slice());
} }
fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.sp_diag } fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.sp_diag }
fn peek(&self) -> TokenAndSpan { fn peek(&self) -> TokenAndSpan {
@ -139,7 +139,7 @@ impl<'a> Reader for TtReader<'a> {
fn fatal_span(rdr: &mut StringReader, fn fatal_span(rdr: &mut StringReader,
from_pos: BytePos, from_pos: BytePos,
to_pos: BytePos, to_pos: BytePos,
m: ~str) m: StrBuf)
-> ! { -> ! {
rdr.peek_span = codemap::mk_sp(from_pos, to_pos); rdr.peek_span = codemap::mk_sp(from_pos, to_pos);
rdr.fatal(m); rdr.fatal(m);
@ -150,13 +150,13 @@ fn fatal_span(rdr: &mut StringReader,
fn fatal_span_char(rdr: &mut StringReader, fn fatal_span_char(rdr: &mut StringReader,
from_pos: BytePos, from_pos: BytePos,
to_pos: BytePos, to_pos: BytePos,
m: ~str, m: StrBuf,
c: char) c: char)
-> ! { -> ! {
let mut m = StrBuf::from_owned_str(m); let mut m = m;
m.push_str(": "); m.push_str(": ");
char::escape_default(c, |c| m.push_char(c)); char::escape_default(c, |c| m.push_char(c));
fatal_span(rdr, from_pos, to_pos, m.into_owned()); fatal_span(rdr, from_pos, to_pos, m.into_strbuf());
} }
// report a lexical error spanning [`from_pos`, `to_pos`), appending the // report a lexical error spanning [`from_pos`, `to_pos`), appending the
@ -164,14 +164,14 @@ fn fatal_span_char(rdr: &mut StringReader,
fn fatal_span_verbose(rdr: &mut StringReader, fn fatal_span_verbose(rdr: &mut StringReader,
from_pos: BytePos, from_pos: BytePos,
to_pos: BytePos, to_pos: BytePos,
m: ~str) m: StrBuf)
-> ! { -> ! {
let mut m = StrBuf::from_owned_str(m); let mut m = m;
m.push_str(": "); m.push_str(": ");
let from = byte_offset(rdr, from_pos).to_uint(); let from = byte_offset(rdr, from_pos).to_uint();
let to = byte_offset(rdr, to_pos).to_uint(); let to = byte_offset(rdr, to_pos).to_uint();
m.push_str(rdr.filemap.src.slice(from, to)); m.push_str(rdr.filemap.src.as_slice().slice(from, to));
fatal_span(rdr, from_pos, to_pos, m.into_owned()); fatal_span(rdr, from_pos, to_pos, m);
} }
// EFFECT: advance peek_tok and peek_span to refer to the next token. // EFFECT: advance peek_tok and peek_span to refer to the next token.
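In the hunk above, `fatal_span_char` no longer rebuilds its buffer with `StrBuf::from_owned_str`; the message now arrives owned and is extended in place. A sketch of that message building, returning the finished string instead of diverging so it can be asserted on (the real function hands it to `fatal_span`):

```rust
fn format_span_char(m: String, c: char) -> String {
    let mut m = m; // was StrBuf::from_owned_str(m); the buffer is already owned
    m.push_str(": ");
    m.extend(c.escape_default()); // char::escape_default(c, |c| m.push_char(c))
    m
}

fn main() {
    let msg = format_span_char("unknown start of token".to_string(), '\u{7f}');
    assert_eq!(msg, "unknown start of token: \\u{7f}");
}
```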
@ -218,7 +218,7 @@ fn with_str_from_to<T>(
end: BytePos, end: BytePos,
f: |s: &str| -> T) f: |s: &str| -> T)
-> T { -> T {
f(rdr.filemap.src.slice( f(rdr.filemap.src.as_slice().slice(
byte_offset(rdr, start).to_uint(), byte_offset(rdr, start).to_uint(),
byte_offset(rdr, end).to_uint())) byte_offset(rdr, end).to_uint()))
} }
@ -231,7 +231,10 @@ pub fn bump(rdr: &mut StringReader) {
if current_byte_offset < rdr.filemap.src.len() { if current_byte_offset < rdr.filemap.src.len() {
assert!(rdr.curr.is_some()); assert!(rdr.curr.is_some());
let last_char = rdr.curr.unwrap(); let last_char = rdr.curr.unwrap();
let next = rdr.filemap.src.char_range_at(current_byte_offset); let next = rdr.filemap
.src
.as_slice()
.char_range_at(current_byte_offset);
let byte_offset_diff = next.next - current_byte_offset; let byte_offset_diff = next.next - current_byte_offset;
rdr.pos = rdr.pos + Pos::from_uint(byte_offset_diff); rdr.pos = rdr.pos + Pos::from_uint(byte_offset_diff);
rdr.curr = Some(next.ch); rdr.curr = Some(next.ch);
@ -256,7 +259,7 @@ pub fn is_eof(rdr: &StringReader) -> bool {
pub fn nextch(rdr: &StringReader) -> Option<char> { pub fn nextch(rdr: &StringReader) -> Option<char> {
let offset = byte_offset(rdr, rdr.pos).to_uint(); let offset = byte_offset(rdr, rdr.pos).to_uint();
if offset < rdr.filemap.src.len() { if offset < rdr.filemap.src.len() {
Some(rdr.filemap.src.char_at(offset)) Some(rdr.filemap.src.as_slice().char_at(offset))
} else { } else {
None None
} }
@ -400,9 +403,9 @@ fn consume_block_comment(rdr: &mut StringReader) -> Option<TokenAndSpan> {
while level > 0 { while level > 0 {
if is_eof(rdr) { if is_eof(rdr) {
let msg = if is_doc_comment { let msg = if is_doc_comment {
"unterminated block doc-comment".to_owned() "unterminated block doc-comment".to_strbuf()
} else { } else {
"unterminated block comment".to_owned() "unterminated block comment".to_strbuf()
}; };
fatal_span(rdr, start_bpos, rdr.last_pos, msg); fatal_span(rdr, start_bpos, rdr.last_pos, msg);
} else if rdr.curr_is('/') && nextch_is(rdr, '*') { } else if rdr.curr_is('/') && nextch_is(rdr, '*') {
@ -438,7 +441,7 @@ fn consume_block_comment(rdr: &mut StringReader) -> Option<TokenAndSpan> {
if res.is_some() { res } else { consume_whitespace_and_comments(rdr) } if res.is_some() { res } else { consume_whitespace_and_comments(rdr) }
} }
fn scan_exponent(rdr: &mut StringReader, start_bpos: BytePos) -> Option<~str> { fn scan_exponent(rdr: &mut StringReader, start_bpos: BytePos) -> Option<StrBuf> {
// \x00 hits the `return None` case immediately, so this is fine. // \x00 hits the `return None` case immediately, so this is fine.
let mut c = rdr.curr.unwrap_or('\x00'); let mut c = rdr.curr.unwrap_or('\x00');
let mut rslt = StrBuf::new(); let mut rslt = StrBuf::new();
@ -452,16 +455,18 @@ fn scan_exponent(rdr: &mut StringReader, start_bpos: BytePos) -> Option<~str> {
} }
let exponent = scan_digits(rdr, 10u); let exponent = scan_digits(rdr, 10u);
if exponent.len() > 0u { if exponent.len() > 0u {
rslt.push_str(exponent); rslt.push_str(exponent.as_slice());
return Some(rslt.into_owned()); return Some(rslt);
} else { } else {
fatal_span(rdr, start_bpos, rdr.last_pos, fatal_span(rdr, start_bpos, rdr.last_pos,
"scan_exponent: bad fp literal".to_owned()); "scan_exponent: bad fp literal".to_strbuf());
}
} else {
return None::<StrBuf>;
} }
} else { return None::<~str>; }
} }
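
Returning `Option<StrBuf>` lets `scan_exponent` hand back the buffer it built instead of converting with `into_owned()` at the boundary. A self-contained sketch of that shape, simplified in two ways: it scans a plain `&str` rather than a `StringReader`, and it returns `None` where the real lexer raises a fatal span error:

    fn scan_exponent(input: &str) -> Option<String> {
        let mut chars = input.chars().peekable();
        let mut rslt = String::new();
        match chars.peek() {
            Some(&c) if c == 'e' || c == 'E' => {
                rslt.push(c);
                chars.next();
            }
            _ => return None,
        }
        if let Some(&c) = chars.peek() {
            if c == '+' || c == '-' {
                rslt.push(c);
                chars.next();
            }
        }
        let exponent: String = chars.take_while(|c| c.is_ascii_digit()).collect();
        if exponent.is_empty() {
            None // the real lexer reports "bad fp literal" here
        } else {
            rslt.push_str(&exponent);
            Some(rslt) // no into_owned()/into_strbuf() round trip
        }
    }

    fn main() {
        assert_eq!(scan_exponent("e+10rest"), Some("e+10".to_string()));
        assert_eq!(scan_exponent("x"), None);
    }
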
fn scan_digits(rdr: &mut StringReader, radix: uint) -> ~str { fn scan_digits(rdr: &mut StringReader, radix: uint) -> StrBuf {
let mut rslt = StrBuf::new(); let mut rslt = StrBuf::new();
loop { loop {
let c = rdr.curr; let c = rdr.curr;
@ -471,7 +476,7 @@ fn scan_digits(rdr: &mut StringReader, radix: uint) -> ~str {
rslt.push_char(c.unwrap()); rslt.push_char(c.unwrap());
bump(rdr); bump(rdr);
} }
_ => return rslt.into_owned() _ => return rslt
} }
}; };
} }
@ -479,12 +484,14 @@ fn scan_digits(rdr: &mut StringReader, radix: uint) -> ~str {
fn check_float_base(rdr: &mut StringReader, start_bpos: BytePos, last_bpos: BytePos, fn check_float_base(rdr: &mut StringReader, start_bpos: BytePos, last_bpos: BytePos,
base: uint) { base: uint) {
match base { match base {
16u => fatal_span(rdr, start_bpos, last_bpos, 16u => {
"hexadecimal float literal is not supported".to_owned()), fatal_span(rdr, start_bpos, last_bpos,
"hexadecimal float literal is not supported".to_strbuf())
}
8u => fatal_span(rdr, start_bpos, last_bpos, 8u => fatal_span(rdr, start_bpos, last_bpos,
"octal float literal is not supported".to_owned()), "octal float literal is not supported".to_strbuf()),
2u => fatal_span(rdr, start_bpos, last_bpos, 2u => fatal_span(rdr, start_bpos, last_bpos,
"binary float literal is not supported".to_owned()), "binary float literal is not supported".to_strbuf()),
_ => () _ => ()
} }
} }
@ -508,7 +515,7 @@ fn scan_number(c: char, rdr: &mut StringReader) -> token::Token {
bump(rdr); bump(rdr);
base = 2u; base = 2u;
} }
num_str = StrBuf::from_owned_str(scan_digits(rdr, base)); num_str = scan_digits(rdr, base);
c = rdr.curr.unwrap_or('\x00'); c = rdr.curr.unwrap_or('\x00');
nextch(rdr); nextch(rdr);
if c == 'u' || c == 'i' { if c == 'u' || c == 'i' {
@ -544,13 +551,13 @@ fn scan_number(c: char, rdr: &mut StringReader) -> token::Token {
} }
if num_str.len() == 0u { if num_str.len() == 0u {
fatal_span(rdr, start_bpos, rdr.last_pos, fatal_span(rdr, start_bpos, rdr.last_pos,
"no valid digits found for number".to_owned()); "no valid digits found for number".to_strbuf());
} }
let parsed = match from_str_radix::<u64>(num_str.as_slice(), let parsed = match from_str_radix::<u64>(num_str.as_slice(),
base as uint) { base as uint) {
Some(p) => p, Some(p) => p,
None => fatal_span(rdr, start_bpos, rdr.last_pos, None => fatal_span(rdr, start_bpos, rdr.last_pos,
"int literal is too large".to_owned()) "int literal is too large".to_strbuf())
}; };
match tp { match tp {
@ -564,12 +571,12 @@ fn scan_number(c: char, rdr: &mut StringReader) -> token::Token {
bump(rdr); bump(rdr);
let dec_part = scan_digits(rdr, 10u); let dec_part = scan_digits(rdr, 10u);
num_str.push_char('.'); num_str.push_char('.');
num_str.push_str(dec_part); num_str.push_str(dec_part.as_slice());
} }
match scan_exponent(rdr, start_bpos) { match scan_exponent(rdr, start_bpos) {
Some(ref s) => { Some(ref s) => {
is_float = true; is_float = true;
num_str.push_str(*s); num_str.push_str(s.as_slice());
} }
None => () None => ()
} }
@ -601,7 +608,7 @@ fn scan_number(c: char, rdr: &mut StringReader) -> token::Token {
return token::LIT_FLOAT(str_to_ident(num_str.as_slice()), ast::TyF128); return token::LIT_FLOAT(str_to_ident(num_str.as_slice()), ast::TyF128);
} }
fatal_span(rdr, start_bpos, rdr.last_pos, fatal_span(rdr, start_bpos, rdr.last_pos,
"expected `f32`, `f64` or `f128` suffix".to_owned()); "expected `f32`, `f64` or `f128` suffix".to_strbuf());
} }
if is_float { if is_float {
check_float_base(rdr, start_bpos, rdr.last_pos, base); check_float_base(rdr, start_bpos, rdr.last_pos, base);
@ -610,13 +617,13 @@ fn scan_number(c: char, rdr: &mut StringReader) -> token::Token {
} else { } else {
if num_str.len() == 0u { if num_str.len() == 0u {
fatal_span(rdr, start_bpos, rdr.last_pos, fatal_span(rdr, start_bpos, rdr.last_pos,
"no valid digits found for number".to_owned()); "no valid digits found for number".to_strbuf());
} }
let parsed = match from_str_radix::<u64>(num_str.as_slice(), let parsed = match from_str_radix::<u64>(num_str.as_slice(),
base as uint) { base as uint) {
Some(p) => p, Some(p) => p,
None => fatal_span(rdr, start_bpos, rdr.last_pos, None => fatal_span(rdr, start_bpos, rdr.last_pos,
"int literal is too large".to_owned()) "int literal is too large".to_strbuf())
}; };
debug!("lexing {} as an unsuffixed integer literal", debug!("lexing {} as an unsuffixed integer literal",
@ -632,8 +639,11 @@ fn scan_numeric_escape(rdr: &mut StringReader, n_hex_digits: uint) -> char {
while i != 0u && !is_eof(rdr) { while i != 0u && !is_eof(rdr) {
let n = rdr.curr; let n = rdr.curr;
if !is_hex_digit(n) { if !is_hex_digit(n) {
fatal_span_char(rdr, rdr.last_pos, rdr.pos, fatal_span_char(
"illegal character in numeric character escape".to_owned(), rdr,
rdr.last_pos,
rdr.pos,
"illegal character in numeric character escape".to_strbuf(),
n.unwrap()); n.unwrap());
} }
bump(rdr); bump(rdr);
@ -643,13 +653,13 @@ fn scan_numeric_escape(rdr: &mut StringReader, n_hex_digits: uint) -> char {
} }
if i != 0 && is_eof(rdr) { if i != 0 && is_eof(rdr) {
fatal_span(rdr, start_bpos, rdr.last_pos, fatal_span(rdr, start_bpos, rdr.last_pos,
"unterminated numeric character escape".to_owned()); "unterminated numeric character escape".to_strbuf());
} }
match char::from_u32(accum_int as u32) { match char::from_u32(accum_int as u32) {
Some(x) => x, Some(x) => x,
None => fatal_span(rdr, start_bpos, rdr.last_pos, None => fatal_span(rdr, start_bpos, rdr.last_pos,
"illegal numeric character escape".to_owned()) "illegal numeric character escape".to_strbuf())
} }
} }
@ -819,11 +829,11 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {
if token::is_keyword(token::keywords::Self, tok) { if token::is_keyword(token::keywords::Self, tok) {
fatal_span(rdr, start, rdr.last_pos, fatal_span(rdr, start, rdr.last_pos,
"invalid lifetime name: 'self \ "invalid lifetime name: 'self \
is no longer a special lifetime".to_owned()); is no longer a special lifetime".to_strbuf());
} else if token::is_any_keyword(tok) && } else if token::is_any_keyword(tok) &&
!token::is_keyword(token::keywords::Static, tok) { !token::is_keyword(token::keywords::Static, tok) {
fatal_span(rdr, start, rdr.last_pos, fatal_span(rdr, start, rdr.last_pos,
"invalid lifetime name".to_owned()); "invalid lifetime name".to_strbuf());
} else { } else {
return token::LIFETIME(ident); return token::LIFETIME(ident);
} }
@ -851,16 +861,24 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {
'u' => scan_numeric_escape(rdr, 4u), 'u' => scan_numeric_escape(rdr, 4u),
'U' => scan_numeric_escape(rdr, 8u), 'U' => scan_numeric_escape(rdr, 8u),
c2 => { c2 => {
fatal_span_char(rdr, escaped_pos, rdr.last_pos, fatal_span_char(rdr,
"unknown character escape".to_owned(), c2) escaped_pos,
rdr.last_pos,
"unknown character \
escape".to_strbuf(),
c2)
} }
} }
} }
} }
} }
'\t' | '\n' | '\r' | '\'' => { '\t' | '\n' | '\r' | '\'' => {
fatal_span_char(rdr, start, rdr.last_pos, fatal_span_char(
"character constant must be escaped".to_owned(), c2); rdr,
start,
rdr.last_pos,
"character constant must be escaped".to_strbuf(),
c2);
} }
_ => {} _ => {}
} }
@ -871,7 +889,7 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {
// ascii single quote. // ascii single quote.
start - BytePos(1), start - BytePos(1),
rdr.last_pos, rdr.last_pos,
"unterminated character constant".to_owned()); "unterminated character constant".to_strbuf());
} }
bump(rdr); // advance curr past token bump(rdr); // advance curr past token
return token::LIT_CHAR(c2); return token::LIT_CHAR(c2);
@ -883,7 +901,7 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {
while !rdr.curr_is('"') { while !rdr.curr_is('"') {
if is_eof(rdr) { if is_eof(rdr) {
fatal_span(rdr, start_bpos, rdr.last_pos, fatal_span(rdr, start_bpos, rdr.last_pos,
"unterminated double quote string".to_owned()); "unterminated double quote string".to_strbuf());
} }
let ch = rdr.curr.unwrap(); let ch = rdr.curr.unwrap();
@ -892,7 +910,7 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {
'\\' => { '\\' => {
if is_eof(rdr) { if is_eof(rdr) {
fatal_span(rdr, start_bpos, rdr.last_pos, fatal_span(rdr, start_bpos, rdr.last_pos,
"unterminated double quote string".to_owned()); "unterminated double quote string".to_strbuf());
} }
let escaped = rdr.curr.unwrap(); let escaped = rdr.curr.unwrap();
@ -918,7 +936,7 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {
} }
c2 => { c2 => {
fatal_span_char(rdr, escaped_pos, rdr.last_pos, fatal_span_char(rdr, escaped_pos, rdr.last_pos,
"unknown string escape".to_owned(), c2); "unknown string escape".to_strbuf(), c2);
} }
} }
} }
@ -939,11 +957,11 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {
if is_eof(rdr) { if is_eof(rdr) {
fatal_span(rdr, start_bpos, rdr.last_pos, fatal_span(rdr, start_bpos, rdr.last_pos,
"unterminated raw string".to_owned()); "unterminated raw string".to_strbuf());
} else if !rdr.curr_is('"') { } else if !rdr.curr_is('"') {
fatal_span_char(rdr, start_bpos, rdr.last_pos, fatal_span_char(rdr, start_bpos, rdr.last_pos,
"only `#` is allowed in raw string delimitation; \ "only `#` is allowed in raw string delimitation; \
found illegal character".to_owned(), found illegal character".to_strbuf(),
rdr.curr.unwrap()); rdr.curr.unwrap());
} }
bump(rdr); bump(rdr);
@ -952,7 +970,7 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {
'outer: loop { 'outer: loop {
if is_eof(rdr) { if is_eof(rdr) {
fatal_span(rdr, start_bpos, rdr.last_pos, fatal_span(rdr, start_bpos, rdr.last_pos,
"unterminated raw string".to_owned()); "unterminated raw string".to_strbuf());
} }
if rdr.curr_is('"') { if rdr.curr_is('"') {
content_end_bpos = rdr.last_pos; content_end_bpos = rdr.last_pos;
@ -1000,7 +1018,7 @@ fn next_token_inner(rdr: &mut StringReader) -> token::Token {
'%' => { return binop(rdr, token::PERCENT); } '%' => { return binop(rdr, token::PERCENT); }
c => { c => {
fatal_span_char(rdr, rdr.last_pos, rdr.pos, fatal_span_char(rdr, rdr.last_pos, rdr.pos,
"unknown start of token".to_owned(), c); "unknown start of token".to_strbuf(), c);
} }
} }
} }
@ -1027,8 +1045,8 @@ mod test {
// open a string reader for the given string // open a string reader for the given string
fn setup<'a>(span_handler: &'a diagnostic::SpanHandler, fn setup<'a>(span_handler: &'a diagnostic::SpanHandler,
teststr: ~str) -> StringReader<'a> { teststr: StrBuf) -> StringReader<'a> {
let fm = span_handler.cm.new_filemap("zebra.rs".to_owned(), teststr); let fm = span_handler.cm.new_filemap("zebra.rs".to_strbuf(), teststr);
new_string_reader(span_handler, fm) new_string_reader(span_handler, fm)
} }
@ -1036,7 +1054,7 @@ mod test {
let span_handler = mk_sh(); let span_handler = mk_sh();
let mut string_reader = setup(&span_handler, let mut string_reader = setup(&span_handler,
"/* my source file */ \ "/* my source file */ \
fn main() { println!(\"zebra\"); }\n".to_owned()); fn main() { println!(\"zebra\"); }\n".to_strbuf());
let id = str_to_ident("fn"); let id = str_to_ident("fn");
let tok1 = string_reader.next_token(); let tok1 = string_reader.next_token();
let tok2 = TokenAndSpan{ let tok2 = TokenAndSpan{
@ -1069,54 +1087,56 @@ mod test {
} }
#[test] fn doublecolonparsing () { #[test] fn doublecolonparsing () {
check_tokenization(setup(&mk_sh(), "a b".to_owned()), check_tokenization(setup(&mk_sh(), "a b".to_strbuf()),
vec!(mk_ident("a",false), vec!(mk_ident("a",false),
mk_ident("b",false))); mk_ident("b",false)));
} }
#[test] fn dcparsing_2 () { #[test] fn dcparsing_2 () {
check_tokenization(setup(&mk_sh(), "a::b".to_owned()), check_tokenization(setup(&mk_sh(), "a::b".to_strbuf()),
vec!(mk_ident("a",true), vec!(mk_ident("a",true),
token::MOD_SEP, token::MOD_SEP,
mk_ident("b",false))); mk_ident("b",false)));
} }
#[test] fn dcparsing_3 () { #[test] fn dcparsing_3 () {
check_tokenization(setup(&mk_sh(), "a ::b".to_owned()), check_tokenization(setup(&mk_sh(), "a ::b".to_strbuf()),
vec!(mk_ident("a",false), vec!(mk_ident("a",false),
token::MOD_SEP, token::MOD_SEP,
mk_ident("b",false))); mk_ident("b",false)));
} }
#[test] fn dcparsing_4 () { #[test] fn dcparsing_4 () {
check_tokenization(setup(&mk_sh(), "a:: b".to_owned()), check_tokenization(setup(&mk_sh(), "a:: b".to_strbuf()),
vec!(mk_ident("a",true), vec!(mk_ident("a",true),
token::MOD_SEP, token::MOD_SEP,
mk_ident("b",false))); mk_ident("b",false)));
} }
#[test] fn character_a() { #[test] fn character_a() {
assert_eq!(setup(&mk_sh(), "'a'".to_owned()).next_token().tok, assert_eq!(setup(&mk_sh(), "'a'".to_strbuf()).next_token().tok,
token::LIT_CHAR('a')); token::LIT_CHAR('a'));
} }
#[test] fn character_space() { #[test] fn character_space() {
assert_eq!(setup(&mk_sh(), "' '".to_owned()).next_token().tok, assert_eq!(setup(&mk_sh(), "' '".to_strbuf()).next_token().tok,
token::LIT_CHAR(' ')); token::LIT_CHAR(' '));
} }
#[test] fn character_escaped() { #[test] fn character_escaped() {
assert_eq!(setup(&mk_sh(), "'\\n'".to_owned()).next_token().tok, assert_eq!(setup(&mk_sh(), "'\\n'".to_strbuf()).next_token().tok,
token::LIT_CHAR('\n')); token::LIT_CHAR('\n'));
} }
#[test] fn lifetime_name() { #[test] fn lifetime_name() {
assert_eq!(setup(&mk_sh(), "'abc".to_owned()).next_token().tok, assert_eq!(setup(&mk_sh(), "'abc".to_strbuf()).next_token().tok,
token::LIFETIME(token::str_to_ident("abc"))); token::LIFETIME(token::str_to_ident("abc")));
} }
#[test] fn raw_string() { #[test] fn raw_string() {
assert_eq!(setup(&mk_sh(), "r###\"\"#a\\b\x00c\"\"###".to_owned()).next_token().tok, assert_eq!(setup(&mk_sh(),
"r###\"\"#a\\b\x00c\"\"###".to_strbuf()).next_token()
.tok,
token::LIT_STR_RAW(token::str_to_ident("\"#a\\b\x00c\""), 3)); token::LIT_STR_RAW(token::str_to_ident("\"#a\\b\x00c\""), 3));
} }
@ -1127,7 +1147,8 @@ mod test {
} }
#[test] fn nested_block_comments() { #[test] fn nested_block_comments() {
assert_eq!(setup(&mk_sh(), "/* /* */ */'a'".to_owned()).next_token().tok, assert_eq!(setup(&mk_sh(),
"/* /* */ */'a'".to_strbuf()).next_token().tok,
token::LIT_CHAR('a')); token::LIT_CHAR('a'));
} }

View file

@ -77,8 +77,8 @@ pub fn parse_crate_attrs_from_file(
inner inner
} }
pub fn parse_crate_from_source_str(name: ~str, pub fn parse_crate_from_source_str(name: StrBuf,
source: ~str, source: StrBuf,
cfg: ast::CrateConfig, cfg: ast::CrateConfig,
sess: &ParseSess) sess: &ParseSess)
-> ast::Crate { -> ast::Crate {
@ -89,8 +89,8 @@ pub fn parse_crate_from_source_str(name: ~str,
maybe_aborted(p.parse_crate_mod(),p) maybe_aborted(p.parse_crate_mod(),p)
} }
pub fn parse_crate_attrs_from_source_str(name: ~str, pub fn parse_crate_attrs_from_source_str(name: StrBuf,
source: ~str, source: StrBuf,
cfg: ast::CrateConfig, cfg: ast::CrateConfig,
sess: &ParseSess) sess: &ParseSess)
-> Vec<ast::Attribute> { -> Vec<ast::Attribute> {
@ -102,8 +102,8 @@ pub fn parse_crate_attrs_from_source_str(name: ~str,
inner inner
} }
pub fn parse_expr_from_source_str(name: ~str, pub fn parse_expr_from_source_str(name: StrBuf,
source: ~str, source: StrBuf,
cfg: ast::CrateConfig, cfg: ast::CrateConfig,
sess: &ParseSess) sess: &ParseSess)
-> @ast::Expr { -> @ast::Expr {
@ -111,8 +111,8 @@ pub fn parse_expr_from_source_str(name: ~str,
maybe_aborted(p.parse_expr(), p) maybe_aborted(p.parse_expr(), p)
} }
pub fn parse_item_from_source_str(name: ~str, pub fn parse_item_from_source_str(name: StrBuf,
source: ~str, source: StrBuf,
cfg: ast::CrateConfig, cfg: ast::CrateConfig,
sess: &ParseSess) sess: &ParseSess)
-> Option<@ast::Item> { -> Option<@ast::Item> {
@ -121,8 +121,8 @@ pub fn parse_item_from_source_str(name: ~str,
maybe_aborted(p.parse_item(attrs),p) maybe_aborted(p.parse_item(attrs),p)
} }
pub fn parse_meta_from_source_str(name: ~str, pub fn parse_meta_from_source_str(name: StrBuf,
source: ~str, source: StrBuf,
cfg: ast::CrateConfig, cfg: ast::CrateConfig,
sess: &ParseSess) sess: &ParseSess)
-> @ast::MetaItem { -> @ast::MetaItem {
@ -130,8 +130,8 @@ pub fn parse_meta_from_source_str(name: ~str,
maybe_aborted(p.parse_meta_item(),p) maybe_aborted(p.parse_meta_item(),p)
} }
pub fn parse_stmt_from_source_str(name: ~str, pub fn parse_stmt_from_source_str(name: StrBuf,
source: ~str, source: StrBuf,
cfg: ast::CrateConfig, cfg: ast::CrateConfig,
attrs: Vec<ast::Attribute> , attrs: Vec<ast::Attribute> ,
sess: &ParseSess) sess: &ParseSess)
@ -145,8 +145,8 @@ pub fn parse_stmt_from_source_str(name: ~str,
maybe_aborted(p.parse_stmt(attrs),p) maybe_aborted(p.parse_stmt(attrs),p)
} }
pub fn parse_tts_from_source_str(name: ~str, pub fn parse_tts_from_source_str(name: StrBuf,
source: ~str, source: StrBuf,
cfg: ast::CrateConfig, cfg: ast::CrateConfig,
sess: &ParseSess) sess: &ParseSess)
-> Vec<ast::TokenTree> { -> Vec<ast::TokenTree> {
@ -164,8 +164,8 @@ pub fn parse_tts_from_source_str(name: ~str,
// Create a new parser from a source string // Create a new parser from a source string
pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess,
cfg: ast::CrateConfig, cfg: ast::CrateConfig,
name: ~str, name: StrBuf,
source: ~str) source: StrBuf)
-> Parser<'a> { -> Parser<'a> {
filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg) filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg)
} }
@ -225,8 +225,8 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
}; };
match str::from_utf8(bytes.as_slice()) { match str::from_utf8(bytes.as_slice()) {
Some(s) => { Some(s) => {
return string_to_filemap(sess, s.to_owned(), return string_to_filemap(sess, s.to_strbuf(),
path.as_str().unwrap().to_str()) path.as_str().unwrap().to_strbuf())
} }
None => err(format!("{} is not UTF-8 encoded", path.display())), None => err(format!("{} is not UTF-8 encoded", path.display())),
} }
@ -235,7 +235,7 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
// given a session and a string, add the string to // given a session and a string, add the string to
// the session's codemap and return the new filemap // the session's codemap and return the new filemap
pub fn string_to_filemap(sess: &ParseSess, source: ~str, path: ~str) pub fn string_to_filemap(sess: &ParseSess, source: StrBuf, path: StrBuf)
-> Rc<FileMap> { -> Rc<FileMap> {
sess.span_diagnostic.cm.new_filemap(path, source) sess.span_diagnostic.cm.new_filemap(path, source)
} }
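
`string_to_filemap` now takes owned `StrBuf`s because the codemap stores both the path and the source for its whole lifetime. A toy illustration of that ownership handoff with stand-in types (not the libsyntax `CodeMap`):

    use std::rc::Rc;

    struct FileMap {
        name: String,
        src: String,
    }

    struct CodeMap {
        files: Vec<Rc<FileMap>>,
    }

    impl CodeMap {
        // Takes name and src by value: the codemap keeps them alive.
        fn new_filemap(&mut self, name: String, src: String) -> Rc<FileMap> {
            let fm = Rc::new(FileMap { name, src });
            self.files.push(fm.clone());
            fm
        }
    }

    fn main() {
        let mut cm = CodeMap { files: Vec::new() };
        let fm = cm.new_filemap("bogofile".to_string(), "fn main() {}".to_string());
        assert_eq!(fm.name, "bogofile");
        assert_eq!(fm.src.len(), 12);
    }
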
@ -284,11 +284,11 @@ mod test {
use util::parser_testing::{string_to_expr, string_to_item}; use util::parser_testing::{string_to_expr, string_to_item};
use util::parser_testing::string_to_stmt; use util::parser_testing::string_to_stmt;
fn to_json_str<'a, E: Encodable<json::Encoder<'a>, io::IoError>>(val: &E) -> ~str { fn to_json_str<'a, E: Encodable<json::Encoder<'a>, io::IoError>>(val: &E) -> StrBuf {
let mut writer = MemWriter::new(); let mut writer = MemWriter::new();
let mut encoder = json::Encoder::new(&mut writer as &mut io::Writer); let mut encoder = json::Encoder::new(&mut writer as &mut io::Writer);
let _ = val.encode(&mut encoder); let _ = val.encode(&mut encoder);
str::from_utf8(writer.unwrap().as_slice()).unwrap().to_owned() str::from_utf8(writer.unwrap().as_slice()).unwrap().to_strbuf()
} }
// produce a codemap::span // produce a codemap::span
@ -297,7 +297,7 @@ mod test {
} }
#[test] fn path_exprs_1() { #[test] fn path_exprs_1() {
assert!(string_to_expr("a".to_owned()) == assert!(string_to_expr("a".to_strbuf()) ==
@ast::Expr{ @ast::Expr{
id: ast::DUMMY_NODE_ID, id: ast::DUMMY_NODE_ID,
node: ast::ExprPath(ast::Path { node: ast::ExprPath(ast::Path {
@ -316,7 +316,7 @@ mod test {
} }
#[test] fn path_exprs_2 () { #[test] fn path_exprs_2 () {
assert!(string_to_expr("::a::b".to_owned()) == assert!(string_to_expr("::a::b".to_strbuf()) ==
@ast::Expr { @ast::Expr {
id: ast::DUMMY_NODE_ID, id: ast::DUMMY_NODE_ID,
node: ast::ExprPath(ast::Path { node: ast::ExprPath(ast::Path {
@ -341,12 +341,12 @@ mod test {
#[should_fail] #[should_fail]
#[test] fn bad_path_expr_1() { #[test] fn bad_path_expr_1() {
string_to_expr("::abc::def::return".to_owned()); string_to_expr("::abc::def::return".to_strbuf());
} }
// check the token-tree-ization of macros // check the token-tree-ization of macros
#[test] fn string_to_tts_macro () { #[test] fn string_to_tts_macro () {
let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_owned()); let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_strbuf());
let tts: &[ast::TokenTree] = tts.as_slice(); let tts: &[ast::TokenTree] = tts.as_slice();
match tts { match tts {
[ast::TTTok(_,_), [ast::TTTok(_,_),
@ -399,7 +399,7 @@ mod test {
} }
#[test] fn string_to_tts_1 () { #[test] fn string_to_tts_1 () {
let tts = string_to_tts("fn a (b : int) { b; }".to_owned()); let tts = string_to_tts("fn a (b : int) { b; }".to_strbuf());
assert_eq!(to_json_str(&tts), assert_eq!(to_json_str(&tts),
"[\ "[\
{\ {\
@ -523,12 +523,12 @@ mod test {
]\ ]\
]\ ]\
}\ }\
]".to_owned() ]".to_strbuf()
); );
} }
#[test] fn ret_expr() { #[test] fn ret_expr() {
assert!(string_to_expr("return d".to_owned()) == assert!(string_to_expr("return d".to_strbuf()) ==
@ast::Expr{ @ast::Expr{
id: ast::DUMMY_NODE_ID, id: ast::DUMMY_NODE_ID,
node:ast::ExprRet(Some(@ast::Expr{ node:ast::ExprRet(Some(@ast::Expr{
@ -551,7 +551,7 @@ mod test {
} }
#[test] fn parse_stmt_1 () { #[test] fn parse_stmt_1 () {
assert!(string_to_stmt("b;".to_owned()) == assert!(string_to_stmt("b;".to_strbuf()) ==
@Spanned{ @Spanned{
node: ast::StmtExpr(@ast::Expr { node: ast::StmtExpr(@ast::Expr {
id: ast::DUMMY_NODE_ID, id: ast::DUMMY_NODE_ID,
@ -578,7 +578,7 @@ mod test {
#[test] fn parse_ident_pat () { #[test] fn parse_ident_pat () {
let sess = new_parse_sess(); let sess = new_parse_sess();
let mut parser = string_to_parser(&sess, "b".to_owned()); let mut parser = string_to_parser(&sess, "b".to_strbuf());
assert!(parser.parse_pat() == assert!(parser.parse_pat() ==
@ast::Pat{id: ast::DUMMY_NODE_ID, @ast::Pat{id: ast::DUMMY_NODE_ID,
node: ast::PatIdent( node: ast::PatIdent(
@ -602,7 +602,7 @@ mod test {
// check the contents of the tt manually: // check the contents of the tt manually:
#[test] fn parse_fundecl () { #[test] fn parse_fundecl () {
// this test depends on the intern order of "fn" and "int" // this test depends on the intern order of "fn" and "int"
assert!(string_to_item("fn a (b : int) { b; }".to_owned()) == assert!(string_to_item("fn a (b : int) { b; }".to_strbuf()) ==
Some( Some(
@ast::Item{ident:str_to_ident("a"), @ast::Item{ident:str_to_ident("a"),
attrs:Vec::new(), attrs:Vec::new(),
@ -694,13 +694,13 @@ mod test {
#[test] fn parse_exprs () { #[test] fn parse_exprs () {
        // just make sure that they parse...                 // just make sure that they parse...
string_to_expr("3 + 4".to_owned()); string_to_expr("3 + 4".to_strbuf());
string_to_expr("a::z.froob(b,@(987+3))".to_owned()); string_to_expr("a::z.froob(b,@(987+3))".to_strbuf());
} }
#[test] fn attrs_fix_bug () { #[test] fn attrs_fix_bug () {
string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag]) string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
-> Result<@Writer, ~str> { -> Result<@Writer, StrBuf> {
#[cfg(windows)] #[cfg(windows)]
fn wb() -> c_int { fn wb() -> c_int {
(O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int (O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int
@ -710,7 +710,7 @@ mod test {
fn wb() -> c_int { O_WRONLY as c_int } fn wb() -> c_int { O_WRONLY as c_int }
let mut fflags: c_int = wb(); let mut fflags: c_int = wb();
}".to_owned()); }".to_strbuf());
} }
} }

View file

@ -123,7 +123,7 @@ impl<'a> ParserObsoleteMethods for Parser<'a> {
), ),
ObsoleteManagedString => ( ObsoleteManagedString => (
"managed string", "managed string",
"use `Rc<~str>` instead of a managed string" "use `Rc<StrBuf>` instead of a managed string"
), ),
ObsoleteManagedVec => ( ObsoleteManagedVec => (
"managed vector", "managed vector",

View file

@ -345,12 +345,12 @@ fn is_plain_ident_or_underscore(t: &token::Token) -> bool {
impl<'a> Parser<'a> { impl<'a> Parser<'a> {
// convert a token to a string using self's reader // convert a token to a string using self's reader
pub fn token_to_str(token: &token::Token) -> ~str { pub fn token_to_str(token: &token::Token) -> StrBuf {
token::to_str(token) token::to_str(token)
} }
// convert the current token to a string using self's reader // convert the current token to a string using self's reader
pub fn this_token_to_str(&mut self) -> ~str { pub fn this_token_to_str(&mut self) -> StrBuf {
Parser::token_to_str(&self.token) Parser::token_to_str(&self.token)
} }
@ -385,11 +385,17 @@ impl<'a> Parser<'a> {
pub fn expect_one_of(&mut self, pub fn expect_one_of(&mut self,
edible: &[token::Token], edible: &[token::Token],
inedible: &[token::Token]) { inedible: &[token::Token]) {
fn tokens_to_str(tokens: &[token::Token]) -> ~str { fn tokens_to_str(tokens: &[token::Token]) -> StrBuf {
let mut i = tokens.iter(); let mut i = tokens.iter();
// This might be a sign we need a connect method on Iterator. // This might be a sign we need a connect method on Iterator.
let b = i.next().map_or("".to_owned(), |t| Parser::token_to_str(t)); let b = i.next()
i.fold(b, |b,a| b + "`, `" + Parser::token_to_str(a)) .map_or("".to_strbuf(), |t| Parser::token_to_str(t));
i.fold(b, |b,a| {
let mut b = b;
b.push_str("`, `");
b.push_str(Parser::token_to_str(a).as_slice());
b
})
} }
if edible.contains(&self.token) { if edible.contains(&self.token) {
self.bump(); self.bump();
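
The `tokens_to_str` helper above replaces `+` concatenation with an explicit fold that appends into a single buffer; the comment about a `connect` method on `Iterator` anticipates the `join` that later slices provide. A runnable sketch of both, with hypothetical token strings:

    fn tokens_to_str(tokens: &[&str]) -> String {
        let mut i = tokens.iter();
        let b = i.next().map_or(String::new(), |t| t.to_string());
        // Reuse one buffer instead of allocating per `+`.
        i.fold(b, |mut b, &a| {
            b.push_str("`, `");
            b.push_str(a);
            b
        })
    }

    fn main() {
        assert_eq!(tokens_to_str(&[";", ",", ")"]), ";`, `,`, `)");
        // The later one-liner for the same join:
        assert_eq!([";", ",", ")"].join("`, `"), ";`, `,`, `)");
    }
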
@ -3898,7 +3904,7 @@ impl<'a> Parser<'a> {
(ident, ItemImpl(generics, opt_trait, ty, meths), Some(inner_attrs)) (ident, ItemImpl(generics, opt_trait, ty, meths), Some(inner_attrs))
} }
// parse a::B<~str,int> // parse a::B<StrBuf,int>
fn parse_trait_ref(&mut self) -> TraitRef { fn parse_trait_ref(&mut self) -> TraitRef {
ast::TraitRef { ast::TraitRef {
path: self.parse_path(LifetimeAndTypesWithoutColons).path, path: self.parse_path(LifetimeAndTypesWithoutColons).path,
@ -3906,7 +3912,7 @@ impl<'a> Parser<'a> {
} }
} }
// parse B + C<~str,int> + D // parse B + C<StrBuf,int> + D
fn parse_trait_ref_list(&mut self, ket: &token::Token) -> Vec<TraitRef> { fn parse_trait_ref_list(&mut self, ket: &token::Token) -> Vec<TraitRef> {
self.parse_seq_to_before_end( self.parse_seq_to_before_end(
ket, ket,

View file

@ -137,58 +137,62 @@ impl fmt::Show for Nonterminal {
} }
} }
pub fn binop_to_str(o: BinOp) -> ~str { pub fn binop_to_str(o: BinOp) -> StrBuf {
match o { match o {
PLUS => "+".to_owned(), PLUS => "+".to_strbuf(),
MINUS => "-".to_owned(), MINUS => "-".to_strbuf(),
STAR => "*".to_owned(), STAR => "*".to_strbuf(),
SLASH => "/".to_owned(), SLASH => "/".to_strbuf(),
PERCENT => "%".to_owned(), PERCENT => "%".to_strbuf(),
CARET => "^".to_owned(), CARET => "^".to_strbuf(),
AND => "&".to_owned(), AND => "&".to_strbuf(),
OR => "|".to_owned(), OR => "|".to_strbuf(),
SHL => "<<".to_owned(), SHL => "<<".to_strbuf(),
SHR => ">>".to_owned() SHR => ">>".to_strbuf()
} }
} }
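
An alternative worth noting: since every arm of `binop_to_str` yields a fixed literal, the function could return `&'static str` and let the rare owning caller (the `BINOPEQ` concatenation) allocate. This is a design sketch under that assumption, not what the commit does:

    enum BinOp { Plus, Shr }

    fn binop_to_str(o: BinOp) -> &'static str {
        match o {
            BinOp::Plus => "+",
            BinOp::Shr => ">>",
        }
    }

    // Only the `op=` form needs an owned buffer.
    fn binopeq_to_str(o: BinOp) -> String {
        let mut s = String::from(binop_to_str(o));
        s.push('=');
        s
    }

    fn main() {
        assert_eq!(binop_to_str(BinOp::Shr), ">>");
        assert_eq!(binopeq_to_str(BinOp::Plus), "+=");
    }
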
pub fn to_str(t: &Token) -> ~str { pub fn to_str(t: &Token) -> StrBuf {
match *t { match *t {
EQ => "=".to_owned(), EQ => "=".to_strbuf(),
LT => "<".to_owned(), LT => "<".to_strbuf(),
LE => "<=".to_owned(), LE => "<=".to_strbuf(),
EQEQ => "==".to_owned(), EQEQ => "==".to_strbuf(),
NE => "!=".to_owned(), NE => "!=".to_strbuf(),
GE => ">=".to_owned(), GE => ">=".to_strbuf(),
GT => ">".to_owned(), GT => ">".to_strbuf(),
NOT => "!".to_owned(), NOT => "!".to_strbuf(),
TILDE => "~".to_owned(), TILDE => "~".to_strbuf(),
OROR => "||".to_owned(), OROR => "||".to_strbuf(),
ANDAND => "&&".to_owned(), ANDAND => "&&".to_strbuf(),
BINOP(op) => binop_to_str(op), BINOP(op) => binop_to_str(op),
BINOPEQ(op) => binop_to_str(op) + "=", BINOPEQ(op) => {
let mut s = binop_to_str(op);
s.push_str("=");
s
}
/* Structural symbols */ /* Structural symbols */
AT => "@".to_owned(), AT => "@".to_strbuf(),
DOT => ".".to_owned(), DOT => ".".to_strbuf(),
DOTDOT => "..".to_owned(), DOTDOT => "..".to_strbuf(),
DOTDOTDOT => "...".to_owned(), DOTDOTDOT => "...".to_strbuf(),
COMMA => ",".to_owned(), COMMA => ",".to_strbuf(),
SEMI => ";".to_owned(), SEMI => ";".to_strbuf(),
COLON => ":".to_owned(), COLON => ":".to_strbuf(),
MOD_SEP => "::".to_owned(), MOD_SEP => "::".to_strbuf(),
RARROW => "->".to_owned(), RARROW => "->".to_strbuf(),
LARROW => "<-".to_owned(), LARROW => "<-".to_strbuf(),
DARROW => "<->".to_owned(), DARROW => "<->".to_strbuf(),
FAT_ARROW => "=>".to_owned(), FAT_ARROW => "=>".to_strbuf(),
LPAREN => "(".to_owned(), LPAREN => "(".to_strbuf(),
RPAREN => ")".to_owned(), RPAREN => ")".to_strbuf(),
LBRACKET => "[".to_owned(), LBRACKET => "[".to_strbuf(),
RBRACKET => "]".to_owned(), RBRACKET => "]".to_strbuf(),
LBRACE => "{".to_owned(), LBRACE => "{".to_strbuf(),
RBRACE => "}".to_owned(), RBRACE => "}".to_strbuf(),
POUND => "#".to_owned(), POUND => "#".to_strbuf(),
DOLLAR => "$".to_owned(), DOLLAR => "$".to_strbuf(),
/* Literals */ /* Literals */
LIT_CHAR(c) => { LIT_CHAR(c) => {
@ -197,63 +201,64 @@ pub fn to_str(t: &Token) -> ~str {
res.push_char(c); res.push_char(c);
}); });
res.push_char('\''); res.push_char('\'');
res.into_owned() res
} }
LIT_INT(i, t) => ast_util::int_ty_to_str(t, Some(i)), LIT_INT(i, t) => ast_util::int_ty_to_str(t, Some(i)),
LIT_UINT(u, t) => ast_util::uint_ty_to_str(t, Some(u)), LIT_UINT(u, t) => ast_util::uint_ty_to_str(t, Some(u)),
LIT_INT_UNSUFFIXED(i) => { i.to_str() } LIT_INT_UNSUFFIXED(i) => { i.to_str().to_strbuf() }
LIT_FLOAT(s, t) => { LIT_FLOAT(s, t) => {
let mut body = StrBuf::from_str(get_ident(s).get()); let mut body = StrBuf::from_str(get_ident(s).get());
if body.as_slice().ends_with(".") { if body.as_slice().ends_with(".") {
body.push_char('0'); // `10.f` is not a float literal body.push_char('0'); // `10.f` is not a float literal
} }
body.push_str(ast_util::float_ty_to_str(t)); body.push_str(ast_util::float_ty_to_str(t).as_slice());
body.into_owned() body
} }
LIT_FLOAT_UNSUFFIXED(s) => { LIT_FLOAT_UNSUFFIXED(s) => {
let mut body = StrBuf::from_str(get_ident(s).get()); let mut body = StrBuf::from_str(get_ident(s).get());
if body.as_slice().ends_with(".") { if body.as_slice().ends_with(".") {
body.push_char('0'); // `10.f` is not a float literal body.push_char('0'); // `10.f` is not a float literal
} }
body.into_owned() body
} }
LIT_STR(s) => { LIT_STR(s) => {
format!("\"{}\"", get_ident(s).get().escape_default()) (format!("\"{}\"", get_ident(s).get().escape_default())).to_strbuf()
} }
LIT_STR_RAW(s, n) => { LIT_STR_RAW(s, n) => {
format!("r{delim}\"{string}\"{delim}", (format!("r{delim}\"{string}\"{delim}",
delim="#".repeat(n), string=get_ident(s)) delim="#".repeat(n), string=get_ident(s))).to_strbuf()
} }
/* Name components */ /* Name components */
IDENT(s, _) => get_ident(s).get().to_str(), IDENT(s, _) => get_ident(s).get().to_strbuf(),
LIFETIME(s) => { LIFETIME(s) => {
format!("'{}", get_ident(s)) (format!("'{}", get_ident(s))).to_strbuf()
} }
UNDERSCORE => "_".to_owned(), UNDERSCORE => "_".to_strbuf(),
/* Other */ /* Other */
DOC_COMMENT(s) => get_ident(s).get().to_str(), DOC_COMMENT(s) => get_ident(s).get().to_strbuf(),
EOF => "<eof>".to_owned(), EOF => "<eof>".to_strbuf(),
INTERPOLATED(ref nt) => { INTERPOLATED(ref nt) => {
match nt { match nt {
&NtExpr(e) => ::print::pprust::expr_to_str(e), &NtExpr(e) => ::print::pprust::expr_to_str(e),
&NtMeta(e) => ::print::pprust::meta_item_to_str(e), &NtMeta(e) => ::print::pprust::meta_item_to_str(e),
_ => { _ => {
"an interpolated ".to_owned() + let mut s = "an interpolated ".to_strbuf();
match *nt { match *nt {
NtItem(..) => "item".to_owned(), NtItem(..) => s.push_str("item"),
NtBlock(..) => "block".to_owned(), NtBlock(..) => s.push_str("block"),
NtStmt(..) => "statement".to_owned(), NtStmt(..) => s.push_str("statement"),
NtPat(..) => "pattern".to_owned(), NtPat(..) => s.push_str("pattern"),
NtMeta(..) => fail!("should have been handled"), NtMeta(..) => fail!("should have been handled"),
NtExpr(..) => fail!("should have been handled above"), NtExpr(..) => fail!("should have been handled above"),
NtTy(..) => "type".to_owned(), NtTy(..) => s.push_str("type"),
NtIdent(..) => "identifier".to_owned(), NtIdent(..) => s.push_str("identifier"),
NtPath(..) => "path".to_owned(), NtPath(..) => s.push_str("path"),
NtTT(..) => "tt".to_owned(), NtTT(..) => s.push_str("tt"),
NtMatchers(..) => "matcher sequence".to_owned() NtMatchers(..) => s.push_str("matcher sequence")
} };
s
} }
} }
} }
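
The `INTERPOLATED` arm now builds "an interpolated ..." with `push_str` instead of `+`. The later idiom for the same job is a `Display` impl, so each caller decides whether to allocate; a sketch with a cut-down nonterminal enum (illustrative, not the real `Nonterminal`):

    use std::fmt;

    enum Nonterminal { Item, Block }

    impl fmt::Display for Nonterminal {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            let name = match self {
                Nonterminal::Item => "item",
                Nonterminal::Block => "block",
            };
            write!(f, "an interpolated {}", name)
        }
    }

    fn main() {
        assert_eq!(Nonterminal::Item.to_string(), "an interpolated item");
        assert_eq!(Nonterminal::Block.to_string(), "an interpolated block");
    }
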

View file

@ -84,7 +84,7 @@ pub struct BeginToken {
#[deriving(Clone)] #[deriving(Clone)]
pub enum Token { pub enum Token {
String(~str, int), String(StrBuf, int),
Break(BreakToken), Break(BreakToken),
Begin(BeginToken), Begin(BeginToken),
End, End,
@ -109,13 +109,13 @@ impl Token {
} }
} }
pub fn tok_str(t: Token) -> ~str { pub fn tok_str(t: Token) -> StrBuf {
match t { match t {
String(s, len) => return format!("STR({},{})", s, len), String(s, len) => return format!("STR({},{})", s, len).to_strbuf(),
Break(_) => return "BREAK".to_owned(), Break(_) => return "BREAK".to_strbuf(),
Begin(_) => return "BEGIN".to_owned(), Begin(_) => return "BEGIN".to_strbuf(),
End => return "END".to_owned(), End => return "END".to_strbuf(),
Eof => return "EOF".to_owned() Eof => return "EOF".to_strbuf()
} }
} }
@ -124,7 +124,7 @@ pub fn buf_str(toks: Vec<Token>,
left: uint, left: uint,
right: uint, right: uint,
lim: uint) lim: uint)
-> ~str { -> StrBuf {
let n = toks.len(); let n = toks.len();
assert_eq!(n, szs.len()); assert_eq!(n, szs.len());
let mut i = left; let mut i = left;
@ -140,7 +140,7 @@ pub fn buf_str(toks: Vec<Token>,
i %= n; i %= n;
} }
s.push_char(']'); s.push_char(']');
return s.into_owned(); return s.into_strbuf();
} }
pub enum PrintStackBreak { pub enum PrintStackBreak {
@ -585,7 +585,7 @@ impl Printer {
assert_eq!(l, len); assert_eq!(l, len);
// assert!(l <= space); // assert!(l <= space);
self.space -= len; self.space -= len;
self.print_str(s) self.print_str(s.as_slice())
} }
Eof => { Eof => {
// Eof should never get here. // Eof should never get here.
@ -625,15 +625,15 @@ pub fn end(p: &mut Printer) -> io::IoResult<()> { p.pretty_print(End) }
pub fn eof(p: &mut Printer) -> io::IoResult<()> { p.pretty_print(Eof) } pub fn eof(p: &mut Printer) -> io::IoResult<()> { p.pretty_print(Eof) }
pub fn word(p: &mut Printer, wrd: &str) -> io::IoResult<()> { pub fn word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {
p.pretty_print(String(/* bad */ wrd.to_str(), wrd.len() as int)) p.pretty_print(String(/* bad */ wrd.to_strbuf(), wrd.len() as int))
} }
pub fn huge_word(p: &mut Printer, wrd: &str) -> io::IoResult<()> { pub fn huge_word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {
p.pretty_print(String(/* bad */ wrd.to_str(), SIZE_INFINITY)) p.pretty_print(String(/* bad */ wrd.to_strbuf(), SIZE_INFINITY))
} }
pub fn zero_word(p: &mut Printer, wrd: &str) -> io::IoResult<()> { pub fn zero_word(p: &mut Printer, wrd: &str) -> io::IoResult<()> {
p.pretty_print(String(/* bad */ wrd.to_str(), 0)) p.pretty_print(String(/* bad */ wrd.to_strbuf(), 0))
} }
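
`Token::String` in the pretty-printer now owns a `StrBuf`, so `word`, `huge_word`, and `zero_word` each copy their argument into the token stream; the `/* bad */` markers flag exactly that cost (borrowed or `Cow` words would be one obvious follow-up). A minimal stand-in showing the shape; the types here are invented, not the printer's real ring buffer:

    enum Token {
        String(String, isize),
    }

    struct Printer {
        tokens: Vec<Token>,
    }

    impl Printer {
        fn pretty_print(&mut self, t: Token) {
            self.tokens.push(t); // the real printer feeds a ring buffer instead
        }
    }

    fn word(p: &mut Printer, wrd: &str) {
        // Every call copies wrd into an owned String, hence `/* bad */`.
        p.pretty_print(Token::String(wrd.to_string(), wrd.len() as isize));
    }

    fn main() {
        let mut p = Printer { tokens: Vec::new() };
        word(&mut p, "fn");
        match &p.tokens[0] {
            Token::String(s, len) => {
                assert_eq!(s, "fn");
                assert_eq!(*len, 2);
            }
        }
    }
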
pub fn spaces(p: &mut Printer, n: uint) -> io::IoResult<()> { pub fn spaces(p: &mut Printer, n: uint) -> io::IoResult<()> {

View file

@ -97,7 +97,7 @@ pub static default_columns: uint = 78u;
pub fn print_crate<'a>(cm: &'a CodeMap, pub fn print_crate<'a>(cm: &'a CodeMap,
span_diagnostic: &diagnostic::SpanHandler, span_diagnostic: &diagnostic::SpanHandler,
krate: &ast::Crate, krate: &ast::Crate,
filename: ~str, filename: StrBuf,
input: &mut io::Reader, input: &mut io::Reader,
out: Box<io::Writer>, out: Box<io::Writer>,
ann: &'a PpAnn, ann: &'a PpAnn,
@ -132,7 +132,7 @@ pub fn print_crate<'a>(cm: &'a CodeMap,
eof(&mut s.s) eof(&mut s.s)
} }
pub fn to_str(f: |&mut State| -> IoResult<()>) -> ~str { pub fn to_str(f: |&mut State| -> IoResult<()>) -> StrBuf {
let mut s = rust_printer(box MemWriter::new()); let mut s = rust_printer(box MemWriter::new());
f(&mut s).unwrap(); f(&mut s).unwrap();
eof(&mut s.s).unwrap(); eof(&mut s.s).unwrap();
@ -143,65 +143,65 @@ pub fn to_str(f: |&mut State| -> IoResult<()>) -> ~str {
let (_, wr): (uint, Box<MemWriter>) = cast::transmute_copy(&s.s.out); let (_, wr): (uint, Box<MemWriter>) = cast::transmute_copy(&s.s.out);
let result = str::from_utf8_owned(wr.get_ref().to_owned()).unwrap(); let result = str::from_utf8_owned(wr.get_ref().to_owned()).unwrap();
cast::forget(wr); cast::forget(wr);
result result.to_strbuf()
} }
} }
pub fn ty_to_str(ty: &ast::Ty) -> ~str { pub fn ty_to_str(ty: &ast::Ty) -> StrBuf {
to_str(|s| s.print_type(ty)) to_str(|s| s.print_type(ty))
} }
pub fn pat_to_str(pat: &ast::Pat) -> ~str { pub fn pat_to_str(pat: &ast::Pat) -> StrBuf {
to_str(|s| s.print_pat(pat)) to_str(|s| s.print_pat(pat))
} }
pub fn expr_to_str(e: &ast::Expr) -> ~str { pub fn expr_to_str(e: &ast::Expr) -> StrBuf {
to_str(|s| s.print_expr(e)) to_str(|s| s.print_expr(e))
} }
pub fn lifetime_to_str(e: &ast::Lifetime) -> ~str { pub fn lifetime_to_str(e: &ast::Lifetime) -> StrBuf {
to_str(|s| s.print_lifetime(e)) to_str(|s| s.print_lifetime(e))
} }
pub fn tt_to_str(tt: &ast::TokenTree) -> ~str { pub fn tt_to_str(tt: &ast::TokenTree) -> StrBuf {
to_str(|s| s.print_tt(tt)) to_str(|s| s.print_tt(tt))
} }
pub fn tts_to_str(tts: &[ast::TokenTree]) -> ~str { pub fn tts_to_str(tts: &[ast::TokenTree]) -> StrBuf {
to_str(|s| s.print_tts(&tts)) to_str(|s| s.print_tts(&tts))
} }
pub fn stmt_to_str(stmt: &ast::Stmt) -> ~str { pub fn stmt_to_str(stmt: &ast::Stmt) -> StrBuf {
to_str(|s| s.print_stmt(stmt)) to_str(|s| s.print_stmt(stmt))
} }
pub fn item_to_str(i: &ast::Item) -> ~str { pub fn item_to_str(i: &ast::Item) -> StrBuf {
to_str(|s| s.print_item(i)) to_str(|s| s.print_item(i))
} }
pub fn generics_to_str(generics: &ast::Generics) -> ~str { pub fn generics_to_str(generics: &ast::Generics) -> StrBuf {
to_str(|s| s.print_generics(generics)) to_str(|s| s.print_generics(generics))
} }
pub fn ty_method_to_str(p: &ast::TypeMethod) -> ~str { pub fn ty_method_to_str(p: &ast::TypeMethod) -> StrBuf {
to_str(|s| s.print_ty_method(p)) to_str(|s| s.print_ty_method(p))
} }
pub fn method_to_str(p: &ast::Method) -> ~str { pub fn method_to_str(p: &ast::Method) -> StrBuf {
to_str(|s| s.print_method(p)) to_str(|s| s.print_method(p))
} }
pub fn fn_block_to_str(p: &ast::FnDecl) -> ~str { pub fn fn_block_to_str(p: &ast::FnDecl) -> StrBuf {
to_str(|s| s.print_fn_block_args(p)) to_str(|s| s.print_fn_block_args(p))
} }
pub fn path_to_str(p: &ast::Path) -> ~str { pub fn path_to_str(p: &ast::Path) -> StrBuf {
to_str(|s| s.print_path(p, false)) to_str(|s| s.print_path(p, false))
} }
pub fn fun_to_str(decl: &ast::FnDecl, fn_style: ast::FnStyle, name: ast::Ident, pub fn fun_to_str(decl: &ast::FnDecl, fn_style: ast::FnStyle, name: ast::Ident,
opt_explicit_self: Option<ast::ExplicitSelf_>, opt_explicit_self: Option<ast::ExplicitSelf_>,
generics: &ast::Generics) -> ~str { generics: &ast::Generics) -> StrBuf {
to_str(|s| { to_str(|s| {
try!(s.print_fn(decl, Some(fn_style), abi::Rust, try!(s.print_fn(decl, Some(fn_style), abi::Rust,
name, generics, opt_explicit_self, ast::Inherited)); name, generics, opt_explicit_self, ast::Inherited));
@ -210,7 +210,7 @@ pub fn fun_to_str(decl: &ast::FnDecl, fn_style: ast::FnStyle, name: ast::Ident,
}) })
} }
pub fn block_to_str(blk: &ast::Block) -> ~str { pub fn block_to_str(blk: &ast::Block) -> StrBuf {
to_str(|s| { to_str(|s| {
// containing cbox, will be closed by print-block at } // containing cbox, will be closed by print-block at }
try!(s.cbox(indent_unit)); try!(s.cbox(indent_unit));
@ -220,30 +220,30 @@ pub fn block_to_str(blk: &ast::Block) -> ~str {
}) })
} }
pub fn meta_item_to_str(mi: &ast::MetaItem) -> ~str { pub fn meta_item_to_str(mi: &ast::MetaItem) -> StrBuf {
to_str(|s| s.print_meta_item(mi)) to_str(|s| s.print_meta_item(mi))
} }
pub fn attribute_to_str(attr: &ast::Attribute) -> ~str { pub fn attribute_to_str(attr: &ast::Attribute) -> StrBuf {
to_str(|s| s.print_attribute(attr)) to_str(|s| s.print_attribute(attr))
} }
pub fn lit_to_str(l: &ast::Lit) -> ~str { pub fn lit_to_str(l: &ast::Lit) -> StrBuf {
to_str(|s| s.print_literal(l)) to_str(|s| s.print_literal(l))
} }
pub fn explicit_self_to_str(explicit_self: ast::ExplicitSelf_) -> ~str { pub fn explicit_self_to_str(explicit_self: ast::ExplicitSelf_) -> StrBuf {
to_str(|s| s.print_explicit_self(explicit_self, ast::MutImmutable).map(|_| {})) to_str(|s| s.print_explicit_self(explicit_self, ast::MutImmutable).map(|_| {}))
} }
pub fn variant_to_str(var: &ast::Variant) -> ~str { pub fn variant_to_str(var: &ast::Variant) -> StrBuf {
to_str(|s| s.print_variant(var)) to_str(|s| s.print_variant(var))
} }
pub fn visibility_qualified(vis: ast::Visibility, s: &str) -> ~str { pub fn visibility_qualified(vis: ast::Visibility, s: &str) -> StrBuf {
match vis { match vis {
ast::Public => format!("pub {}", s), ast::Public => format!("pub {}", s).to_strbuf(),
ast::Inherited => s.to_owned() ast::Inherited => s.to_strbuf()
} }
} }
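
`visibility_qualified` now returns the `StrBuf` that `format!` produces, which is why the call sites above all grow `.as_slice()`. The same function in present-day Rust, with an illustrative `Visibility` enum:

    enum Visibility { Public, Inherited }

    fn visibility_qualified(vis: Visibility, s: &str) -> String {
        match vis {
            Visibility::Public => format!("pub {}", s), // format! yields an owned String
            Visibility::Inherited => s.to_string(),
        }
    }

    fn main() {
        assert_eq!(visibility_qualified(Visibility::Public, "struct"), "pub struct");
        assert_eq!(visibility_qualified(Visibility::Inherited, "struct"), "struct");
    }
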
@ -366,10 +366,10 @@ impl<'a> State<'a> {
// Synthesizes a comment that was not textually present in the original source // Synthesizes a comment that was not textually present in the original source
// file. // file.
pub fn synth_comment(&mut self, text: ~str) -> IoResult<()> { pub fn synth_comment(&mut self, text: StrBuf) -> IoResult<()> {
try!(word(&mut self.s, "/*")); try!(word(&mut self.s, "/*"));
try!(space(&mut self.s)); try!(space(&mut self.s));
try!(word(&mut self.s, text)); try!(word(&mut self.s, text.as_slice()));
try!(space(&mut self.s)); try!(space(&mut self.s));
word(&mut self.s, "*/") word(&mut self.s, "*/")
} }
@ -552,7 +552,8 @@ impl<'a> State<'a> {
self.end() // end the outer fn box self.end() // end the outer fn box
} }
ast::ForeignItemStatic(t, m) => { ast::ForeignItemStatic(t, m) => {
try!(self.head(visibility_qualified(item.vis, "static"))); try!(self.head(visibility_qualified(item.vis,
"static").as_slice()));
if m { if m {
try!(self.word_space("mut")); try!(self.word_space("mut"));
} }
@ -573,7 +574,8 @@ impl<'a> State<'a> {
try!(self.ann.pre(self, NodeItem(item))); try!(self.ann.pre(self, NodeItem(item)));
match item.node { match item.node {
ast::ItemStatic(ty, m, expr) => { ast::ItemStatic(ty, m, expr) => {
try!(self.head(visibility_qualified(item.vis, "static"))); try!(self.head(visibility_qualified(item.vis,
"static").as_slice()));
if m == ast::MutMutable { if m == ast::MutMutable {
try!(self.word_space("mut")); try!(self.word_space("mut"));
} }
@ -602,7 +604,8 @@ impl<'a> State<'a> {
try!(self.print_block_with_attrs(body, item.attrs.as_slice())); try!(self.print_block_with_attrs(body, item.attrs.as_slice()));
} }
ast::ItemMod(ref _mod) => { ast::ItemMod(ref _mod) => {
try!(self.head(visibility_qualified(item.vis, "mod"))); try!(self.head(visibility_qualified(item.vis,
"mod").as_slice()));
try!(self.print_ident(item.ident)); try!(self.print_ident(item.ident));
try!(self.nbsp()); try!(self.nbsp());
try!(self.bopen()); try!(self.bopen());
@ -619,7 +622,8 @@ impl<'a> State<'a> {
ast::ItemTy(ty, ref params) => { ast::ItemTy(ty, ref params) => {
try!(self.ibox(indent_unit)); try!(self.ibox(indent_unit));
try!(self.ibox(0u)); try!(self.ibox(0u));
try!(self.word_nbsp(visibility_qualified(item.vis, "type"))); try!(self.word_nbsp(visibility_qualified(item.vis,
"type").as_slice()));
try!(self.print_ident(item.ident)); try!(self.print_ident(item.ident));
try!(self.print_generics(params)); try!(self.print_generics(params));
try!(self.end()); // end the inner ibox try!(self.end()); // end the inner ibox
@ -643,12 +647,14 @@ impl<'a> State<'a> {
if struct_def.is_virtual { if struct_def.is_virtual {
try!(self.word_space("virtual")); try!(self.word_space("virtual"));
} }
try!(self.head(visibility_qualified(item.vis, "struct"))); try!(self.head(visibility_qualified(item.vis,
"struct").as_slice()));
try!(self.print_struct(struct_def, generics, item.ident, item.span)); try!(self.print_struct(struct_def, generics, item.ident, item.span));
} }
ast::ItemImpl(ref generics, ref opt_trait, ty, ref methods) => { ast::ItemImpl(ref generics, ref opt_trait, ty, ref methods) => {
try!(self.head(visibility_qualified(item.vis, "impl"))); try!(self.head(visibility_qualified(item.vis,
"impl").as_slice()));
if generics.is_parameterized() { if generics.is_parameterized() {
try!(self.print_generics(generics)); try!(self.print_generics(generics));
try!(space(&mut self.s)); try!(space(&mut self.s));
@ -674,7 +680,8 @@ impl<'a> State<'a> {
try!(self.bclose(item.span)); try!(self.bclose(item.span));
} }
ast::ItemTrait(ref generics, ref sized, ref traits, ref methods) => { ast::ItemTrait(ref generics, ref sized, ref traits, ref methods) => {
try!(self.head(visibility_qualified(item.vis, "trait"))); try!(self.head(visibility_qualified(item.vis,
"trait").as_slice()));
try!(self.print_ident(item.ident)); try!(self.print_ident(item.ident));
try!(self.print_generics(generics)); try!(self.print_generics(generics));
if *sized == ast::DynSize { if *sized == ast::DynSize {
@ -723,7 +730,7 @@ impl<'a> State<'a> {
generics: &ast::Generics, ident: ast::Ident, generics: &ast::Generics, ident: ast::Ident,
span: codemap::Span, span: codemap::Span,
visibility: ast::Visibility) -> IoResult<()> { visibility: ast::Visibility) -> IoResult<()> {
try!(self.head(visibility_qualified(visibility, "enum"))); try!(self.head(visibility_qualified(visibility, "enum").as_slice()));
try!(self.print_ident(ident)); try!(self.print_ident(ident));
try!(self.print_generics(generics)); try!(self.print_generics(generics));
try!(space(&mut self.s)); try!(space(&mut self.s));
@ -825,7 +832,7 @@ impl<'a> State<'a> {
match *tt { match *tt {
ast::TTDelim(ref tts) => self.print_tts(&(tts.as_slice())), ast::TTDelim(ref tts) => self.print_tts(&(tts.as_slice())),
ast::TTTok(_, ref tk) => { ast::TTTok(_, ref tk) => {
word(&mut self.s, parse::token::to_str(tk)) word(&mut self.s, parse::token::to_str(tk).as_slice())
} }
ast::TTSeq(_, ref tts, ref sep, zerok) => { ast::TTSeq(_, ref tts, ref sep, zerok) => {
try!(word(&mut self.s, "$(")); try!(word(&mut self.s, "$("));
@ -835,7 +842,8 @@ impl<'a> State<'a> {
try!(word(&mut self.s, ")")); try!(word(&mut self.s, ")"));
match *sep { match *sep {
Some(ref tk) => { Some(ref tk) => {
try!(word(&mut self.s, parse::token::to_str(tk))); try!(word(&mut self.s,
parse::token::to_str(tk).as_slice()));
} }
None => () None => ()
} }
@ -2189,7 +2197,7 @@ impl<'a> State<'a> {
try!(self.maybe_print_comment(lit.span.lo)); try!(self.maybe_print_comment(lit.span.lo));
match self.next_lit(lit.span.lo) { match self.next_lit(lit.span.lo) {
Some(ref ltrl) => { Some(ref ltrl) => {
return word(&mut self.s, (*ltrl).lit); return word(&mut self.s, (*ltrl).lit.as_slice());
} }
_ => () _ => ()
} }
@ -2202,16 +2210,19 @@ impl<'a> State<'a> {
word(&mut self.s, res.into_owned()) word(&mut self.s, res.into_owned())
} }
ast::LitInt(i, t) => { ast::LitInt(i, t) => {
word(&mut self.s, ast_util::int_ty_to_str(t, Some(i))) word(&mut self.s,
ast_util::int_ty_to_str(t, Some(i)).as_slice())
} }
ast::LitUint(u, t) => { ast::LitUint(u, t) => {
word(&mut self.s, ast_util::uint_ty_to_str(t, Some(u))) word(&mut self.s,
ast_util::uint_ty_to_str(t, Some(u)).as_slice())
} }
ast::LitIntUnsuffixed(i) => { ast::LitIntUnsuffixed(i) => {
word(&mut self.s, format!("{}", i)) word(&mut self.s, format!("{}", i))
} }
ast::LitFloat(ref f, t) => { ast::LitFloat(ref f, t) => {
word(&mut self.s, f.get() + ast_util::float_ty_to_str(t)) word(&mut self.s,
f.get() + ast_util::float_ty_to_str(t).as_slice())
} }
ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.get()), ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.get()),
ast::LitNil => word(&mut self.s, "()"), ast::LitNil => word(&mut self.s, "()"),
@ -2266,7 +2277,7 @@ impl<'a> State<'a> {
comments::Mixed => { comments::Mixed => {
assert_eq!(cmnt.lines.len(), 1u); assert_eq!(cmnt.lines.len(), 1u);
try!(zerobreak(&mut self.s)); try!(zerobreak(&mut self.s));
try!(word(&mut self.s, *cmnt.lines.get(0))); try!(word(&mut self.s, cmnt.lines.get(0).as_slice()));
zerobreak(&mut self.s) zerobreak(&mut self.s)
} }
comments::Isolated => { comments::Isolated => {
@ -2275,7 +2286,7 @@ impl<'a> State<'a> {
// Don't print empty lines because they will end up as trailing // Don't print empty lines because they will end up as trailing
// whitespace // whitespace
if !line.is_empty() { if !line.is_empty() {
try!(word(&mut self.s, *line)); try!(word(&mut self.s, line.as_slice()));
} }
try!(hardbreak(&mut self.s)); try!(hardbreak(&mut self.s));
} }
@ -2284,13 +2295,13 @@ impl<'a> State<'a> {
comments::Trailing => { comments::Trailing => {
try!(word(&mut self.s, " ")); try!(word(&mut self.s, " "));
if cmnt.lines.len() == 1u { if cmnt.lines.len() == 1u {
try!(word(&mut self.s, *cmnt.lines.get(0))); try!(word(&mut self.s, cmnt.lines.get(0).as_slice()));
hardbreak(&mut self.s) hardbreak(&mut self.s)
} else { } else {
try!(self.ibox(0u)); try!(self.ibox(0u));
for line in cmnt.lines.iter() { for line in cmnt.lines.iter() {
if !line.is_empty() { if !line.is_empty() {
try!(word(&mut self.s, *line)); try!(word(&mut self.s, line.as_slice()));
} }
try!(hardbreak(&mut self.s)); try!(hardbreak(&mut self.s));
} }
@ -2300,7 +2311,7 @@ impl<'a> State<'a> {
comments::BlankLine => { comments::BlankLine => {
// We need to do at least one, possibly two hardbreaks. // We need to do at least one, possibly two hardbreaks.
let is_semi = match self.s.last_token() { let is_semi = match self.s.last_token() {
pp::String(s, _) => ";" == s, pp::String(s, _) => ";" == s.as_slice(),
_ => false _ => false
}; };
if is_semi || self.is_begin() || self.is_end() { if is_semi || self.is_begin() || self.is_end() {
@ -2371,8 +2382,9 @@ impl<'a> State<'a> {
opt_fn_style: Option<ast::FnStyle>, opt_fn_style: Option<ast::FnStyle>,
abi: abi::Abi, abi: abi::Abi,
vis: ast::Visibility) -> IoResult<()> { vis: ast::Visibility) -> IoResult<()> {
try!(word(&mut self.s, visibility_qualified(vis, ""))); try!(word(&mut self.s, visibility_qualified(vis, "").as_slice()));
try!(self.print_opt_fn_style(opt_fn_style)); try!(self.print_opt_fn_style(opt_fn_style));
if abi != abi::Rust { if abi != abi::Rust {
try!(self.word_nbsp("extern")); try!(self.word_nbsp("extern"));
try!(self.word_nbsp(abi.to_str())); try!(self.word_nbsp(abi.to_str()));
@ -2420,7 +2432,7 @@ mod test {
let generics = ast_util::empty_generics(); let generics = ast_util::empty_generics();
assert_eq!(&fun_to_str(&decl, ast::NormalFn, abba_ident, assert_eq!(&fun_to_str(&decl, ast::NormalFn, abba_ident,
None, &generics), None, &generics),
&"fn abba()".to_owned()); &"fn abba()".to_strbuf());
} }
#[test] #[test]
@ -2438,6 +2450,6 @@ mod test {
}); });
let varstr = variant_to_str(&var); let varstr = variant_to_str(&var);
assert_eq!(&varstr,&"pub principal_skinner".to_owned()); assert_eq!(&varstr,&"pub principal_skinner".to_strbuf());
} }
} }

View file

@ -92,7 +92,7 @@ impl<T: TotalEq + Hash + Clone + 'static> Interner<T> {
#[deriving(Clone, Eq, Hash, Ord)] #[deriving(Clone, Eq, Hash, Ord)]
pub struct RcStr { pub struct RcStr {
string: Rc<~str>, string: Rc<StrBuf>,
} }
impl TotalEq for RcStr {} impl TotalEq for RcStr {}
@ -106,7 +106,7 @@ impl TotalOrd for RcStr {
impl Str for RcStr { impl Str for RcStr {
#[inline] #[inline]
fn as_slice<'a>(&'a self) -> &'a str { fn as_slice<'a>(&'a self) -> &'a str {
let s: &'a str = *self.string; let s: &'a str = self.string.as_slice();
s s
} }
} }
@ -121,7 +121,7 @@ impl fmt::Show for RcStr {
impl RcStr { impl RcStr {
pub fn new(string: &str) -> RcStr { pub fn new(string: &str) -> RcStr {
RcStr { RcStr {
string: Rc::new(string.to_owned()), string: Rc::new(string.to_strbuf()),
} }
} }
} }
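
`RcStr` now wraps `Rc<StrBuf>`, keeping clones cheap while the interner owns the text. In later Rust the usual key type is `Rc<str>`, which drops one level of indirection; a sketch under that assumption (toy `Interner`, not the libsyntax one):

    use std::collections::HashMap;
    use std::rc::Rc;

    #[derive(Default)]
    struct Interner {
        map: HashMap<Rc<str>, u32>,
        vect: Vec<Rc<str>>,
    }

    impl Interner {
        fn intern(&mut self, s: &str) -> u32 {
            // Rc<str>: Borrow<str>, so lookup needs no allocation.
            if let Some(&idx) = self.map.get(s) {
                return idx;
            }
            let idx = self.vect.len() as u32;
            let rc: Rc<str> = Rc::from(s);
            self.map.insert(rc.clone(), idx);
            self.vect.push(rc);
            idx
        }
    }

    fn main() {
        let mut i = Interner::default();
        let a = i.intern("zebra");
        assert_eq!(a, i.intern("zebra"));
    }
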

View file

@ -16,17 +16,21 @@ use parse::parser::Parser;
use parse::token; use parse::token;
// map a string to tts, using a made-up filename: // map a string to tts, using a made-up filename:
pub fn string_to_tts(source_str: ~str) -> Vec<ast::TokenTree> { pub fn string_to_tts(source_str: StrBuf) -> Vec<ast::TokenTree> {
let ps = new_parse_sess(); let ps = new_parse_sess();
filemap_to_tts(&ps, string_to_filemap(&ps, source_str,"bogofile".to_owned())) filemap_to_tts(&ps,
string_to_filemap(&ps, source_str, "bogofile".to_strbuf()))
} }
// map string to parser (via tts) // map string to parser (via tts)
pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: ~str) -> Parser<'a> { pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: StrBuf) -> Parser<'a> {
new_parser_from_source_str(ps, Vec::new(), "bogofile".to_owned(), source_str) new_parser_from_source_str(ps,
Vec::new(),
"bogofile".to_strbuf(),
source_str)
} }
fn with_error_checking_parse<T>(s: ~str, f: |&mut Parser| -> T) -> T { fn with_error_checking_parse<T>(s: StrBuf, f: |&mut Parser| -> T) -> T {
let ps = new_parse_sess(); let ps = new_parse_sess();
let mut p = string_to_parser(&ps, s); let mut p = string_to_parser(&ps, s);
let x = f(&mut p); let x = f(&mut p);
@ -35,28 +39,28 @@ fn with_error_checking_parse<T>(s: ~str, f: |&mut Parser| -> T) -> T {
} }
// parse a string, return a crate. // parse a string, return a crate.
pub fn string_to_crate (source_str : ~str) -> ast::Crate { pub fn string_to_crate (source_str : StrBuf) -> ast::Crate {
with_error_checking_parse(source_str, |p| { with_error_checking_parse(source_str, |p| {
p.parse_crate_mod() p.parse_crate_mod()
}) })
} }
// parse a string, return an expr // parse a string, return an expr
pub fn string_to_expr (source_str : ~str) -> @ast::Expr { pub fn string_to_expr (source_str : StrBuf) -> @ast::Expr {
with_error_checking_parse(source_str, |p| { with_error_checking_parse(source_str, |p| {
p.parse_expr() p.parse_expr()
}) })
} }
// parse a string, return an item // parse a string, return an item
pub fn string_to_item (source_str : ~str) -> Option<@ast::Item> { pub fn string_to_item (source_str : StrBuf) -> Option<@ast::Item> {
with_error_checking_parse(source_str, |p| { with_error_checking_parse(source_str, |p| {
p.parse_item(Vec::new()) p.parse_item(Vec::new())
}) })
} }
// parse a string, return a stmt // parse a string, return a stmt
pub fn string_to_stmt(source_str : ~str) -> @ast::Stmt { pub fn string_to_stmt(source_str : StrBuf) -> @ast::Stmt {
with_error_checking_parse(source_str, |p| { with_error_checking_parse(source_str, |p| {
p.parse_stmt(Vec::new()) p.parse_stmt(Vec::new())
}) })
@ -64,7 +68,7 @@ pub fn string_to_stmt(source_str : ~str) -> @ast::Stmt {
// parse a string, return a pat. Uses "irrefutable"... which doesn't // parse a string, return a pat. Uses "irrefutable"... which doesn't
// (currently) affect parsing. // (currently) affect parsing.
pub fn string_to_pat(source_str: ~str) -> @ast::Pat { pub fn string_to_pat(source_str: StrBuf) -> @ast::Pat {
string_to_parser(&new_parse_sess(), source_str).parse_pat() string_to_parser(&new_parse_sess(), source_str).parse_pat()
} }
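
All of these test helpers switch to owned `StrBuf` parameters for the same reason as the parse entry points: the source string is handed off to the codemap, never borrowed back. The overall shape, with stand-in types (`Parser` here is a dummy, not `parse::parser::Parser`):

    struct Parser {
        source: String,
    }

    fn string_to_parser(source_str: String) -> Parser {
        Parser { source: source_str } // source moves in, as into the codemap
    }

    fn with_error_checking_parse<T>(s: String, f: impl FnOnce(&mut Parser) -> T) -> T {
        let mut p = string_to_parser(s);
        f(&mut p) // the real helper also aborts if the parser reported errors
    }

    fn main() {
        let n = with_error_checking_parse("b;".to_string(), |p| p.source.len());
        assert_eq!(n, 2);
    }
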