1
Fork 0

librustdoc: Remove all ~str usage from librustdoc.

Author: Patrick Walton — 2014-05-12 13:44:59 -07:00
parent 6559a3675e
commit 9ba91e1243
13 changed files with 357 additions and 261 deletions

View file (side-by-side diff; on each line below, the left column shows the pre-commit code and the right column the post-commit code)
@ -69,7 +69,7 @@ impl<T: Clean<U>, U> Clean<Vec<U>> for syntax::owned_slice::OwnedSlice<T> {
#[deriving(Clone, Encodable, Decodable)] #[deriving(Clone, Encodable, Decodable)]
pub struct Crate { pub struct Crate {
pub name: ~str, pub name: StrBuf,
pub module: Option<Item>, pub module: Option<Item>,
pub externs: Vec<(ast::CrateNum, ExternalCrate)>, pub externs: Vec<(ast::CrateNum, ExternalCrate)>,
} }
@ -92,7 +92,7 @@ impl<'a> Clean<Crate> for visit_ast::RustdocVisitor<'a> {
let id = link::find_crate_id(self.attrs.as_slice(), let id = link::find_crate_id(self.attrs.as_slice(),
t_outputs.out_filestem.as_slice()); t_outputs.out_filestem.as_slice());
Crate { Crate {
name: id.name.to_owned(), name: id.name.to_strbuf(),
module: Some(self.module.clean()), module: Some(self.module.clean()),
externs: externs, externs: externs,
} }
@ -101,14 +101,14 @@ impl<'a> Clean<Crate> for visit_ast::RustdocVisitor<'a> {
#[deriving(Clone, Encodable, Decodable)] #[deriving(Clone, Encodable, Decodable)]
pub struct ExternalCrate { pub struct ExternalCrate {
pub name: ~str, pub name: StrBuf,
pub attrs: Vec<Attribute>, pub attrs: Vec<Attribute>,
} }
impl Clean<ExternalCrate> for cstore::crate_metadata { impl Clean<ExternalCrate> for cstore::crate_metadata {
fn clean(&self) -> ExternalCrate { fn clean(&self) -> ExternalCrate {
ExternalCrate { ExternalCrate {
name: self.name.to_owned(), name: self.name.to_strbuf(),
attrs: decoder::get_crate_attributes(self.data()).clean() attrs: decoder::get_crate_attributes(self.data()).clean()
.move_iter() .move_iter()
.collect(), .collect(),
@ -124,7 +124,7 @@ pub struct Item {
/// Stringified span /// Stringified span
pub source: Span, pub source: Span,
/// Not everything has a name. E.g., impls /// Not everything has a name. E.g., impls
pub name: Option<~str>, pub name: Option<StrBuf>,
pub attrs: Vec<Attribute> , pub attrs: Vec<Attribute> ,
pub inner: ItemEnum, pub inner: ItemEnum,
pub visibility: Option<Visibility>, pub visibility: Option<Visibility>,
@ -137,7 +137,9 @@ impl Item {
pub fn doc_list<'a>(&'a self) -> Option<&'a [Attribute]> { pub fn doc_list<'a>(&'a self) -> Option<&'a [Attribute]> {
for attr in self.attrs.iter() { for attr in self.attrs.iter() {
match *attr { match *attr {
List(ref x, ref list) if "doc" == *x => { return Some(list.as_slice()); } List(ref x, ref list) if "doc" == x.as_slice() => {
return Some(list.as_slice());
}
_ => {} _ => {}
} }
} }
@ -149,7 +151,9 @@ impl Item {
pub fn doc_value<'a>(&'a self) -> Option<&'a str> { pub fn doc_value<'a>(&'a self) -> Option<&'a str> {
for attr in self.attrs.iter() { for attr in self.attrs.iter() {
match *attr { match *attr {
NameValue(ref x, ref v) if "doc" == *x => { return Some(v.as_slice()); } NameValue(ref x, ref v) if "doc" == x.as_slice() => {
return Some(v.as_slice());
}
_ => {} _ => {}
} }
} }
@ -161,7 +165,9 @@ impl Item {
Some(ref l) => { Some(ref l) => {
for innerattr in l.iter() { for innerattr in l.iter() {
match *innerattr { match *innerattr {
Word(ref s) if "hidden" == *s => return true, Word(ref s) if "hidden" == s.as_slice() => {
return true
}
_ => (), _ => (),
} }
} }
@ -225,7 +231,7 @@ impl Clean<Item> for doctree::Module {
let name = if self.name.is_some() { let name = if self.name.is_some() {
self.name.unwrap().clean() self.name.unwrap().clean()
} else { } else {
"".to_owned() "".to_strbuf()
}; };
let mut foreigns = Vec::new(); let mut foreigns = Vec::new();
for subforeigns in self.foreigns.clean().move_iter() { for subforeigns in self.foreigns.clean().move_iter() {
@ -281,20 +287,20 @@ impl Clean<Item> for doctree::Module {
#[deriving(Clone, Encodable, Decodable)] #[deriving(Clone, Encodable, Decodable)]
pub enum Attribute { pub enum Attribute {
Word(~str), Word(StrBuf),
List(~str, Vec<Attribute> ), List(StrBuf, Vec<Attribute> ),
NameValue(~str, ~str) NameValue(StrBuf, StrBuf)
} }
impl Clean<Attribute> for ast::MetaItem { impl Clean<Attribute> for ast::MetaItem {
fn clean(&self) -> Attribute { fn clean(&self) -> Attribute {
match self.node { match self.node {
ast::MetaWord(ref s) => Word(s.get().to_owned()), ast::MetaWord(ref s) => Word(s.get().to_strbuf()),
ast::MetaList(ref s, ref l) => { ast::MetaList(ref s, ref l) => {
List(s.get().to_owned(), l.clean().move_iter().collect()) List(s.get().to_strbuf(), l.clean().move_iter().collect())
} }
ast::MetaNameValue(ref s, ref v) => { ast::MetaNameValue(ref s, ref v) => {
NameValue(s.get().to_owned(), lit_to_str(v)) NameValue(s.get().to_strbuf(), lit_to_str(v))
} }
} }
} }
@ -311,14 +317,16 @@ impl<'a> attr::AttrMetaMethods for &'a Attribute {
fn name(&self) -> InternedString { fn name(&self) -> InternedString {
match **self { match **self {
Word(ref n) | List(ref n, _) | NameValue(ref n, _) => { Word(ref n) | List(ref n, _) | NameValue(ref n, _) => {
token::intern_and_get_ident(*n) token::intern_and_get_ident(n.as_slice())
} }
} }
} }
fn value_str(&self) -> Option<InternedString> { fn value_str(&self) -> Option<InternedString> {
match **self { match **self {
NameValue(_, ref v) => Some(token::intern_and_get_ident(*v)), NameValue(_, ref v) => {
Some(token::intern_and_get_ident(v.as_slice()))
}
_ => None, _ => None,
} }
} }
@ -330,7 +338,7 @@ impl<'a> attr::AttrMetaMethods for &'a Attribute {
#[deriving(Clone, Encodable, Decodable)] #[deriving(Clone, Encodable, Decodable)]
pub struct TyParam { pub struct TyParam {
pub name: ~str, pub name: StrBuf,
pub id: ast::NodeId, pub id: ast::NodeId,
pub bounds: Vec<TyParamBound>, pub bounds: Vec<TyParamBound>,
} }
@ -362,19 +370,19 @@ impl Clean<TyParamBound> for ast::TyParamBound {
} }
#[deriving(Clone, Encodable, Decodable)] #[deriving(Clone, Encodable, Decodable)]
pub struct Lifetime(~str); pub struct Lifetime(StrBuf);
impl Lifetime { impl Lifetime {
pub fn get_ref<'a>(&'a self) -> &'a str { pub fn get_ref<'a>(&'a self) -> &'a str {
let Lifetime(ref s) = *self; let Lifetime(ref s) = *self;
let s: &'a str = *s; let s: &'a str = s.as_slice();
return s; return s;
} }
} }
impl Clean<Lifetime> for ast::Lifetime { impl Clean<Lifetime> for ast::Lifetime {
fn clean(&self) -> Lifetime { fn clean(&self) -> Lifetime {
Lifetime(token::get_name(self.name).get().to_owned()) Lifetime(token::get_name(self.name).get().to_strbuf())
} }
} }
@ -566,7 +574,7 @@ impl Clean<FnDecl> for ast::FnDecl {
#[deriving(Clone, Encodable, Decodable)] #[deriving(Clone, Encodable, Decodable)]
pub struct Argument { pub struct Argument {
pub type_: Type, pub type_: Type,
pub name: ~str, pub name: StrBuf,
pub id: ast::NodeId, pub id: ast::NodeId,
} }
@ -687,7 +695,7 @@ pub enum Type {
BareFunction(Box<BareFunctionDecl>), BareFunction(Box<BareFunctionDecl>),
Tuple(Vec<Type>), Tuple(Vec<Type>),
Vector(Box<Type>), Vector(Box<Type>),
FixedVector(Box<Type>, ~str), FixedVector(Box<Type>, StrBuf),
String, String,
Bool, Bool,
/// aka TyNil /// aka TyNil
@ -890,7 +898,7 @@ impl Clean<VariantKind> for ast::VariantKind {
#[deriving(Clone, Encodable, Decodable)] #[deriving(Clone, Encodable, Decodable)]
pub struct Span { pub struct Span {
pub filename: ~str, pub filename: StrBuf,
pub loline: uint, pub loline: uint,
pub locol: uint, pub locol: uint,
pub hiline: uint, pub hiline: uint,
@ -905,7 +913,7 @@ impl Clean<Span> for syntax::codemap::Span {
let lo = cm.lookup_char_pos(self.lo); let lo = cm.lookup_char_pos(self.lo);
let hi = cm.lookup_char_pos(self.hi); let hi = cm.lookup_char_pos(self.hi);
Span { Span {
filename: filename.to_owned(), filename: filename.to_strbuf(),
loline: lo.line, loline: lo.line,
locol: lo.col.to_uint(), locol: lo.col.to_uint(),
hiline: hi.line, hiline: hi.line,
@ -931,7 +939,7 @@ impl Clean<Path> for ast::Path {
#[deriving(Clone, Encodable, Decodable)] #[deriving(Clone, Encodable, Decodable)]
pub struct PathSegment { pub struct PathSegment {
pub name: ~str, pub name: StrBuf,
pub lifetimes: Vec<Lifetime>, pub lifetimes: Vec<Lifetime>,
pub types: Vec<Type>, pub types: Vec<Type>,
} }
@ -946,7 +954,7 @@ impl Clean<PathSegment> for ast::PathSegment {
} }
} }
fn path_to_str(p: &ast::Path) -> ~str { fn path_to_str(p: &ast::Path) -> StrBuf {
use syntax::parse::token; use syntax::parse::token;
let mut s = StrBuf::new(); let mut s = StrBuf::new();
@ -959,12 +967,12 @@ fn path_to_str(p: &ast::Path) -> ~str {
} }
s.push_str(i.get()); s.push_str(i.get());
} }
s.into_owned() s
} }
impl Clean<~str> for ast::Ident { impl Clean<StrBuf> for ast::Ident {
fn clean(&self) -> ~str { fn clean(&self) -> StrBuf {
token::get_ident(*self).get().to_owned() token::get_ident(*self).get().to_strbuf()
} }
} }
@ -995,7 +1003,7 @@ pub struct BareFunctionDecl {
pub fn_style: ast::FnStyle, pub fn_style: ast::FnStyle,
pub generics: Generics, pub generics: Generics,
pub decl: FnDecl, pub decl: FnDecl,
pub abi: ~str, pub abi: StrBuf,
} }
impl Clean<BareFunctionDecl> for ast::BareFnTy { impl Clean<BareFunctionDecl> for ast::BareFnTy {
@ -1007,7 +1015,7 @@ impl Clean<BareFunctionDecl> for ast::BareFnTy {
type_params: Vec::new(), type_params: Vec::new(),
}, },
decl: self.decl.clean(), decl: self.decl.clean(),
abi: self.abi.to_str(), abi: self.abi.to_str().to_strbuf(),
} }
} }
} }
@ -1019,7 +1027,7 @@ pub struct Static {
/// It's useful to have the value of a static documented, but I have no /// It's useful to have the value of a static documented, but I have no
/// desire to represent expressions (that'd basically be all of the AST, /// desire to represent expressions (that'd basically be all of the AST,
/// which is huge!). So, have a string. /// which is huge!). So, have a string.
pub expr: ~str, pub expr: StrBuf,
} }
impl Clean<Item> for doctree::Static { impl Clean<Item> for doctree::Static {
@ -1116,7 +1124,7 @@ impl Clean<Item> for ast::ViewItem {
#[deriving(Clone, Encodable, Decodable)] #[deriving(Clone, Encodable, Decodable)]
pub enum ViewItemInner { pub enum ViewItemInner {
ExternCrate(~str, Option<~str>, ast::NodeId), ExternCrate(StrBuf, Option<StrBuf>, ast::NodeId),
Import(ViewPath) Import(ViewPath)
} }
@ -1126,7 +1134,7 @@ impl Clean<ViewItemInner> for ast::ViewItem_ {
&ast::ViewItemExternCrate(ref i, ref p, ref id) => { &ast::ViewItemExternCrate(ref i, ref p, ref id) => {
let string = match *p { let string = match *p {
None => None, None => None,
Some((ref x, _)) => Some(x.get().to_owned()), Some((ref x, _)) => Some(x.get().to_strbuf()),
}; };
ExternCrate(i.clean(), string, *id) ExternCrate(i.clean(), string, *id)
} }
@ -1140,7 +1148,7 @@ impl Clean<ViewItemInner> for ast::ViewItem_ {
#[deriving(Clone, Encodable, Decodable)] #[deriving(Clone, Encodable, Decodable)]
pub enum ViewPath { pub enum ViewPath {
// use str = source; // use str = source;
SimpleImport(~str, ImportSource), SimpleImport(StrBuf, ImportSource),
// use source::*; // use source::*;
GlobImport(ImportSource), GlobImport(ImportSource),
// use source::{a, b, c}; // use source::{a, b, c};
@ -1170,7 +1178,7 @@ impl Clean<ViewPath> for ast::ViewPath {
#[deriving(Clone, Encodable, Decodable)] #[deriving(Clone, Encodable, Decodable)]
pub struct ViewListIdent { pub struct ViewListIdent {
pub name: ~str, pub name: StrBuf,
pub source: Option<ast::DefId>, pub source: Option<ast::DefId>,
} }
@ -1203,7 +1211,7 @@ impl Clean<Item> for ast::ForeignItem {
ForeignStaticItem(Static { ForeignStaticItem(Static {
type_: ty.clean(), type_: ty.clean(),
mutability: if mutbl {Mutable} else {Immutable}, mutability: if mutbl {Mutable} else {Immutable},
expr: "".to_owned(), expr: "".to_strbuf(),
}) })
} }
}; };
@ -1221,56 +1229,56 @@ impl Clean<Item> for ast::ForeignItem {
// Utilities // Utilities
trait ToSource { trait ToSource {
fn to_src(&self) -> ~str; fn to_src(&self) -> StrBuf;
} }
impl ToSource for syntax::codemap::Span { impl ToSource for syntax::codemap::Span {
fn to_src(&self) -> ~str { fn to_src(&self) -> StrBuf {
debug!("converting span {:?} to snippet", self.clean()); debug!("converting span {:?} to snippet", self.clean());
let ctxt = super::ctxtkey.get().unwrap(); let ctxt = super::ctxtkey.get().unwrap();
let cm = ctxt.sess().codemap().clone(); let cm = ctxt.sess().codemap().clone();
let sn = match cm.span_to_snippet(*self) { let sn = match cm.span_to_snippet(*self) {
Some(x) => x.to_owned(), Some(x) => x.to_strbuf(),
None => "".to_owned() None => "".to_strbuf()
}; };
debug!("got snippet {}", sn); debug!("got snippet {}", sn);
sn sn
} }
} }
fn lit_to_str(lit: &ast::Lit) -> ~str { fn lit_to_str(lit: &ast::Lit) -> StrBuf {
match lit.node { match lit.node {
ast::LitStr(ref st, _) => st.get().to_owned(), ast::LitStr(ref st, _) => st.get().to_strbuf(),
ast::LitBinary(ref data) => format!("{:?}", data.as_slice()), ast::LitBinary(ref data) => format_strbuf!("{:?}", data.as_slice()),
ast::LitChar(c) => format!("'{}'", c), ast::LitChar(c) => format_strbuf!("'{}'", c),
ast::LitInt(i, _t) => i.to_str(), ast::LitInt(i, _t) => i.to_str().to_strbuf(),
ast::LitUint(u, _t) => u.to_str(), ast::LitUint(u, _t) => u.to_str().to_strbuf(),
ast::LitIntUnsuffixed(i) => i.to_str(), ast::LitIntUnsuffixed(i) => i.to_str().to_strbuf(),
ast::LitFloat(ref f, _t) => f.get().to_str(), ast::LitFloat(ref f, _t) => f.get().to_strbuf(),
ast::LitFloatUnsuffixed(ref f) => f.get().to_str(), ast::LitFloatUnsuffixed(ref f) => f.get().to_strbuf(),
ast::LitBool(b) => b.to_str(), ast::LitBool(b) => b.to_str().to_strbuf(),
ast::LitNil => "".to_owned(), ast::LitNil => "".to_strbuf(),
} }
} }
fn name_from_pat(p: &ast::Pat) -> ~str { fn name_from_pat(p: &ast::Pat) -> StrBuf {
use syntax::ast::*; use syntax::ast::*;
debug!("Trying to get a name from pattern: {:?}", p); debug!("Trying to get a name from pattern: {:?}", p);
match p.node { match p.node {
PatWild => "_".to_owned(), PatWild => "_".to_strbuf(),
PatWildMulti => "..".to_owned(), PatWildMulti => "..".to_strbuf(),
PatIdent(_, ref p, _) => path_to_str(p), PatIdent(_, ref p, _) => path_to_str(p),
PatEnum(ref p, _) => path_to_str(p), PatEnum(ref p, _) => path_to_str(p),
PatStruct(..) => fail!("tried to get argument name from pat_struct, \ PatStruct(..) => fail!("tried to get argument name from pat_struct, \
which is not allowed in function arguments"), which is not allowed in function arguments"),
PatTup(..) => "(tuple arg NYI)".to_owned(), PatTup(..) => "(tuple arg NYI)".to_strbuf(),
PatUniq(p) => name_from_pat(p), PatUniq(p) => name_from_pat(p),
PatRegion(p) => name_from_pat(p), PatRegion(p) => name_from_pat(p),
PatLit(..) => { PatLit(..) => {
warn!("tried to get argument name from PatLit, \ warn!("tried to get argument name from PatLit, \
which is silly in function arguments"); which is silly in function arguments");
"()".to_owned() "()".to_strbuf()
}, },
PatRange(..) => fail!("tried to get argument name from PatRange, \ PatRange(..) => fail!("tried to get argument name from PatRange, \
which is not allowed in function arguments"), which is not allowed in function arguments"),
@ -1326,7 +1334,7 @@ fn register_def(cx: &core::DocContext, def: ast::Def) -> ast::DefId {
core::NotTyped(_) => return did core::NotTyped(_) => return did
}; };
let fqn = csearch::get_item_path(tcx, did); let fqn = csearch::get_item_path(tcx, did);
let fqn = fqn.move_iter().map(|i| i.to_str()).collect(); let fqn = fqn.move_iter().map(|i| i.to_str().to_strbuf()).collect();
debug!("recording {} => {}", did, fqn); debug!("recording {} => {}", did, fqn);
cx.external_paths.borrow_mut().get_mut_ref().insert(did, (fqn, kind)); cx.external_paths.borrow_mut().get_mut_ref().insert(did, (fqn, kind));
return did; return did;
@ -1351,13 +1359,13 @@ fn resolve_def(id: ast::NodeId) -> Option<ast::DefId> {
#[deriving(Clone, Encodable, Decodable)] #[deriving(Clone, Encodable, Decodable)]
pub struct Macro { pub struct Macro {
pub source: ~str, pub source: StrBuf,
} }
impl Clean<Item> for doctree::Macro { impl Clean<Item> for doctree::Macro {
fn clean(&self) -> Item { fn clean(&self) -> Item {
Item { Item {
name: Some(self.name.clean() + "!"), name: Some(format_strbuf!("{}!", self.name.clean())),
attrs: self.attrs.clean(), attrs: self.attrs.clean(),
source: self.where.clean(), source: self.where.clean(),
visibility: ast::Public.clean(), visibility: ast::Public.clean(),

View file

@ -32,7 +32,7 @@ pub enum MaybeTyped {
} }
pub type ExternalPaths = RefCell<Option<HashMap<ast::DefId, pub type ExternalPaths = RefCell<Option<HashMap<ast::DefId,
(Vec<~str>, clean::TypeKind)>>>; (Vec<StrBuf>, clean::TypeKind)>>>;
pub struct DocContext { pub struct DocContext {
pub krate: ast::Crate, pub krate: ast::Crate,
@ -57,7 +57,7 @@ pub struct CrateAnalysis {
} }
/// Parses, resolves, and typechecks the given crate /// Parses, resolves, and typechecks the given crate
fn get_ast_and_resolve(cpath: &Path, libs: HashSet<Path>, cfgs: Vec<~str>) fn get_ast_and_resolve(cpath: &Path, libs: HashSet<Path>, cfgs: Vec<StrBuf>)
-> (DocContext, CrateAnalysis) { -> (DocContext, CrateAnalysis) {
use syntax::codemap::dummy_spanned; use syntax::codemap::dummy_spanned;
use rustc::driver::driver::{FileInput, use rustc::driver::driver::{FileInput,
@ -88,7 +88,7 @@ fn get_ast_and_resolve(cpath: &Path, libs: HashSet<Path>, cfgs: Vec<~str>)
let mut cfg = build_configuration(&sess); let mut cfg = build_configuration(&sess);
for cfg_ in cfgs.move_iter() { for cfg_ in cfgs.move_iter() {
let cfg_ = token::intern_and_get_ident(cfg_); let cfg_ = token::intern_and_get_ident(cfg_.as_slice());
cfg.push(@dummy_spanned(ast::MetaWord(cfg_))); cfg.push(@dummy_spanned(ast::MetaWord(cfg_)));
} }
@ -112,7 +112,7 @@ fn get_ast_and_resolve(cpath: &Path, libs: HashSet<Path>, cfgs: Vec<~str>)
}) })
} }
pub fn run_core(libs: HashSet<Path>, cfgs: Vec<~str>, path: &Path) pub fn run_core(libs: HashSet<Path>, cfgs: Vec<StrBuf>, path: &Path)
-> (clean::Crate, CrateAnalysis) { -> (clean::Crate, CrateAnalysis) {
let (ctxt, analysis) = get_ast_and_resolve(path, libs, cfgs); let (ctxt, analysis) = get_ast_and_resolve(path, libs, cfgs);
let ctxt = @ctxt; let ctxt = @ctxt;

View file

@ -151,11 +151,13 @@ fn resolved_path(w: &mut io::Writer, did: ast::DefId, p: &clean::Path,
path(w, p, print_all, path(w, p, print_all,
|cache, loc| { |cache, loc| {
if ast_util::is_local(did) { if ast_util::is_local(did) {
Some("../".repeat(loc.len())) Some(("../".repeat(loc.len())).to_strbuf())
} else { } else {
match *cache.extern_locations.get(&did.krate) { match *cache.extern_locations.get(&did.krate) {
render::Remote(ref s) => Some(s.clone()), render::Remote(ref s) => Some(s.to_strbuf()),
render::Local => Some("../".repeat(loc.len())), render::Local => {
Some(("../".repeat(loc.len())).to_strbuf())
}
render::Unknown => None, render::Unknown => None,
} }
} }
@ -169,8 +171,8 @@ fn resolved_path(w: &mut io::Writer, did: ast::DefId, p: &clean::Path,
} }
fn path(w: &mut io::Writer, path: &clean::Path, print_all: bool, fn path(w: &mut io::Writer, path: &clean::Path, print_all: bool,
root: |&render::Cache, &[~str]| -> Option<~str>, root: |&render::Cache, &[StrBuf]| -> Option<StrBuf>,
info: |&render::Cache| -> Option<(Vec<~str> , ItemType)>) info: |&render::Cache| -> Option<(Vec<StrBuf> , ItemType)>)
-> fmt::Result -> fmt::Result
{ {
// The generics will get written to both the title and link // The generics will get written to both the title and link
@ -206,10 +208,11 @@ fn path(w: &mut io::Writer, path: &clean::Path, print_all: bool,
Some(root) => { Some(root) => {
let mut root = StrBuf::from_str(root); let mut root = StrBuf::from_str(root);
for seg in path.segments.slice_to(amt).iter() { for seg in path.segments.slice_to(amt).iter() {
if "super" == seg.name || "self" == seg.name { if "super" == seg.name.as_slice() ||
"self" == seg.name.as_slice() {
try!(write!(w, "{}::", seg.name)); try!(write!(w, "{}::", seg.name));
} else { } else {
root.push_str(seg.name); root.push_str(seg.name.as_slice());
root.push_str("/"); root.push_str("/");
try!(write!(w, "<a class='mod' try!(write!(w, "<a class='mod'
href='{}index.html'>{}</a>::", href='{}index.html'>{}</a>::",
@ -229,21 +232,21 @@ fn path(w: &mut io::Writer, path: &clean::Path, print_all: bool,
match info(&**cache) { match info(&**cache) {
// This is a documented path, link to it! // This is a documented path, link to it!
Some((ref fqp, shortty)) if abs_root.is_some() => { Some((ref fqp, shortty)) if abs_root.is_some() => {
let mut url = StrBuf::from_str(abs_root.unwrap()); let mut url = StrBuf::from_str(abs_root.unwrap().as_slice());
let to_link = fqp.slice_to(fqp.len() - 1); let to_link = fqp.slice_to(fqp.len() - 1);
for component in to_link.iter() { for component in to_link.iter() {
url.push_str(*component); url.push_str(component.as_slice());
url.push_str("/"); url.push_str("/");
} }
match shortty { match shortty {
item_type::Module => { item_type::Module => {
url.push_str(*fqp.last().unwrap()); url.push_str(fqp.last().unwrap().as_slice());
url.push_str("/index.html"); url.push_str("/index.html");
} }
_ => { _ => {
url.push_str(shortty.to_static_str()); url.push_str(shortty.to_static_str());
url.push_str("."); url.push_str(".");
url.push_str(*fqp.last().unwrap()); url.push_str(fqp.last().unwrap().as_slice());
url.push_str(".html"); url.push_str(".html");
} }
} }
@ -352,16 +355,20 @@ impl fmt::Show for clean::Type {
{arrow, select, yes{ -&gt; {ret}} other{}}", {arrow, select, yes{ -&gt; {ret}} other{}}",
style = FnStyleSpace(decl.fn_style), style = FnStyleSpace(decl.fn_style),
lifetimes = if decl.lifetimes.len() == 0 { lifetimes = if decl.lifetimes.len() == 0 {
"".to_owned() "".to_strbuf()
} else { } else {
format!("&lt;{:#}&gt;", decl.lifetimes) format_strbuf!("&lt;{:#}&gt;", decl.lifetimes)
}, },
args = decl.decl.inputs, args = decl.decl.inputs,
bounds = if decl.bounds.len() == 0 { bounds = if decl.bounds.len() == 0 {
"".to_owned() "".to_strbuf()
} else { } else {
let mut m = decl.bounds.iter().map(|s| s.to_str()); let mut m = decl.bounds
": " + m.collect::<Vec<~str>>().connect(" + ") .iter()
.map(|s| s.to_str().to_strbuf());
format_strbuf!(
": {}",
m.collect::<Vec<StrBuf>>().connect(" + "))
}, },
arrow = match decl.decl.output { clean::Unit => "no", _ => "yes" }, arrow = match decl.decl.output { clean::Unit => "no", _ => "yes" },
ret = decl.decl.output) ret = decl.decl.output)
@ -370,9 +377,9 @@ impl fmt::Show for clean::Type {
write!(f.buf, "{}{}fn{}{}", write!(f.buf, "{}{}fn{}{}",
FnStyleSpace(decl.fn_style), FnStyleSpace(decl.fn_style),
match decl.abi.as_slice() { match decl.abi.as_slice() {
"" => " extern ".to_owned(), "" => " extern ".to_strbuf(),
"\"Rust\"" => "".to_owned(), "\"Rust\"" => "".to_strbuf(),
s => format!(" extern {} ", s) s => format_strbuf!(" extern {} ", s)
}, },
decl.generics, decl.generics,
decl.decl) decl.decl)

View file

@ -25,7 +25,7 @@ use html::escape::Escape;
use t = syntax::parse::token; use t = syntax::parse::token;
/// Highlights some source code, returning the HTML output. /// Highlights some source code, returning the HTML output.
pub fn highlight(src: &str, class: Option<&str>) -> ~str { pub fn highlight(src: &str, class: Option<&str>) -> StrBuf {
let sess = parse::new_parse_sess(); let sess = parse::new_parse_sess();
let fm = parse::string_to_filemap(&sess, let fm = parse::string_to_filemap(&sess,
src.to_strbuf(), src.to_strbuf(),
@ -36,7 +36,7 @@ pub fn highlight(src: &str, class: Option<&str>) -> ~str {
lexer::new_string_reader(&sess.span_diagnostic, fm), lexer::new_string_reader(&sess.span_diagnostic, fm),
class, class,
&mut out).unwrap(); &mut out).unwrap();
str::from_utf8_lossy(out.unwrap().as_slice()).into_owned() str::from_utf8_lossy(out.unwrap().as_slice()).to_strbuf()
} }
/// Exhausts the `lexer` writing the output into `out`. /// Exhausts the `lexer` writing the output into `out`.

View file

@ -13,9 +13,9 @@ use std::io;
#[deriving(Clone)] #[deriving(Clone)]
pub struct Layout { pub struct Layout {
pub logo: ~str, pub logo: StrBuf,
pub favicon: ~str, pub favicon: StrBuf,
pub krate: ~str, pub krate: StrBuf,
} }
pub struct Page<'a> { pub struct Page<'a> {
@ -119,9 +119,9 @@ r##"<!DOCTYPE html>
content = *t, content = *t,
root_path = page.root_path, root_path = page.root_path,
ty = page.ty, ty = page.ty,
logo = nonestr(layout.logo), logo = nonestr(layout.logo.as_slice()),
title = page.title, title = page.title,
favicon = nonestr(layout.favicon), favicon = nonestr(layout.favicon.as_slice()),
sidebar = *sidebar, sidebar = *sidebar,
krate = layout.krate, krate = layout.krate,
) )

View file

@ -139,7 +139,7 @@ fn stripped_filtered_line<'a>(s: &'a str) -> Option<&'a str> {
} }
} }
local_data_key!(used_header_map: RefCell<HashMap<~str, uint>>) local_data_key!(used_header_map: RefCell<HashMap<StrBuf, uint>>)
pub fn render(w: &mut io::Writer, s: &str, print_toc: bool) -> fmt::Result { pub fn render(w: &mut io::Writer, s: &str, print_toc: bool) -> fmt::Result {
extern fn block(ob: *mut hoedown_buffer, text: *hoedown_buffer, extern fn block(ob: *mut hoedown_buffer, text: *hoedown_buffer,
@ -177,7 +177,8 @@ pub fn render(w: &mut io::Writer, s: &str, print_toc: bool) -> fmt::Result {
}; };
if !rendered { if !rendered {
let output = highlight::highlight(text, None).to_c_str(); let output = highlight::highlight(text, None).as_slice()
.to_c_str();
output.with_ref(|r| { output.with_ref(|r| {
hoedown_buffer_puts(ob, r) hoedown_buffer_puts(ob, r)
}) })
@ -201,16 +202,16 @@ pub fn render(w: &mut io::Writer, s: &str, print_toc: bool) -> fmt::Result {
}; };
// Transform the contents of the header into a hyphenated string // Transform the contents of the header into a hyphenated string
let id = s.words().map(|s| { let id = (s.words().map(|s| {
match s.to_ascii_opt() { match s.to_ascii_opt() {
Some(s) => s.to_lower().into_str(), Some(s) => s.to_lower().into_str().to_strbuf(),
None => s.to_owned() None => s.to_strbuf()
} }
}).collect::<Vec<~str>>().connect("-"); }).collect::<Vec<StrBuf>>().connect("-")).to_strbuf();
// This is a terrible hack working around how hoedown gives us rendered // This is a terrible hack working around how hoedown gives us rendered
// html for text rather than the raw text. // html for text rather than the raw text.
let id = id.replace("<code>", "").replace("</code>", ""); let id = id.replace("<code>", "").replace("</code>", "").to_strbuf();
let opaque = opaque as *mut hoedown_html_renderer_state; let opaque = opaque as *mut hoedown_html_renderer_state;
let opaque = unsafe { &mut *((*opaque).opaque as *mut MyOpaque) }; let opaque = unsafe { &mut *((*opaque).opaque as *mut MyOpaque) };
@ -219,13 +220,13 @@ pub fn render(w: &mut io::Writer, s: &str, print_toc: bool) -> fmt::Result {
let map = used_header_map.get().unwrap(); let map = used_header_map.get().unwrap();
let id = match map.borrow_mut().find_mut(&id) { let id = match map.borrow_mut().find_mut(&id) {
None => id, None => id,
Some(a) => { *a += 1; format!("{}-{}", id, *a - 1) } Some(a) => { *a += 1; format_strbuf!("{}-{}", id, *a - 1) }
}; };
map.borrow_mut().insert(id.clone(), 1); map.borrow_mut().insert(id.clone(), 1);
let sec = match opaque.toc_builder { let sec = match opaque.toc_builder {
Some(ref mut builder) => { Some(ref mut builder) => {
builder.push(level as u32, s.clone(), id.clone()) builder.push(level as u32, s.to_strbuf(), id.clone())
} }
None => {""} None => {""}
}; };
@ -298,7 +299,7 @@ pub fn find_testable_code(doc: &str, tests: &mut ::test::Collector) {
stripped_filtered_line(l).unwrap_or(l) stripped_filtered_line(l).unwrap_or(l)
}); });
let text = lines.collect::<Vec<&str>>().connect("\n"); let text = lines.collect::<Vec<&str>>().connect("\n");
tests.add_test(text, should_fail, no_run, ignore); tests.add_test(text.to_strbuf(), should_fail, no_run, ignore);
}) })
} }
} }

View file

@ -70,7 +70,7 @@ use html::markdown;
pub struct Context { pub struct Context {
/// Current hierarchy of components leading down to what's currently being /// Current hierarchy of components leading down to what's currently being
/// rendered /// rendered
pub current: Vec<~str> , pub current: Vec<StrBuf> ,
/// String representation of how to get back to the root path of the 'doc/' /// String representation of how to get back to the root path of the 'doc/'
/// folder in terms of a relative URL. /// folder in terms of a relative URL.
pub root_path: StrBuf, pub root_path: StrBuf,
@ -85,7 +85,7 @@ pub struct Context {
/// functions), and the value is the list of containers belonging to this /// functions), and the value is the list of containers belonging to this
/// header. This map will change depending on the surrounding context of the /// header. This map will change depending on the surrounding context of the
/// page. /// page.
pub sidebar: HashMap<~str, Vec<~str> >, pub sidebar: HashMap<StrBuf, Vec<StrBuf> >,
/// This flag indicates whether [src] links should be generated or not. If /// This flag indicates whether [src] links should be generated or not. If
/// the source files are present in the html rendering, then this will be /// the source files are present in the html rendering, then this will be
/// `true`. /// `true`.
@ -95,7 +95,7 @@ pub struct Context {
/// Indicates where an external crate can be found. /// Indicates where an external crate can be found.
pub enum ExternalLocation { pub enum ExternalLocation {
/// Remote URL root of the external crate /// Remote URL root of the external crate
Remote(~str), Remote(StrBuf),
/// This external crate can be found in the local doc/ folder /// This external crate can be found in the local doc/ folder
Local, Local,
/// The external crate could not be found. /// The external crate could not be found.
@ -124,7 +124,7 @@ pub struct Cache {
/// Mapping of typaram ids to the name of the type parameter. This is used /// Mapping of typaram ids to the name of the type parameter. This is used
/// when pretty-printing a type (so pretty printing doesn't have to /// when pretty-printing a type (so pretty printing doesn't have to
/// painfully maintain a context like this) /// painfully maintain a context like this)
pub typarams: HashMap<ast::NodeId, ~str>, pub typarams: HashMap<ast::NodeId, StrBuf>,
/// Maps a type id to all known implementations for that type. This is only /// Maps a type id to all known implementations for that type. This is only
/// recognized for intra-crate `ResolvedPath` types, and is used to print /// recognized for intra-crate `ResolvedPath` types, and is used to print
@ -132,14 +132,14 @@ pub struct Cache {
/// ///
/// The values of the map are a list of implementations and documentation /// The values of the map are a list of implementations and documentation
/// found on that implementation. /// found on that implementation.
pub impls: HashMap<ast::NodeId, Vec<(clean::Impl, Option<~str>)> >, pub impls: HashMap<ast::NodeId, Vec<(clean::Impl, Option<StrBuf>)> >,
/// Maintains a mapping of local crate node ids to the fully qualified name /// Maintains a mapping of local crate node ids to the fully qualified name
/// and "short type description" of that node. This is used when generating /// and "short type description" of that node. This is used when generating
/// URLs when a type is being linked to. External paths are not located in /// URLs when a type is being linked to. External paths are not located in
/// this map because the `External` type itself has all the information /// this map because the `External` type itself has all the information
/// necessary. /// necessary.
pub paths: HashMap<ast::DefId, (Vec<~str>, ItemType)>, pub paths: HashMap<ast::DefId, (Vec<StrBuf>, ItemType)>,
/// This map contains information about all known traits of this crate. /// This map contains information about all known traits of this crate.
/// Implementations of a crate should inherit the documentation of the /// Implementations of a crate should inherit the documentation of the
@ -157,7 +157,7 @@ pub struct Cache {
// Private fields only used when initially crawling a crate to build a cache // Private fields only used when initially crawling a crate to build a cache
stack: Vec<~str> , stack: Vec<StrBuf> ,
parent_stack: Vec<ast::NodeId> , parent_stack: Vec<ast::NodeId> ,
search_index: Vec<IndexItem> , search_index: Vec<IndexItem> ,
privmod: bool, privmod: bool,
@ -176,7 +176,7 @@ struct SourceCollector<'a> {
cx: &'a mut Context, cx: &'a mut Context,
/// Processed source-file paths /// Processed source-file paths
seen: HashSet<~str>, seen: HashSet<StrBuf>,
/// Root destination to place all HTML output into /// Root destination to place all HTML output into
dst: Path, dst: Path,
} }
@ -195,16 +195,16 @@ struct Sidebar<'a> { cx: &'a Context, item: &'a clean::Item, }
/// by hand to a large JS file at the end of cache-creation. /// by hand to a large JS file at the end of cache-creation.
struct IndexItem { struct IndexItem {
ty: ItemType, ty: ItemType,
name: ~str, name: StrBuf,
path: ~str, path: StrBuf,
desc: ~str, desc: StrBuf,
parent: Option<ast::NodeId>, parent: Option<ast::NodeId>,
} }
// TLS keys used to carry information around during rendering. // TLS keys used to carry information around during rendering.
local_data_key!(pub cache_key: Arc<Cache>) local_data_key!(pub cache_key: Arc<Cache>)
local_data_key!(pub current_location_key: Vec<~str> ) local_data_key!(pub current_location_key: Vec<StrBuf> )
/// Generates the documentation for `crate` into the directory `dst` /// Generates the documentation for `crate` into the directory `dst`
pub fn run(mut krate: clean::Crate, dst: Path) -> io::IoResult<()> { pub fn run(mut krate: clean::Crate, dst: Path) -> io::IoResult<()> {
@ -214,8 +214,8 @@ pub fn run(mut krate: clean::Crate, dst: Path) -> io::IoResult<()> {
root_path: StrBuf::new(), root_path: StrBuf::new(),
sidebar: HashMap::new(), sidebar: HashMap::new(),
layout: layout::Layout { layout: layout::Layout {
logo: "".to_owned(), logo: "".to_strbuf(),
favicon: "".to_owned(), favicon: "".to_strbuf(),
krate: krate.name.clone(), krate: krate.name.clone(),
}, },
include_sources: true, include_sources: true,
@ -226,13 +226,16 @@ pub fn run(mut krate: clean::Crate, dst: Path) -> io::IoResult<()> {
Some(attrs) => { Some(attrs) => {
for attr in attrs.iter() { for attr in attrs.iter() {
match *attr { match *attr {
clean::NameValue(ref x, ref s) if "html_favicon_url" == *x => { clean::NameValue(ref x, ref s)
cx.layout.favicon = s.to_owned(); if "html_favicon_url" == x.as_slice() => {
cx.layout.favicon = s.to_strbuf();
} }
clean::NameValue(ref x, ref s) if "html_logo_url" == *x => { clean::NameValue(ref x, ref s)
cx.layout.logo = s.to_owned(); if "html_logo_url" == x.as_slice() => {
cx.layout.logo = s.to_strbuf();
} }
clean::Word(ref x) if "html_no_source" == *x => { clean::Word(ref x)
if "html_no_source" == x.as_slice() => {
cx.include_sources = false; cx.include_sources = false;
} }
_ => {} _ => {}
@ -291,8 +294,9 @@ pub fn run(mut krate: clean::Crate, dst: Path) -> io::IoResult<()> {
index.push(IndexItem { index.push(IndexItem {
ty: shortty(item), ty: shortty(item),
name: item.name.clone().unwrap(), name: item.name.clone().unwrap(),
path: fqp.slice_to(fqp.len() - 1).connect("::"), path: fqp.slice_to(fqp.len() - 1).connect("::")
desc: shorter(item.doc_value()).to_owned(), .to_strbuf(),
desc: shorter(item.doc_value()).to_strbuf(),
parent: Some(pid), parent: Some(pid),
}); });
}, },
@ -322,14 +326,14 @@ pub fn run(mut krate: clean::Crate, dst: Path) -> io::IoResult<()> {
let mut w = MemWriter::new(); let mut w = MemWriter::new();
try!(write!(&mut w, r#"searchIndex['{}'] = \{"items":["#, krate.name)); try!(write!(&mut w, r#"searchIndex['{}'] = \{"items":["#, krate.name));
let mut lastpath = "".to_owned(); let mut lastpath = "".to_strbuf();
for (i, item) in cache.search_index.iter().enumerate() { for (i, item) in cache.search_index.iter().enumerate() {
// Omit the path if it is same to that of the prior item. // Omit the path if it is same to that of the prior item.
let path; let path;
if lastpath == item.path { if lastpath.as_slice() == item.path.as_slice() {
path = ""; path = "";
} else { } else {
lastpath = item.path.clone(); lastpath = item.path.to_strbuf();
path = item.path.as_slice(); path = item.path.as_slice();
}; };
@ -485,14 +489,15 @@ fn extern_location(e: &clean::ExternalCrate, dst: &Path) -> ExternalLocation {
// external crate // external crate
for attr in e.attrs.iter() { for attr in e.attrs.iter() {
match *attr { match *attr {
clean::List(ref x, ref list) if "doc" == *x => { clean::List(ref x, ref list) if "doc" == x.as_slice() => {
for attr in list.iter() { for attr in list.iter() {
match *attr { match *attr {
clean::NameValue(ref x, ref s) if "html_root_url" == *x => { clean::NameValue(ref x, ref s)
if s.ends_with("/") { if "html_root_url" == x.as_slice() => {
return Remote(s.to_owned()); if s.as_slice().ends_with("/") {
return Remote(s.to_strbuf());
} }
return Remote(*s + "/"); return Remote(format_strbuf!("{}/", s));
} }
_ => {} _ => {}
} }
@ -517,7 +522,10 @@ impl<'a> DocFolder for SourceCollector<'a> {
// something like that), so just don't include sources for the // something like that), so just don't include sources for the
// entire crate. The other option is maintaining this mapping on a // entire crate. The other option is maintaining this mapping on a
// per-file basis, but that's probably not worth it... // per-file basis, but that's probably not worth it...
self.cx.include_sources = match self.emit_source(item.source.filename) { self.cx
.include_sources = match self.emit_source(item.source
.filename
.as_slice()) {
Ok(()) => true, Ok(()) => true,
Err(e) => { Err(e) => {
println!("warning: source code was requested to be rendered, \ println!("warning: source code was requested to be rendered, \
@ -689,9 +697,9 @@ impl DocFolder for Cache {
(parent, Some(path)) if !self.privmod => { (parent, Some(path)) if !self.privmod => {
self.search_index.push(IndexItem { self.search_index.push(IndexItem {
ty: shortty(&item), ty: shortty(&item),
name: s.to_owned(), name: s.to_strbuf(),
path: path.connect("::"), path: path.connect("::").to_strbuf(),
desc: shorter(item.doc_value()).to_owned(), desc: shorter(item.doc_value()).to_strbuf(),
parent: parent, parent: parent,
}); });
} }
@ -710,7 +718,7 @@ impl DocFolder for Cache {
let pushed = if item.name.is_some() { let pushed = if item.name.is_some() {
let n = item.name.get_ref(); let n = item.name.get_ref();
if n.len() > 0 { if n.len() > 0 {
self.stack.push(n.to_owned()); self.stack.push(n.to_strbuf());
true true
} else { false } } else { false }
} else { false }; } else { false };
@ -779,7 +787,10 @@ impl DocFolder for Cache {
// extract relevant documentation for this impl // extract relevant documentation for this impl
match attrs.move_iter().find(|a| { match attrs.move_iter().find(|a| {
match *a { match *a {
clean::NameValue(ref x, _) if "doc" == *x => true, clean::NameValue(ref x, _)
if "doc" == x.as_slice() => {
true
}
_ => false _ => false
} }
}) { }) {
@ -828,7 +839,7 @@ impl<'a> Cache {
impl Context { impl Context {
/// Recurse in the directory structure and change the "root path" to make /// Recurse in the directory structure and change the "root path" to make
/// sure it always points to the top (relatively) /// sure it always points to the top (relatively)
fn recurse<T>(&mut self, s: ~str, f: |&mut Context| -> T) -> T { fn recurse<T>(&mut self, s: StrBuf, f: |&mut Context| -> T) -> T {
if s.len() == 0 { if s.len() == 0 {
fail!("what {:?}", self); fail!("what {:?}", self);
} }
@ -898,7 +909,7 @@ impl Context {
if title.len() > 0 { if title.len() > 0 {
title.push_str("::"); title.push_str("::");
} }
title.push_str(*it.name.get_ref()); title.push_str(it.name.get_ref().as_slice());
} }
title.push_str(" - Rust"); title.push_str(" - Rust");
let page = layout::Page { let page = layout::Page {
@ -923,7 +934,7 @@ impl Context {
// modules are special because they add a namespace. We also need to // modules are special because they add a namespace. We also need to
// recurse into the items of the module as well. // recurse into the items of the module as well.
clean::ModuleItem(..) => { clean::ModuleItem(..) => {
let name = item.name.get_ref().to_owned(); let name = item.name.get_ref().to_strbuf();
let mut item = Some(item); let mut item = Some(item);
self.recurse(name, |this| { self.recurse(name, |this| {
let item = item.take_unwrap(); let item = item.take_unwrap();
@ -963,17 +974,19 @@ impl<'a> Item<'a> {
} }
} }
fn link(&self) -> ~str { fn link(&self) -> StrBuf {
let mut path = Vec::new(); let mut path = Vec::new();
clean_srcpath(self.item.source.filename.as_bytes(), |component| { clean_srcpath(self.item.source.filename.as_bytes(), |component| {
path.push(component.to_owned()); path.push(component.to_owned());
}); });
let href = if self.item.source.loline == self.item.source.hiline { let href = if self.item.source.loline == self.item.source.hiline {
format!("{}", self.item.source.loline) format_strbuf!("{}", self.item.source.loline)
} else { } else {
format!("{}-{}", self.item.source.loline, self.item.source.hiline) format_strbuf!("{}-{}",
self.item.source.loline,
self.item.source.hiline)
}; };
format!("{root}src/{krate}/{path}.html\\#{href}", format_strbuf!("{root}src/{krate}/{path}.html\\#{href}",
root = self.cx.root_path, root = self.cx.root_path,
krate = self.cx.layout.krate, krate = self.cx.layout.krate,
path = path.connect("/"), path = path.connect("/"),
@ -1047,18 +1060,24 @@ impl<'a> fmt::Show for Item<'a> {
} }
} }
fn item_path(item: &clean::Item) -> ~str { fn item_path(item: &clean::Item) -> StrBuf {
match item.inner { match item.inner {
clean::ModuleItem(..) => *item.name.get_ref() + "/index.html", clean::ModuleItem(..) => {
_ => shortty(item).to_static_str() + "." + *item.name.get_ref() + ".html" format_strbuf!("{}/index.html", item.name.get_ref())
}
_ => {
format_strbuf!("{}.{}.html",
shortty(item).to_static_str(),
*item.name.get_ref())
}
} }
} }
fn full_path(cx: &Context, item: &clean::Item) -> ~str { fn full_path(cx: &Context, item: &clean::Item) -> StrBuf {
let mut s = StrBuf::from_str(cx.current.connect("::")); let mut s = StrBuf::from_str(cx.current.connect("::"));
s.push_str("::"); s.push_str("::");
s.push_str(item.name.get_ref().as_slice()); s.push_str(item.name.get_ref().as_slice());
return s.into_owned(); return s
} }
fn blank<'a>(s: Option<&'a str>) -> &'a str { fn blank<'a>(s: Option<&'a str>) -> &'a str {
@ -1197,7 +1216,7 @@ fn item_module(w: &mut Writer, cx: &Context,
VisSpace(myitem.visibility), VisSpace(myitem.visibility),
*myitem.name.get_ref(), *myitem.name.get_ref(),
s.type_, s.type_,
Initializer(s.expr, Item { cx: cx, item: myitem }), Initializer(s.expr.as_slice(), Item { cx: cx, item: myitem }),
Markdown(blank(myitem.doc_value())))); Markdown(blank(myitem.doc_value()))));
} }
@ -1584,11 +1603,11 @@ fn render_methods(w: &mut Writer, it: &clean::Item) -> fmt::Result {
let mut non_trait = v.iter().filter(|p| { let mut non_trait = v.iter().filter(|p| {
p.ref0().trait_.is_none() p.ref0().trait_.is_none()
}); });
let non_trait = non_trait.collect::<Vec<&(clean::Impl, Option<~str>)>>(); let non_trait = non_trait.collect::<Vec<&(clean::Impl, Option<StrBuf>)>>();
let mut traits = v.iter().filter(|p| { let mut traits = v.iter().filter(|p| {
p.ref0().trait_.is_some() p.ref0().trait_.is_some()
}); });
let traits = traits.collect::<Vec<&(clean::Impl, Option<~str>)>>(); let traits = traits.collect::<Vec<&(clean::Impl, Option<StrBuf>)>>();
if non_trait.len() > 0 { if non_trait.len() > 0 {
try!(write!(w, "<h2 id='methods'>Methods</h2>")); try!(write!(w, "<h2 id='methods'>Methods</h2>"));
@ -1624,7 +1643,7 @@ fn render_methods(w: &mut Writer, it: &clean::Item) -> fmt::Result {
} }
fn render_impl(w: &mut Writer, i: &clean::Impl, fn render_impl(w: &mut Writer, i: &clean::Impl,
dox: &Option<~str>) -> fmt::Result { dox: &Option<StrBuf>) -> fmt::Result {
try!(write!(w, "<h3 class='impl'><code>impl{} ", i.generics)); try!(write!(w, "<h3 class='impl'><code>impl{} ", i.generics));
let trait_id = match i.trait_ { let trait_id = match i.trait_ {
Some(ref ty) => { Some(ref ty) => {
@ -1760,15 +1779,15 @@ impl<'a> fmt::Show for Sidebar<'a> {
} }
} }
fn build_sidebar(m: &clean::Module) -> HashMap<~str, Vec<~str> > { fn build_sidebar(m: &clean::Module) -> HashMap<StrBuf, Vec<StrBuf> > {
let mut map = HashMap::new(); let mut map = HashMap::new();
for item in m.items.iter() { for item in m.items.iter() {
let short = shortty(item).to_static_str(); let short = shortty(item).to_static_str();
let myname = match item.name { let myname = match item.name {
None => continue, None => continue,
Some(ref s) => s.to_owned(), Some(ref s) => s.to_strbuf(),
}; };
let v = map.find_or_insert_with(short.to_owned(), |_| Vec::new()); let v = map.find_or_insert_with(short.to_strbuf(), |_| Vec::new());
v.push(myname); v.push(myname);
} }
@ -1800,6 +1819,7 @@ impl<'a> fmt::Show for Source<'a> {
fn item_macro(w: &mut Writer, it: &clean::Item, fn item_macro(w: &mut Writer, it: &clean::Item,
t: &clean::Macro) -> fmt::Result { t: &clean::Macro) -> fmt::Result {
try!(w.write_str(highlight::highlight(t.source, Some("macro")))); try!(w.write_str(highlight::highlight(t.source.as_slice(),
Some("macro")).as_slice()));
document(w, it) document(w, it)
} }

View file

@ -39,9 +39,9 @@ impl Toc {
#[deriving(Eq)] #[deriving(Eq)]
pub struct TocEntry { pub struct TocEntry {
level: u32, level: u32,
sec_number: ~str, sec_number: StrBuf,
name: ~str, name: StrBuf,
id: ~str, id: StrBuf,
children: Toc, children: Toc,
} }
@ -125,7 +125,7 @@ impl TocBuilder {
/// Push a level `level` heading into the appropriate place in the /// Push a level `level` heading into the appropriate place in the
/// heirarchy, returning a string containing the section number in /// heirarchy, returning a string containing the section number in
/// `<num>.<num>.<num>` format. /// `<num>.<num>.<num>` format.
pub fn push<'a>(&'a mut self, level: u32, name: ~str, id: ~str) -> &'a str { pub fn push<'a>(&'a mut self, level: u32, name: StrBuf, id: StrBuf) -> &'a str {
assert!(level >= 1); assert!(level >= 1);
// collapse all previous sections into their parents until we // collapse all previous sections into their parents until we
@ -141,7 +141,8 @@ impl TocBuilder {
(0, &self.top_level) (0, &self.top_level)
} }
Some(entry) => { Some(entry) => {
sec_number = StrBuf::from_str(entry.sec_number.clone()); sec_number = StrBuf::from_str(entry.sec_number
.as_slice());
sec_number.push_str("."); sec_number.push_str(".");
(entry.level, &entry.children) (entry.level, &entry.children)
} }
@ -153,13 +154,13 @@ impl TocBuilder {
sec_number.push_str("0."); sec_number.push_str("0.");
} }
let number = toc.count_entries_with_level(level); let number = toc.count_entries_with_level(level);
sec_number.push_str(format!("{}", number + 1)) sec_number.push_str(format_strbuf!("{}", number + 1).as_slice())
} }
self.chain.push(TocEntry { self.chain.push(TocEntry {
level: level, level: level,
name: name, name: name,
sec_number: sec_number.into_owned(), sec_number: sec_number,
id: id, id: id,
children: Toc { entries: Vec::new() } children: Toc { entries: Vec::new() }
}); });
@ -200,7 +201,10 @@ mod test {
// there's been no macro mistake. // there's been no macro mistake.
macro_rules! push { macro_rules! push {
($level: expr, $name: expr) => { ($level: expr, $name: expr) => {
assert_eq!(builder.push($level, $name.to_owned(), "".to_owned()), $name); assert_eq!(builder.push($level,
$name.to_strbuf(),
"".to_strbuf()),
$name);
} }
} }
push!(2, "0.1"); push!(2, "0.1");
@ -238,9 +242,9 @@ mod test {
$( $(
TocEntry { TocEntry {
level: $level, level: $level,
name: $name.to_owned(), name: $name.to_strbuf(),
sec_number: $name.to_owned(), sec_number: $name.to_strbuf(),
id: "".to_owned(), id: "".to_strbuf(),
children: toc!($($sub),*) children: toc!($($sub),*)
} }
),* ),*

View file

@ -85,7 +85,10 @@ local_data_key!(pub analysiskey: core::CrateAnalysis)
type Output = (clean::Crate, Vec<plugins::PluginJson> ); type Output = (clean::Crate, Vec<plugins::PluginJson> );
pub fn main() { pub fn main() {
std::os::set_exit_status(main_args(std::os::args().as_slice())); std::os::set_exit_status(main_args(std::os::args().iter()
.map(|x| x.to_strbuf())
.collect::<Vec<_>>()
.as_slice()));
} }
pub fn opts() -> Vec<getopts::OptGroup> { pub fn opts() -> Vec<getopts::OptGroup> {
@ -133,8 +136,13 @@ pub fn usage(argv0: &str) {
opts().as_slice())); opts().as_slice()));
} }
pub fn main_args(args: &[~str]) -> int { pub fn main_args(args: &[StrBuf]) -> int {
let matches = match getopts::getopts(args.tail(), opts().as_slice()) { let matches = match getopts::getopts(args.tail()
.iter()
.map(|x| (*x).to_owned())
.collect::<Vec<_>>()
.as_slice(),
opts().as_slice()) {
Ok(m) => m, Ok(m) => m,
Err(err) => { Err(err) => {
println!("{}", err.to_err_msg()); println!("{}", err.to_err_msg());
@ -142,10 +150,10 @@ pub fn main_args(args: &[~str]) -> int {
} }
}; };
if matches.opt_present("h") || matches.opt_present("help") { if matches.opt_present("h") || matches.opt_present("help") {
usage(args[0]); usage(args[0].as_slice());
return 0; return 0;
} else if matches.opt_present("version") { } else if matches.opt_present("version") {
rustc::driver::version(args[0]); rustc::driver::version(args[0].as_slice());
return 0; return 0;
} }
@ -161,9 +169,9 @@ pub fn main_args(args: &[~str]) -> int {
let libs = matches.opt_strs("L").iter().map(|s| Path::new(s.as_slice())).collect(); let libs = matches.opt_strs("L").iter().map(|s| Path::new(s.as_slice())).collect();
let test_args = matches.opt_strs("test-args"); let test_args = matches.opt_strs("test-args");
let test_args: Vec<~str> = test_args.iter() let test_args: Vec<StrBuf> = test_args.iter()
.flat_map(|s| s.words()) .flat_map(|s| s.words())
.map(|s| s.to_owned()) .map(|s| s.to_strbuf())
.collect(); .collect();
let should_test = matches.opt_present("test"); let should_test = matches.opt_present("test");
@ -178,9 +186,14 @@ pub fn main_args(args: &[~str]) -> int {
libs, libs,
test_args.move_iter().collect()) test_args.move_iter().collect())
} }
(true, false) => return test::run(input, cfgs.move_iter().collect(), (true, false) => {
libs, test_args), return test::run(input,
cfgs.move_iter()
.map(|x| x.to_strbuf())
.collect(),
libs,
test_args)
}
(false, true) => return markdown::render(input, output.unwrap_or(Path::new("doc")), (false, true) => return markdown::render(input, output.unwrap_or(Path::new("doc")),
&matches), &matches),
(false, false) => {} (false, false) => {}
@ -235,11 +248,11 @@ pub fn main_args(args: &[~str]) -> int {
/// Looks inside the command line arguments to extract the relevant input format /// Looks inside the command line arguments to extract the relevant input format
/// and files and then generates the necessary rustdoc output for formatting. /// and files and then generates the necessary rustdoc output for formatting.
fn acquire_input(input: &str, fn acquire_input(input: &str,
matches: &getopts::Matches) -> Result<Output, ~str> { matches: &getopts::Matches) -> Result<Output, StrBuf> {
match matches.opt_str("r").as_ref().map(|s| s.as_slice()) { match matches.opt_str("r").as_ref().map(|s| s.as_slice()) {
Some("rust") => Ok(rust_input(input, matches)), Some("rust") => Ok(rust_input(input, matches)),
Some("json") => json_input(input), Some("json") => json_input(input),
Some(s) => Err("unknown input format: " + s), Some(s) => Err(format_strbuf!("unknown input format: {}", s)),
None => { None => {
if input.ends_with(".json") { if input.ends_with(".json") {
json_input(input) json_input(input)
@ -258,7 +271,10 @@ fn acquire_input(input: &str,
fn rust_input(cratefile: &str, matches: &getopts::Matches) -> Output { fn rust_input(cratefile: &str, matches: &getopts::Matches) -> Output {
let mut default_passes = !matches.opt_present("no-defaults"); let mut default_passes = !matches.opt_present("no-defaults");
let mut passes = matches.opt_strs("passes"); let mut passes = matches.opt_strs("passes");
let mut plugins = matches.opt_strs("plugins"); let mut plugins = matches.opt_strs("plugins")
.move_iter()
.map(|x| x.to_strbuf())
.collect::<Vec<_>>();
// First, parse the crate and extract all relevant information. // First, parse the crate and extract all relevant information.
let libs: Vec<Path> = matches.opt_strs("L") let libs: Vec<Path> = matches.opt_strs("L")
@ -270,8 +286,8 @@ fn rust_input(cratefile: &str, matches: &getopts::Matches) -> Output {
info!("starting to run rustc"); info!("starting to run rustc");
let (krate, analysis) = std::task::try(proc() { let (krate, analysis) = std::task::try(proc() {
let cr = cr; let cr = cr;
core::run_core(libs.move_iter().collect(), core::run_core(libs.move_iter().map(|x| x.clone()).collect(),
cfgs.move_iter().collect(), cfgs.move_iter().map(|x| x.to_strbuf()).collect(),
&cr) &cr)
}).unwrap(); }).unwrap();
info!("finished with rustc"); info!("finished with rustc");
@ -283,17 +299,20 @@ fn rust_input(cratefile: &str, matches: &getopts::Matches) -> Output {
Some(nested) => { Some(nested) => {
for inner in nested.iter() { for inner in nested.iter() {
match *inner { match *inner {
clean::Word(ref x) if "no_default_passes" == *x => { clean::Word(ref x)
if "no_default_passes" == x.as_slice() => {
default_passes = false; default_passes = false;
} }
clean::NameValue(ref x, ref value) if "passes" == *x => { clean::NameValue(ref x, ref value)
for pass in value.words() { if "passes" == x.as_slice() => {
for pass in value.as_slice().words() {
passes.push(pass.to_owned()); passes.push(pass.to_owned());
} }
} }
clean::NameValue(ref x, ref value) if "plugins" == *x => { clean::NameValue(ref x, ref value)
for p in value.words() { if "plugins" == x.as_slice() => {
plugins.push(p.to_owned()); for p in value.as_slice().words() {
plugins.push(p.to_strbuf());
} }
} }
_ => {} _ => {}
@ -333,39 +352,45 @@ fn rust_input(cratefile: &str, matches: &getopts::Matches) -> Output {
/// This input format purely deserializes the json output file. No passes are /// This input format purely deserializes the json output file. No passes are
/// run over the deserialized output. /// run over the deserialized output.
fn json_input(input: &str) -> Result<Output, ~str> { fn json_input(input: &str) -> Result<Output, StrBuf> {
let mut input = match File::open(&Path::new(input)) { let mut input = match File::open(&Path::new(input)) {
Ok(f) => f, Ok(f) => f,
Err(e) => return Err(format!("couldn't open {}: {}", input, e)), Err(e) => {
return Err(format_strbuf!("couldn't open {}: {}", input, e))
}
}; };
match json::from_reader(&mut input) { match json::from_reader(&mut input) {
Err(s) => Err(s.to_str()), Err(s) => Err(s.to_str().to_strbuf()),
Ok(json::Object(obj)) => { Ok(json::Object(obj)) => {
let mut obj = obj; let mut obj = obj;
// Make sure the schema is what we expect // Make sure the schema is what we expect
match obj.pop(&"schema".to_owned()) { match obj.pop(&"schema".to_owned()) {
Some(json::String(version)) => { Some(json::String(version)) => {
if version.as_slice() != SCHEMA_VERSION { if version.as_slice() != SCHEMA_VERSION {
return Err(format!("sorry, but I only understand \ return Err(format_strbuf!(
version {}", SCHEMA_VERSION)) "sorry, but I only understand version {}",
SCHEMA_VERSION))
} }
} }
Some(..) => return Err("malformed json".to_owned()), Some(..) => return Err("malformed json".to_strbuf()),
None => return Err("expected a schema version".to_owned()), None => return Err("expected a schema version".to_strbuf()),
} }
let krate = match obj.pop(&"crate".to_owned()) { let krate = match obj.pop(&"crate".to_str()) {
Some(json) => { Some(json) => {
let mut d = json::Decoder::new(json); let mut d = json::Decoder::new(json);
Decodable::decode(&mut d).unwrap() Decodable::decode(&mut d).unwrap()
} }
None => return Err("malformed json".to_owned()), None => return Err("malformed json".to_strbuf()),
}; };
// FIXME: this should read from the "plugins" field, but currently // FIXME: this should read from the "plugins" field, but currently
// Json doesn't implement decodable... // Json doesn't implement decodable...
let plugin_output = Vec::new(); let plugin_output = Vec::new();
Ok((krate, plugin_output)) Ok((krate, plugin_output))
} }
Ok(..) => Err("malformed json input: expected an object at the top".to_owned()), Ok(..) => {
Err("malformed json input: expected an object at the \
top".to_strbuf())
}
} }
} }
@ -380,7 +405,15 @@ fn json_output(krate: clean::Crate, res: Vec<plugins::PluginJson> ,
// } // }
let mut json = box collections::TreeMap::new(); let mut json = box collections::TreeMap::new();
json.insert("schema".to_owned(), json::String(SCHEMA_VERSION.to_owned())); json.insert("schema".to_owned(), json::String(SCHEMA_VERSION.to_owned()));
let plugins_json = box res.move_iter().filter_map(|opt| opt).collect(); let plugins_json = box res.move_iter()
.filter_map(|opt| {
match opt {
None => None,
Some((string, json)) => {
Some((string.to_owned(), json))
}
}
}).collect();
// FIXME #8335: yuck, Rust -> str -> JSON round trip! No way to .encode // FIXME #8335: yuck, Rust -> str -> JSON round trip! No way to .encode
// straight to the Rust JSON representation. // straight to the Rust JSON representation.

View file

@ -19,10 +19,10 @@ use html::escape::Escape;
use html::markdown::{MarkdownWithToc, find_testable_code, reset_headers}; use html::markdown::{MarkdownWithToc, find_testable_code, reset_headers};
use test::Collector; use test::Collector;
fn load_string(input: &Path) -> io::IoResult<Option<~str>> { fn load_string(input: &Path) -> io::IoResult<Option<StrBuf>> {
let mut f = try!(io::File::open(input)); let mut f = try!(io::File::open(input));
let d = try!(f.read_to_end()); let d = try!(f.read_to_end());
Ok(str::from_utf8(d.as_slice()).map(|s| s.to_owned())) Ok(str::from_utf8(d.as_slice()).map(|s| s.to_strbuf()))
} }
macro_rules! load_or_return { macro_rules! load_or_return {
($input: expr, $cant_read: expr, $not_utf8: expr) => { ($input: expr, $cant_read: expr, $not_utf8: expr) => {
@ -61,13 +61,13 @@ fn extract_leading_metadata<'a>(s: &'a str) -> (Vec<&'a str>, &'a str) {
(metadata, "") (metadata, "")
} }
fn load_external_files(names: &[~str]) -> Option<~str> { fn load_external_files(names: &[StrBuf]) -> Option<StrBuf> {
let mut out = StrBuf::new(); let mut out = StrBuf::new();
for name in names.iter() { for name in names.iter() {
out.push_str(load_or_return!(name.as_slice(), None, None)); out.push_str(load_or_return!(name.as_slice(), None, None).as_slice());
out.push_char('\n'); out.push_char('\n');
} }
Some(out.into_owned()) Some(out)
} }
/// Render `input` (e.g. "foo.md") into an HTML file in `output` /// Render `input` (e.g. "foo.md") into an HTML file in `output`
@ -87,10 +87,19 @@ pub fn render(input: &str, mut output: Path, matches: &getopts::Matches) -> int
let (in_header, before_content, after_content) = let (in_header, before_content, after_content) =
match (load_external_files(matches.opt_strs("markdown-in-header") match (load_external_files(matches.opt_strs("markdown-in-header")
.move_iter()
.map(|x| x.to_strbuf())
.collect::<Vec<_>>()
.as_slice()), .as_slice()),
load_external_files(matches.opt_strs("markdown-before-content") load_external_files(matches.opt_strs("markdown-before-content")
.move_iter()
.map(|x| x.to_strbuf())
.collect::<Vec<_>>()
.as_slice()), .as_slice()),
load_external_files(matches.opt_strs("markdown-after-content") load_external_files(matches.opt_strs("markdown-after-content")
.move_iter()
.map(|x| x.to_strbuf())
.collect::<Vec<_>>()
.as_slice())) { .as_slice())) {
(Some(a), Some(b), Some(c)) => (a,b,c), (Some(a), Some(b), Some(c)) => (a,b,c),
_ => return 3 _ => return 3
@ -106,7 +115,7 @@ pub fn render(input: &str, mut output: Path, matches: &getopts::Matches) -> int
Ok(f) => f Ok(f) => f
}; };
let (metadata, text) = extract_leading_metadata(input_str); let (metadata, text) = extract_leading_metadata(input_str.as_slice());
if metadata.len() == 0 { if metadata.len() == 0 {
let _ = writeln!(&mut io::stderr(), let _ = writeln!(&mut io::stderr(),
"invalid markdown file: expecting initial line with `% ...TITLE...`"); "invalid markdown file: expecting initial line with `% ...TITLE...`");
@ -161,12 +170,16 @@ pub fn render(input: &str, mut output: Path, matches: &getopts::Matches) -> int
} }
/// Run any tests/code examples in the markdown file `input`. /// Run any tests/code examples in the markdown file `input`.
pub fn test(input: &str, libs: HashSet<Path>, mut test_args: Vec<~str>) -> int { pub fn test(input: &str, libs: HashSet<Path>, mut test_args: Vec<StrBuf>) -> int {
let input_str = load_or_return!(input, 1, 2); let input_str = load_or_return!(input, 1, 2);
let mut collector = Collector::new(input.to_owned(), libs, true, true); let mut collector = Collector::new(input.to_strbuf(), libs, true, true);
find_testable_code(input_str, &mut collector); find_testable_code(input_str.as_slice(), &mut collector);
test_args.unshift("rustdoctest".to_owned()); test_args.unshift("rustdoctest".to_strbuf());
testing::test_main(test_args.as_slice(), collector.tests); testing::test_main(test_args.move_iter()
.map(|x| x.to_str())
.collect::<Vec<_>>()
.as_slice(),
collector.tests);
0 0
} }

View file

@ -228,8 +228,11 @@ pub fn unindent_comments(krate: clean::Crate) -> plugins::PluginResult {
let mut avec: Vec<clean::Attribute> = Vec::new(); let mut avec: Vec<clean::Attribute> = Vec::new();
for attr in i.attrs.iter() { for attr in i.attrs.iter() {
match attr { match attr {
&clean::NameValue(ref x, ref s) if "doc" == *x => avec.push( &clean::NameValue(ref x, ref s)
clean::NameValue("doc".to_owned(), unindent(*s))), if "doc" == x.as_slice() => {
avec.push(clean::NameValue("doc".to_strbuf(),
unindent(s.as_slice())))
}
x => avec.push(x.clone()) x => avec.push(x.clone())
} }
} }
@ -250,19 +253,20 @@ pub fn collapse_docs(krate: clean::Crate) -> plugins::PluginResult {
let mut i = i; let mut i = i;
for attr in i.attrs.iter() { for attr in i.attrs.iter() {
match *attr { match *attr {
clean::NameValue(ref x, ref s) if "doc" == *x => { clean::NameValue(ref x, ref s)
docstr.push_str(s.clone()); if "doc" == x.as_slice() => {
docstr.push_str(s.as_slice());
docstr.push_char('\n'); docstr.push_char('\n');
}, },
_ => () _ => ()
} }
} }
let mut a: Vec<clean::Attribute> = i.attrs.iter().filter(|&a| match a { let mut a: Vec<clean::Attribute> = i.attrs.iter().filter(|&a| match a {
&clean::NameValue(ref x, _) if "doc" == *x => false, &clean::NameValue(ref x, _) if "doc" == x.as_slice() => false,
_ => true _ => true
}).map(|x| x.clone()).collect(); }).map(|x| x.clone()).collect();
if docstr.len() > 0 { if docstr.len() > 0 {
a.push(clean::NameValue("doc".to_owned(), docstr.into_owned())); a.push(clean::NameValue("doc".to_strbuf(), docstr));
} }
i.attrs = a; i.attrs = a;
self.fold_item_recur(i) self.fold_item_recur(i)
@ -273,7 +277,7 @@ pub fn collapse_docs(krate: clean::Crate) -> plugins::PluginResult {
(krate, None) (krate, None)
} }
pub fn unindent(s: &str) -> ~str { pub fn unindent(s: &str) -> StrBuf {
let lines = s.lines_any().collect::<Vec<&str> >(); let lines = s.lines_any().collect::<Vec<&str> >();
let mut saw_first_line = false; let mut saw_first_line = false;
let mut saw_second_line = false; let mut saw_second_line = false;
@ -318,18 +322,18 @@ pub fn unindent(s: &str) -> ~str {
}); });
if lines.len() >= 1 { if lines.len() >= 1 {
let mut unindented = vec!( lines.get(0).trim() ); let mut unindented = vec![ lines.get(0).trim().to_strbuf() ];
unindented.push_all(lines.tail().iter().map(|&line| { unindented.push_all(lines.tail().iter().map(|&line| {
if line.is_whitespace() { if line.is_whitespace() {
line line.to_strbuf()
} else { } else {
assert!(line.len() >= min_indent); assert!(line.len() >= min_indent);
line.slice_from(min_indent) line.slice_from(min_indent).to_strbuf()
} }
}).collect::<Vec<_>>().as_slice()); }).collect::<Vec<_>>().as_slice());
unindented.connect("\n") unindented.connect("\n").to_strbuf()
} else { } else {
s.to_owned() s.to_strbuf()
} }
} }
@ -341,14 +345,14 @@ mod unindent_tests {
fn should_unindent() { fn should_unindent() {
let s = " line1\n line2".to_owned(); let s = " line1\n line2".to_owned();
let r = unindent(s); let r = unindent(s);
assert_eq!(r, "line1\nline2".to_owned()); assert_eq!(r.as_slice(), "line1\nline2");
} }
#[test] #[test]
fn should_unindent_multiple_paragraphs() { fn should_unindent_multiple_paragraphs() {
let s = " line1\n\n line2".to_owned(); let s = " line1\n\n line2".to_owned();
let r = unindent(s); let r = unindent(s);
assert_eq!(r, "line1\n\nline2".to_owned()); assert_eq!(r.as_slice(), "line1\n\nline2");
} }
#[test] #[test]
@ -357,7 +361,7 @@ mod unindent_tests {
// base indentation and should be preserved // base indentation and should be preserved
let s = " line1\n\n line2".to_owned(); let s = " line1\n\n line2".to_owned();
let r = unindent(s); let r = unindent(s);
assert_eq!(r, "line1\n\n line2".to_owned()); assert_eq!(r.as_slice(), "line1\n\n line2");
} }
#[test] #[test]
@ -369,13 +373,13 @@ mod unindent_tests {
// and continue here"] // and continue here"]
let s = "line1\n line2".to_owned(); let s = "line1\n line2".to_owned();
let r = unindent(s); let r = unindent(s);
assert_eq!(r, "line1\nline2".to_owned()); assert_eq!(r.as_slice(), "line1\nline2");
} }
#[test] #[test]
fn should_not_ignore_first_line_indent_in_a_single_line_para() { fn should_not_ignore_first_line_indent_in_a_single_line_para() {
let s = "line1\n\n line2".to_owned(); let s = "line1\n\n line2".to_owned();
let r = unindent(s); let r = unindent(s);
assert_eq!(r, "line1\n\n line2".to_owned()); assert_eq!(r.as_slice(), "line1\n\n line2");
} }
} }

View file

@ -14,7 +14,7 @@ use dl = std::unstable::dynamic_lib;
use serialize::json; use serialize::json;
use std::strbuf::StrBuf; use std::strbuf::StrBuf;
pub type PluginJson = Option<(~str, json::Json)>; pub type PluginJson = Option<(StrBuf, json::Json)>;
pub type PluginResult = (clean::Crate, PluginJson); pub type PluginResult = (clean::Crate, PluginJson);
pub type PluginCallback = fn (clean::Crate) -> PluginResult; pub type PluginCallback = fn (clean::Crate) -> PluginResult;
@ -41,7 +41,7 @@ impl PluginManager {
/// Turns `name` into the proper dynamic library filename for the given /// Turns `name` into the proper dynamic library filename for the given
/// platform. On windows, it turns into name.dll, on OS X, name.dylib, and /// platform. On windows, it turns into name.dll, on OS X, name.dylib, and
/// elsewhere, libname.so. /// elsewhere, libname.so.
pub fn load_plugin(&mut self, name: ~str) { pub fn load_plugin(&mut self, name: StrBuf) {
let x = self.prefix.join(libname(name)); let x = self.prefix.join(libname(name));
let lib_result = dl::DynamicLibrary::open(Some(&x)); let lib_result = dl::DynamicLibrary::open(Some(&x));
let lib = lib_result.unwrap(); let lib = lib_result.unwrap();
@ -71,23 +71,21 @@ impl PluginManager {
} }
#[cfg(target_os="win32")] #[cfg(target_os="win32")]
fn libname(n: ~str) -> ~str { fn libname(mut n: StrBuf) -> StrBuf {
let mut n = StrBuf::from_owned_str(n);
n.push_str(".dll"); n.push_str(".dll");
n.into_owned() n
} }
#[cfg(target_os="macos")] #[cfg(target_os="macos")]
fn libname(n: ~str) -> ~str { fn libname(mut n: StrBuf) -> StrBuf {
let mut n = StrBuf::from_owned_str(n);
n.push_str(".dylib"); n.push_str(".dylib");
n.into_owned() n
} }
#[cfg(not(target_os="win32"), not(target_os="macos"))] #[cfg(not(target_os="win32"), not(target_os="macos"))]
fn libname(n: ~str) -> ~str { fn libname(n: StrBuf) -> StrBuf {
let mut i = StrBuf::from_str("lib"); let mut i = StrBuf::from_str("lib");
i.push_str(n); i.push_str(n.as_slice());
i.push_str(".so"); i.push_str(".so");
i.into_owned() i
} }

View file

@ -37,9 +37,9 @@ use passes;
use visit_ast::RustdocVisitor; use visit_ast::RustdocVisitor;
pub fn run(input: &str, pub fn run(input: &str,
cfgs: Vec<~str>, cfgs: Vec<StrBuf>,
libs: HashSet<Path>, libs: HashSet<Path>,
mut test_args: Vec<~str>) mut test_args: Vec<StrBuf>)
-> int { -> int {
let input_path = Path::new(input); let input_path = Path::new(input);
let input = driver::FileInput(input_path.clone()); let input = driver::FileInput(input_path.clone());
@ -63,7 +63,7 @@ pub fn run(input: &str,
let mut cfg = config::build_configuration(&sess); let mut cfg = config::build_configuration(&sess);
cfg.extend(cfgs.move_iter().map(|cfg_| { cfg.extend(cfgs.move_iter().map(|cfg_| {
let cfg_ = token::intern_and_get_ident(cfg_); let cfg_ = token::intern_and_get_ident(cfg_.as_slice());
@dummy_spanned(ast::MetaWord(cfg_)) @dummy_spanned(ast::MetaWord(cfg_))
})); }));
let krate = driver::phase_1_parse_input(&sess, cfg, &input); let krate = driver::phase_1_parse_input(&sess, cfg, &input);
@ -84,15 +84,18 @@ pub fn run(input: &str,
let (krate, _) = passes::unindent_comments(krate); let (krate, _) = passes::unindent_comments(krate);
let (krate, _) = passes::collapse_docs(krate); let (krate, _) = passes::collapse_docs(krate);
let mut collector = Collector::new(krate.name.to_owned(), let mut collector = Collector::new(krate.name.to_strbuf(),
libs, libs,
false, false,
false); false);
collector.fold_crate(krate); collector.fold_crate(krate);
test_args.unshift("rustdoctest".to_owned()); test_args.unshift("rustdoctest".to_strbuf());
testing::test_main(test_args.as_slice(), testing::test_main(test_args.move_iter()
.map(|x| x.to_str())
.collect::<Vec<_>>()
.as_slice(),
collector.tests.move_iter().collect()); collector.tests.move_iter().collect());
0 0
} }
@ -173,7 +176,7 @@ fn runtest(test: &str, cratename: &str, libs: HashSet<Path>, should_fail: bool,
} }
} }
fn maketest(s: &str, cratename: &str, loose_feature_gating: bool) -> ~str { fn maketest(s: &str, cratename: &str, loose_feature_gating: bool) -> StrBuf {
let mut prog = StrBuf::from_str(r" let mut prog = StrBuf::from_str(r"
#![deny(warnings)] #![deny(warnings)]
#![allow(unused_variable, dead_assignment, unused_mut, attribute_usage, dead_code)] #![allow(unused_variable, dead_assignment, unused_mut, attribute_usage, dead_code)]
@ -198,23 +201,23 @@ fn maketest(s: &str, cratename: &str, loose_feature_gating: bool) -> ~str {
prog.push_str("\n}"); prog.push_str("\n}");
} }
return prog.into_owned(); return prog
} }
pub struct Collector { pub struct Collector {
pub tests: Vec<testing::TestDescAndFn>, pub tests: Vec<testing::TestDescAndFn>,
names: Vec<~str>, names: Vec<StrBuf>,
libs: HashSet<Path>, libs: HashSet<Path>,
cnt: uint, cnt: uint,
use_headers: bool, use_headers: bool,
current_header: Option<~str>, current_header: Option<StrBuf>,
cratename: ~str, cratename: StrBuf,
loose_feature_gating: bool loose_feature_gating: bool
} }
impl Collector { impl Collector {
pub fn new(cratename: ~str, libs: HashSet<Path>, pub fn new(cratename: StrBuf, libs: HashSet<Path>,
use_headers: bool, loose_feature_gating: bool) -> Collector { use_headers: bool, loose_feature_gating: bool) -> Collector {
Collector { Collector {
tests: Vec::new(), tests: Vec::new(),
@ -229,7 +232,7 @@ impl Collector {
} }
} }
pub fn add_test(&mut self, test: ~str, should_fail: bool, no_run: bool, should_ignore: bool) { pub fn add_test(&mut self, test: StrBuf, should_fail: bool, no_run: bool, should_ignore: bool) {
let name = if self.use_headers { let name = if self.use_headers {
let s = self.current_header.as_ref().map(|s| s.as_slice()).unwrap_or(""); let s = self.current_header.as_ref().map(|s| s.as_slice()).unwrap_or("");
format!("{}_{}", s, self.cnt) format!("{}_{}", s, self.cnt)
@ -248,7 +251,12 @@ impl Collector {
should_fail: false, // compiler failures are test failures should_fail: false, // compiler failures are test failures
}, },
testfn: testing::DynTestFn(proc() { testfn: testing::DynTestFn(proc() {
runtest(test, cratename, libs, should_fail, no_run, loose_feature_gating); runtest(test.as_slice(),
cratename,
libs,
should_fail,
no_run,
loose_feature_gating);
}), }),
}); });
} }
@ -264,7 +272,7 @@ impl Collector {
} else { } else {
'_' '_'
} }
}).collect::<~str>(); }).collect::<StrBuf>();
// new header => reset count. // new header => reset count.
self.cnt = 0; self.cnt = 0;
@ -277,7 +285,7 @@ impl DocFolder for Collector {
fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> { fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
let pushed = match item.name { let pushed = match item.name {
Some(ref name) if name.len() == 0 => false, Some(ref name) if name.len() == 0 => false,
Some(ref name) => { self.names.push(name.to_owned()); true } Some(ref name) => { self.names.push(name.to_strbuf()); true }
None => false None => false
}; };
match item.doc_value() { match item.doc_value() {