Add Ident::as_str helper

This commit is contained in:
parent 189c0a1297
commit 1e4269cb83

25 changed files with 50 additions and 46 deletions
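The whole change follows one pattern: libsyntax_pos gains a forwarding method on Ident, so call sites can write ident.as_str() instead of ident.name.as_str(). Below is a minimal, self-contained sketch of that pattern; Symbol, LocalInternedString, and Ident here are simplified stand-ins for the real libsyntax_pos types, kept only so the before/after call shapes compile on their own.

// Simplified stand-ins for the libsyntax_pos types (not the real definitions).
#[derive(Clone, Copy)]
struct Symbol(&'static str);

type LocalInternedString = &'static str;

impl Symbol {
    fn as_str(self) -> LocalInternedString {
        self.0
    }
}

#[derive(Clone, Copy)]
struct Ident {
    name: Symbol,
}

impl Ident {
    // The new helper: forward to the inner Symbol so callers can drop `.name`.
    fn as_str(self) -> LocalInternedString {
        self.name.as_str()
    }
}

fn main() {
    let ident = Ident { name: Symbol("lintme") };
    assert_eq!(ident.name.as_str(), "lintme"); // old call shape
    assert_eq!(ident.as_str(), "lintme");      // new call shape used throughout this diff
}
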
@@ -208,7 +208,7 @@ impl LintPass for Pass {
 
 impl EarlyLintPass for Pass {
     fn check_item(&mut self, cx: &EarlyContext, it: &ast::Item) {
-        if it.ident.name.as_str() == "lintme" {
+        if it.ident.as_str() == "lintme" {
             cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'");
         }
     }
@@ -1214,14 +1214,14 @@ impl TokenTree {
             SingleQuote => op!('\''),
 
             Ident(ident, false) => {
-                tt!(self::Ident::new(&ident.name.as_str(), Span(span)))
+                tt!(self::Ident::new(&ident.as_str(), Span(span)))
             }
             Ident(ident, true) => {
-                tt!(self::Ident::new_raw(&ident.name.as_str(), Span(span)))
+                tt!(self::Ident::new_raw(&ident.as_str(), Span(span)))
             }
             Lifetime(ident) => {
                 let ident = ident.without_first_quote();
-                stack.push(tt!(self::Ident::new(&ident.name.as_str(), Span(span))));
+                stack.push(tt!(self::Ident::new(&ident.as_str(), Span(span))));
                 tt!(Punct::new('\'', Spacing::Joint))
             }
             Literal(lit, suffix) => tt!(self::Literal { lit, suffix, span: Span(span) }),
@@ -1983,7 +1983,7 @@ pub struct StructField {
 impl StructField {
     // Still necessary in couple of places
     pub fn is_positional(&self) -> bool {
-        let first = self.ident.name.as_str().as_bytes()[0];
+        let first = self.ident.as_str().as_bytes()[0];
         first >= b'0' && first <= b'9'
     }
 }
@@ -1565,7 +1565,7 @@ impl<'a> State<'a> {
         if ident.is_raw_guess() {
             self.s.word(&format!("r#{}", ident.name))?;
         } else {
-            self.s.word(&ident.name.as_str())?;
+            self.s.word(&ident.as_str())?;
         }
         self.ann.post(self, NodeName(&ident.name))
     }
@@ -1774,7 +1774,7 @@ impl<'tcx> Debug for Rvalue<'tcx> {
                 CtorKind::Fictive => {
                     let mut struct_fmt = fmt.debug_struct("");
                     for (field, place) in variant_def.fields.iter().zip(places) {
-                        struct_fmt.field(&field.ident.name.as_str(), place);
+                        struct_fmt.field(&field.ident.as_str(), place);
                     }
                     struct_fmt.finish()
                 }
@@ -363,7 +363,7 @@ impl SymbolPathBuffer {
             result: String::with_capacity(64),
             temp_buf: String::with_capacity(16),
         };
-        result.result.push_str(&symbol.name.as_str());
+        result.result.push_str(&symbol.as_str());
         result
     }
 
@@ -324,7 +324,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonSnakeCase {
                         _: &hir::Generics,
                         _: ast::NodeId) {
         for sf in s.fields() {
-            self.check_snake_case(cx, "structure field", &sf.ident.name.as_str(), Some(sf.span));
+            self.check_snake_case(cx, "structure field", &sf.ident.as_str(), Some(sf.span));
         }
     }
 }
@@ -772,7 +772,7 @@ fn debug_dump<'a, 'b, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             debug!("CodegenUnit {}:", cgu.name());
 
             for (mono_item, linkage) in cgu.items() {
-                let symbol_name = mono_item.symbol_name(tcx).name.as_str();
+                let symbol_name = mono_item.symbol_name(tcx).as_str();
                 let symbol_hash_start = symbol_name.rfind('h');
                 let symbol_hash = symbol_hash_start.map(|i| &symbol_name[i ..])
                                                    .unwrap_or("<no hash>");
@@ -1060,7 +1060,7 @@ impl<'a> ModuleData<'a> {
     fn for_each_child_stable<F: FnMut(Ident, Namespace, &'a NameBinding<'a>)>(&self, mut f: F) {
         let resolutions = self.resolutions.borrow();
         let mut resolutions = resolutions.iter().collect::<Vec<_>>();
-        resolutions.sort_by_cached_key(|&(&(ident, ns), _)| (ident.name.as_str(), ns));
+        resolutions.sort_by_cached_key(|&(&(ident, ns), _)| (ident.as_str(), ns));
         for &(&(ident, ns), &resolution) in resolutions.iter() {
             resolution.borrow().binding.map(|binding| f(ident, ns, binding));
         }
@@ -2608,7 +2608,7 @@ impl<'a> Resolver<'a> {
                     self,
                     ident.span,
                     ResolutionError::IdentifierBoundMoreThanOnceInSamePattern(
-                        &ident.name.as_str())
+                        &ident.as_str())
                 );
             }
             Some(..) if pat_src == PatternSource::FnParam => {
@@ -2617,7 +2617,7 @@ impl<'a> Resolver<'a> {
                     self,
                     ident.span,
                     ResolutionError::IdentifierBoundMoreThanOnceInParameterList(
-                        &ident.name.as_str())
+                        &ident.as_str())
                 );
             }
             Some(..) if pat_src == PatternSource::Match ||
@@ -3765,12 +3765,12 @@ impl<'a> Resolver<'a> {
                 // the closest match
                 let close_match = self.search_label(label.ident, |rib, ident| {
                     let names = rib.bindings.iter().map(|(id, _)| &id.name);
-                    find_best_match_for_name(names, &*ident.name.as_str(), None)
+                    find_best_match_for_name(names, &*ident.as_str(), None)
                 });
                 self.record_def(expr.id, err_path_resolution());
                 resolve_error(self,
                               label.ident.span,
-                              ResolutionError::UndeclaredLabel(&label.ident.name.as_str(),
+                              ResolutionError::UndeclaredLabel(&label.ident.as_str(),
                                                                close_match));
             }
             Some(Def::Label(id)) => {
@@ -4380,7 +4380,7 @@ fn names_to_string(idents: &[Ident]) -> String {
         if i > 0 {
             result.push_str("::");
         }
-        result.push_str(&ident.name.as_str());
+        result.push_str(&ident.as_str());
     }
     result
 }
@@ -649,7 +649,7 @@ impl<'a> Resolver<'a> {
                     format!("cannot find derive macro `{}` in this scope", ident),
                 };
                 let mut err = self.session.struct_span_err(span, &msg);
-                self.suggest_macro_name(&ident.name.as_str(), kind, &mut err, span);
+                self.suggest_macro_name(&ident.as_str(), kind, &mut err, span);
                 err.emit();
             },
             _ => {},
@@ -831,7 +831,7 @@ impl<'a, 'b:'a> ImportResolver<'a, 'b> {
                 }
             });
             let lev_suggestion =
-                match find_best_match_for_name(names, &ident.name.as_str(), None) {
+                match find_best_match_for_name(names, &ident.as_str(), None) {
                     Some(name) => format!(". Did you mean to use `{}`?", name),
                     None => "".to_owned(),
                 };
@@ -3076,7 +3076,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
                     }
                 }
                 ty::TyTuple(ref tys) => {
-                    let fstr = field.name.as_str();
+                    let fstr = field.as_str();
                     if let Ok(index) = fstr.parse::<usize>() {
                         if fstr == index.to_string() {
                             if let Some(field_ty) = tys.get(index) {
@@ -3123,7 +3123,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
             ty::TyAdt(def, _) if !def.is_enum() => {
                 if let Some(suggested_field_name) =
                     Self::suggest_field_name(def.non_enum_variant(),
-                                             &field.name.as_str(), vec![]) {
+                                             &field.as_str(), vec![]) {
                     err.span_label(field.span,
                                    format!("did you mean `{}`?", suggested_field_name));
                 } else {
@@ -3161,7 +3161,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
                           -> Option<Symbol> {
         let names = variant.fields.iter().filter_map(|field| {
             // ignore already set fields and private fields from non-local crates
-            if skip.iter().any(|x| *x == field.ident.name.as_str()) ||
+            if skip.iter().any(|x| *x == field.ident.as_str()) ||
                 (variant.did.krate != LOCAL_CRATE && field.vis != Visibility::Public) {
                 None
             } else {
@@ -3223,9 +3223,9 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
                 },
                 ty);
             // prevent all specified fields from being suggested
-            let skip_fields = skip_fields.iter().map(|ref x| x.ident.name.as_str());
+            let skip_fields = skip_fields.iter().map(|ref x| x.ident.as_str());
             if let Some(field_name) = Self::suggest_field_name(variant,
-                                                               &field.ident.name.as_str(),
+                                                               &field.ident.as_str(),
                                                                skip_fields.collect()) {
                 err.span_label(field.ident.span,
                                format!("field does not exist - did you mean `{}`?", field_name));
@@ -3334,7 +3334,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
 
             let mut displayable_field_names = remaining_fields
                 .keys()
-                .map(|ident| ident.name.as_str())
+                .map(|ident| ident.as_str())
                 .collect::<Vec<_>>();
 
             displayable_field_names.sort();
@@ -324,7 +324,7 @@ impl<'a> Classifier<'a> {
 
             // Keywords are also included in the identifier set.
             token::Ident(ident, is_raw) => {
-                match &*ident.name.as_str() {
+                match &*ident.as_str() {
                     "ref" | "mut" if !is_raw => Class::RefKeyWord,
 
                     "self" | "Self" => Class::Self_,
@@ -162,7 +162,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
     ecx.parse_sess.registered_diagnostics.with_lock(|diagnostics| {
         if let Err(e) = output_metadata(ecx,
                                         &target_triple,
-                                        &crate_name.name.as_str(),
+                                        &crate_name.as_str(),
                                         diagnostics) {
             ecx.span_bug(span, &format!(
                 "error writing metadata for triple `{}` and crate `{}`, error: {}, \
@@ -178,7 +178,7 @@ fn macro_bang_format(path: &ast::Path) -> ExpnFormat {
         if segment.ident.name != keywords::CrateRoot.name() &&
             segment.ident.name != keywords::DollarCrate.name()
         {
-            path_str.push_str(&segment.ident.name.as_str())
+            path_str.push_str(&segment.ident.as_str())
         }
     }
 
@@ -1266,7 +1266,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
                         DirectoryOwnership::Owned { relative: None };
                     module.directory.push(&*path.as_str());
                 } else {
-                    module.directory.push(&*item.ident.name.as_str());
+                    module.directory.push(&*item.ident.as_str());
                 }
             } else {
                 let path = self.cx.parse_sess.codemap().span_to_unmapped_path(inner);
@@ -573,7 +573,7 @@ fn inner_parse_loop<'a>(
                 TokenTree::MetaVarDecl(_, _, id) => {
                     // Built-in nonterminals never start with these tokens,
                     // so we can eliminate them from consideration.
-                    if may_begin_with(&*id.name.as_str(), token) {
+                    if may_begin_with(&*id.as_str(), token) {
                         bb_items.push(item);
                     }
                 }
@@ -742,7 +742,7 @@ pub fn parse(
                 let match_cur = item.match_cur;
                 item.push_match(
                     match_cur,
-                    MatchedNonterminal(Rc::new(parse_nt(&mut parser, span, &ident.name.as_str()))),
+                    MatchedNonterminal(Rc::new(parse_nt(&mut parser, span, &ident.as_str()))),
                 );
                 item.idx += 1;
                 item.match_cur += 1;
@@ -148,7 +148,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
             };
             let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), true, false);
             p.root_module_name = cx.current_expansion.module.mod_path.last()
-                .map(|id| id.name.as_str().to_string());
+                .map(|id| id.as_str().to_string());
 
             p.process_potential_macro_variable();
             // Let the context choose how to interpret the result.
@@ -730,7 +730,7 @@ fn check_matcher_core(sess: &ParseSess,
         'each_last: for token in &last.tokens {
             if let TokenTree::MetaVarDecl(_, ref name, ref frag_spec) = *token {
                 for next_token in &suffix_first.tokens {
-                    match is_in_follow(next_token, &frag_spec.name.as_str()) {
+                    match is_in_follow(next_token, &frag_spec.as_str()) {
                         Err((msg, help)) => {
                             sess.span_diagnostic.struct_span_err(next_token.span(), &msg)
                                 .help(help).emit();
@@ -768,7 +768,7 @@ fn check_matcher_core(sess: &ParseSess,
 
 fn token_can_be_followed_by_any(tok: &quoted::TokenTree) -> bool {
     if let quoted::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
-        frag_can_be_followed_by_any(&frag_spec.name.as_str())
+        frag_can_be_followed_by_any(&frag_spec.as_str())
     } else {
         // (Non NT's can always be followed by anthing in matchers.)
         true
@@ -893,7 +893,7 @@ fn has_legal_fragment_specifier(sess: &ParseSess,
                                 tok: &quoted::TokenTree) -> Result<(), String> {
     debug!("has_legal_fragment_specifier({:?})", tok);
     if let quoted::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok {
-        let frag_name = frag_spec.name.as_str();
+        let frag_name = frag_spec.as_str();
         let frag_span = tok.span();
         if !is_legal_fragment_specifier(sess, features, attrs, &frag_name, frag_span) {
             return Err(frag_name.to_string());
@@ -6054,7 +6054,7 @@ impl<'a> Parser<'a> {
                 self.directory.path.to_mut().push(&path.as_str());
                 self.directory.ownership = DirectoryOwnership::Owned { relative: None };
             } else {
-                self.directory.path.to_mut().push(&id.name.as_str());
+                self.directory.path.to_mut().push(&id.as_str());
             }
         }
 
@@ -6075,7 +6075,7 @@ impl<'a> Parser<'a> {
         // `./<id>.rs` and `./<id>/mod.rs`.
         let relative_prefix_string;
         let relative_prefix = if let Some(ident) = relative {
-            relative_prefix_string = format!("{}{}", ident.name.as_str(), path::MAIN_SEPARATOR);
+            relative_prefix_string = format!("{}{}", ident.as_str(), path::MAIN_SEPARATOR);
             &relative_prefix_string
         } else {
             ""
@@ -341,7 +341,7 @@ impl Token {
     /// string slice.
     pub fn is_ident_named(&self, name: &str) -> bool {
         match self.ident() {
            Some((ident, _)) => ident.name.as_str() == name,
             None => false
         }
     }
@@ -724,7 +724,7 @@ pub trait PrintState<'a> {
             if segment.ident.name != keywords::CrateRoot.name() &&
                 segment.ident.name != keywords::DollarCrate.name()
             {
-                self.writer().word(&segment.ident.name.as_str())?;
+                self.writer().word(&segment.ident.as_str())?;
             } else if segment.ident.name == keywords::DollarCrate.name() {
                 self.print_dollar_crate(segment.ident.span.ctxt())?;
             }
@@ -2373,7 +2373,7 @@ impl<'a> State<'a> {
         if ident.is_raw_guess() {
             self.s.word(&format!("r#{}", ident))?;
         } else {
-            self.s.word(&ident.name.as_str())?;
+            self.s.word(&ident.as_str())?;
         }
         self.ann.post(self, NodeIdent(&ident))
     }
@@ -647,7 +647,7 @@ fn path_name_i(idents: &[Ident]) -> String {
     let mut path_name = "".to_string();
     let mut idents_iter = idents.iter().peekable();
     while let Some(ident) = idents_iter.next() {
-        path_name.push_str(&ident.name.as_str());
+        path_name.push_str(&ident.as_str());
         if let Some(_) = idents_iter.peek() {
             path_name.push_str("::")
         }
@@ -49,7 +49,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt,
         } else {
             match *e {
                 TokenTree::Token(_, token::Ident(ident, _)) =>
-                    res_str.push_str(&ident.name.as_str()),
+                    res_str.push_str(&ident.as_str()),
                 _ => {
                     cx.span_err(sp, "concat_idents! requires ident args.");
                     return DummyResult::expr(sp);
@@ -136,7 +136,7 @@ fn hygienic_type_parameter(item: &Annotatable, base: &str) -> String {
             ast::ItemKind::Enum(_, ast::Generics { ref params, .. }) => {
                 for param in params.iter() {
                     if let ast::GenericParam::Type(ref ty) = *param{
-                        typaram.push_str(&ty.ident.name.as_str());
+                        typaram.push_str(&ty.ident.as_str());
                     }
                 }
             }
@@ -166,7 +166,7 @@ fn parse_args(ecx: &mut ExtCtxt,
                     return None;
                 }
             };
-            let name: &str = &ident.name.as_str();
+            let name: &str = &ident.as_str();
 
             panictry!(p.expect(&token::Eq));
             let e = panictry!(p.parse_expr());
@@ -56,7 +56,7 @@ impl Ident {
     }
 
     pub fn without_first_quote(self) -> Ident {
-        Ident::new(Symbol::intern(self.name.as_str().trim_left_matches('\'')), self.span)
+        Ident::new(Symbol::intern(self.as_str().trim_left_matches('\'')), self.span)
     }
 
     pub fn modern(self) -> Ident {
@@ -66,6 +66,10 @@ impl Ident {
     pub fn gensym(self) -> Ident {
         Ident::new(self.name.gensymed(), self.span)
     }
+
+    pub fn as_str(self) -> LocalInternedString {
+        self.name.as_str()
+    }
 }
 
 impl PartialEq for Ident {
@@ -96,10 +100,10 @@ impl fmt::Display for Ident {
 impl Encodable for Ident {
     fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
         if self.span.ctxt().modern() == SyntaxContext::empty() {
-            s.emit_str(&self.name.as_str())
+            s.emit_str(&self.as_str())
         } else { // FIXME(jseyfried) intercrate hygiene
             let mut string = "#".to_owned();
-            string.push_str(&self.name.as_str());
+            string.push_str(&self.as_str());
             s.emit_str(&string)
         }
     }