Use Symbol instead of InternedString in the AST, HIR, and various other places.

parent d2f8fb0a0a
commit e85a0d70b8
72 changed files with 399 additions and 453 deletions
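Note: the whole commit is one mechanical substitution. Places that previously carried an `InternedString` (an owned handle to interned text) now carry a `Symbol`, and call sites that need the actual text go through `.as_str()`. Below is a minimal, self-contained sketch of the idea, not the rustc implementation: the real `Symbol` lives in `syntax::symbol` and uses a shared interner, while the `Interner` type and its methods here are simplified assumptions for illustration.

    use std::collections::HashMap;

    // Toy model: a Symbol is a small Copy index into an interner's string table.
    #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
    struct Symbol(u32);

    #[derive(Default)]
    struct Interner {
        names: HashMap<String, Symbol>,
        strings: Vec<String>,
    }

    impl Interner {
        fn intern(&mut self, s: &str) -> Symbol {
            if let Some(&sym) = self.names.get(s) {
                return sym;
            }
            let sym = Symbol(self.strings.len() as u32);
            self.strings.push(s.to_string());
            self.names.insert(s.to_string(), sym);
            sym
        }

        // Counterpart of Symbol::as_str(): borrow the interned text.
        fn as_str(&self, sym: Symbol) -> &str {
            &self.strings[sym.0 as usize]
        }
    }

    fn main() {
        let mut interner = Interner::default();
        let a = interner.intern("target_os");
        let b = interner.intern("target_os");
        assert_eq!(a, b);            // equality is an integer comparison
        let copied = a;              // Copy: no .clone() when storing or passing it
        assert_eq!(interner.as_str(copied), "target_os");
    }

Because the handle is Copy and compares as an integer, it can sit directly in AST/HIR nodes and be passed around by value, which is what most of the hunks below do.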
@@ -115,9 +115,9 @@ impl DefPath {
     pub fn to_string(&self, tcx: TyCtxt) -> String {
         let mut s = String::with_capacity(self.data.len() * 16);

-        s.push_str(&tcx.original_crate_name(self.krate));
+        s.push_str(&tcx.original_crate_name(self.krate).as_str());
         s.push_str("/");
-        s.push_str(&tcx.crate_disambiguator(self.krate));
+        s.push_str(&tcx.crate_disambiguator(self.krate).as_str());

         for component in &self.data {
             write!(s,
@@ -137,8 +137,8 @@ impl DefPath {
     }

     pub fn deterministic_hash_to<H: Hasher>(&self, tcx: TyCtxt, state: &mut H) {
-        tcx.original_crate_name(self.krate).hash(state);
-        tcx.crate_disambiguator(self.krate).hash(state);
+        tcx.original_crate_name(self.krate).as_str().hash(state);
+        tcx.crate_disambiguator(self.krate).as_str().hash(state);
         self.data.hash(state);
     }
 }
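In `deterministic_hash_to`, the crate name and disambiguator are now hashed via `.as_str()`. A plausible reading (an assumption, not stated in the commit) is that the numeric value behind an interned handle depends on interning order, so a hash that is meant to be stable across sessions has to feed in the text rather than the handle. A self-contained sketch of that distinction:

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    // Minimal stand-in for an interner: a "symbol" is its index in this table.
    fn intern(table: &mut Vec<String>, s: &str) -> usize {
        if let Some(i) = table.iter().position(|t| t == s) {
            return i;
        }
        table.push(s.to_string());
        table.len() - 1
    }

    fn hash_one<T: Hash>(value: &T) -> u64 {
        let mut h = DefaultHasher::new();
        value.hash(&mut h);
        h.finish()
    }

    fn main() {
        // Two "sessions" interning the same names in different orders.
        let (mut run1, mut run2) = (Vec::new(), Vec::new());
        let sym1 = intern(&mut run1, "my_crate");
        intern(&mut run2, "other_crate");
        let sym2 = intern(&mut run2, "my_crate");

        assert_ne!(sym1, sym2);                                   // raw indices differ
        assert_eq!(hash_one(&run1[sym1]), hash_one(&run2[sym2])); // the text does not
    }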
@@ -41,7 +41,7 @@ use syntax::abi::Abi;
 use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, AsmDialect};
 use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
 use syntax::ptr::P;
-use syntax::symbol::{keywords, InternedString};
+use syntax::symbol::{Symbol, keywords};
 use syntax::tokenstream::TokenTree;
 use syntax::util::ThinVec;

@@ -1163,18 +1163,18 @@ pub enum Ty_ {

 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct InlineAsmOutput {
-    pub constraint: InternedString,
+    pub constraint: Symbol,
     pub is_rw: bool,
     pub is_indirect: bool,
 }

 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct InlineAsm {
-    pub asm: InternedString,
+    pub asm: Symbol,
     pub asm_str_style: StrStyle,
     pub outputs: HirVec<InlineAsmOutput>,
-    pub inputs: HirVec<InternedString>,
-    pub clobbers: HirVec<InternedString>,
+    pub inputs: HirVec<Symbol>,
+    pub clobbers: HirVec<Symbol>,
     pub volatile: bool,
     pub alignstack: bool,
     pub dialect: AsmDialect,
@@ -1500,19 +1500,19 @@ impl<'a> State<'a> {
             hir::ExprInlineAsm(ref a, ref outputs, ref inputs) => {
                 word(&mut self.s, "asm!")?;
                 self.popen()?;
-                self.print_string(&a.asm, a.asm_str_style)?;
+                self.print_string(&a.asm.as_str(), a.asm_str_style)?;
                 self.word_space(":")?;

                 let mut out_idx = 0;
                 self.commasep(Inconsistent, &a.outputs, |s, out| {
-                    let mut ch = out.constraint.chars();
+                    let constraint = out.constraint.as_str();
+                    let mut ch = constraint.chars();
                     match ch.next() {
                         Some('=') if out.is_rw => {
                             s.print_string(&format!("+{}", ch.as_str()),
                                            ast::StrStyle::Cooked)?
                         }
-                        _ => s.print_string(&out.constraint,
-                                            ast::StrStyle::Cooked)?,
+                        _ => s.print_string(&constraint, ast::StrStyle::Cooked)?,
                     }
                     s.popen()?;
                     s.print_expr(&outputs[out_idx])?;
@@ -1525,7 +1525,7 @@ impl<'a> State<'a> {

                 let mut in_idx = 0;
                 self.commasep(Inconsistent, &a.inputs, |s, co| {
-                    s.print_string(&co, ast::StrStyle::Cooked)?;
+                    s.print_string(&co.as_str(), ast::StrStyle::Cooked)?;
                     s.popen()?;
                     s.print_expr(&inputs[in_idx])?;
                     s.pclose()?;
@@ -1536,7 +1536,7 @@ impl<'a> State<'a> {
                 self.word_space(":")?;

                 self.commasep(Inconsistent, &a.clobbers, |s, co| {
-                    s.print_string(&co, ast::StrStyle::Cooked)?;
+                    s.print_string(&co.as_str(), ast::StrStyle::Cooked)?;
                     Ok(())
                 })?;

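In the pretty-printer, every place that needs a `&str` now converts with `.as_str()`, and the output constraint is bound to a local first so the same text can be inspected character by character and then printed. A small stand-alone sketch of that binding pattern; the `Sym` type below is a toy stand-in, and its `as_str` returning an owned `String` is a simplification:

    // Toy handle; as_str() returning an owned String is a simplification.
    #[derive(Copy, Clone)]
    struct Sym(&'static str);

    impl Sym {
        fn as_str(&self) -> String {
            self.0.to_string()
        }
    }

    fn main() {
        let constraint = Sym("=r");
        // Bind the converted text once so it can be inspected and then reused,
        // mirroring `let constraint = out.constraint.as_str();` in the printer.
        let text = constraint.as_str();
        let mut ch = text.chars();
        match ch.next() {
            Some('=') => println!("+{}", ch.as_str()),
            _ => println!("{}", text),
        }
    }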
@@ -39,7 +39,7 @@ use syntax::ast;
 use syntax::attr;
 use syntax::ext::base::SyntaxExtension;
 use syntax::ptr::P;
-use syntax::symbol::InternedString;
+use syntax::symbol::Symbol;
 use syntax_pos::Span;
 use rustc_back::target::Target;
 use hir;
@@ -52,7 +52,7 @@ pub use self::NativeLibraryKind::{NativeStatic, NativeFramework, NativeUnknown};

 #[derive(Clone, Debug)]
 pub struct LinkMeta {
-    pub crate_name: String,
+    pub crate_name: Symbol,
     pub crate_hash: Svh,
 }

@@ -92,7 +92,7 @@ pub enum NativeLibraryKind {
 #[derive(Clone, Hash, RustcEncodable, RustcDecodable)]
 pub struct NativeLibrary {
     pub kind: NativeLibraryKind,
-    pub name: String,
+    pub name: Symbol,
     pub cfg: Option<ast::MetaItem>,
 }

@@ -205,11 +205,11 @@ pub trait CrateStore<'tcx> {
     fn extern_crate(&self, cnum: CrateNum) -> Option<ExternCrate>;
     /// The name of the crate as it is referred to in source code of the current
     /// crate.
-    fn crate_name(&self, cnum: CrateNum) -> InternedString;
+    fn crate_name(&self, cnum: CrateNum) -> Symbol;
     /// The name of the crate as it is stored in the crate's metadata.
-    fn original_crate_name(&self, cnum: CrateNum) -> InternedString;
+    fn original_crate_name(&self, cnum: CrateNum) -> Symbol;
     fn crate_hash(&self, cnum: CrateNum) -> Svh;
-    fn crate_disambiguator(&self, cnum: CrateNum) -> InternedString;
+    fn crate_disambiguator(&self, cnum: CrateNum) -> Symbol;
     fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option<DefId>;
     fn native_libraries(&self, cnum: CrateNum) -> Vec<NativeLibrary>;
     fn reachable_ids(&self, cnum: CrateNum) -> Vec<DefId>;
@@ -375,13 +375,13 @@ impl<'tcx> CrateStore<'tcx> for DummyCrateStore {
         bug!("panic_strategy")
     }
     fn extern_crate(&self, cnum: CrateNum) -> Option<ExternCrate> { bug!("extern_crate") }
-    fn crate_name(&self, cnum: CrateNum) -> InternedString { bug!("crate_name") }
-    fn original_crate_name(&self, cnum: CrateNum) -> InternedString {
+    fn crate_name(&self, cnum: CrateNum) -> Symbol { bug!("crate_name") }
+    fn original_crate_name(&self, cnum: CrateNum) -> Symbol {
         bug!("original_crate_name")
     }
     fn crate_hash(&self, cnum: CrateNum) -> Svh { bug!("crate_hash") }
     fn crate_disambiguator(&self, cnum: CrateNum)
-                           -> InternedString { bug!("crate_disambiguator") }
+                           -> Symbol { bug!("crate_disambiguator") }
     fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option<DefId>
     { bug!("plugin_registrar_fn") }
     fn native_libraries(&self, cnum: CrateNum) -> Vec<NativeLibrary>

@@ -30,7 +30,7 @@ use middle::weak_lang_items;
 use util::nodemap::FxHashMap;

 use syntax::ast;
-use syntax::symbol::InternedString;
+use syntax::symbol::Symbol;
 use hir::itemlikevisit::ItemLikeVisitor;
 use hir;

@@ -152,7 +152,7 @@ struct LanguageItemCollector<'a, 'tcx: 'a> {
 impl<'a, 'v, 'tcx> ItemLikeVisitor<'v> for LanguageItemCollector<'a, 'tcx> {
     fn visit_item(&mut self, item: &hir::Item) {
         if let Some(value) = extract(&item.attrs) {
-            let item_index = self.item_refs.get(&value[..]).cloned();
+            let item_index = self.item_refs.get(&*value.as_str()).cloned();

             if let Some(item_index) = item_index {
                 self.collect_item(item_index, self.ast_map.local_def_id(item.id))
@@ -160,7 +160,7 @@ impl<'a, 'v, 'tcx> ItemLikeVisitor<'v> for LanguageItemCollector<'a, 'tcx> {
                 let span = self.ast_map.span(item.id);
                 span_err!(self.session, span, E0522,
                           "definition of an unknown language item: `{}`.",
-                          &value[..]);
+                          value);
             }
         }
     }
@@ -243,12 +243,10 @@ impl<'a, 'tcx> LanguageItemCollector<'a, 'tcx> {
     }
 }

-pub fn extract(attrs: &[ast::Attribute]) -> Option<InternedString> {
+pub fn extract(attrs: &[ast::Attribute]) -> Option<Symbol> {
     for attribute in attrs {
         match attribute.value_str() {
-            Some(ref value) if attribute.check_name("lang") => {
-                return Some(value.clone());
-            }
+            Some(value) if attribute.check_name("lang") => return Some(value),
             _ => {}
         }
     }
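`extract` gets simpler because the attribute value is now a Copy handle rather than an owned string: no `ref` pattern, no `.clone()`, the value is simply copied out of the `Option`. A minimal stand-alone illustration of that simplification, with a toy `Sym` type and a hypothetical `extract` helper standing in for the real ones:

    #[derive(Copy, Clone, Debug, PartialEq)]
    struct Sym(u32);

    // A Copy handle can be returned straight out of an Option without a `ref`
    // pattern or a `.clone()`, unlike an owned String payload.
    fn extract(attr_value: Option<Sym>, is_lang_attr: bool) -> Option<Sym> {
        match attr_value {
            Some(value) if is_lang_attr => Some(value),
            _ => None,
        }
    }

    fn main() {
        assert_eq!(extract(Some(Sym(7)), true), Some(Sym(7)));
        assert_eq!(extract(Some(Sym(7)), false), None);
    }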
@@ -25,7 +25,7 @@ pub fn update_recursion_limit(sess: &Session, krate: &ast::Crate) {
     }

     if let Some(s) = attr.value_str() {
-        if let Some(n) = s.parse().ok() {
+        if let Some(n) = s.as_str().parse().ok() {
             sess.recursion_limit.set(n);
             return;
         }
@@ -21,7 +21,7 @@ use hir::def::Def;
 use hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, DefIndex, LOCAL_CRATE};
 use ty::{self, TyCtxt, AdtKind};
 use middle::privacy::AccessLevels;
-use syntax::symbol::InternedString;
+use syntax::symbol::Symbol;
 use syntax_pos::{Span, DUMMY_SP};
 use syntax::ast;
 use syntax::ast::{NodeId, Attribute};
@@ -36,7 +36,6 @@ use hir::pat_util::EnumerateAndAdjustIterator;

 use std::mem::replace;
 use std::cmp::Ordering;
-use std::ops::Deref;

 #[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Copy, Debug, Eq, Hash)]
 pub enum StabilityLevel {
@@ -151,10 +150,11 @@ impl<'a, 'tcx: 'a> Annotator<'a, 'tcx> {

         // Check if deprecated_since < stable_since. If it is,
         // this is *almost surely* an accident.
-        if let (&Some(attr::RustcDeprecation {since: ref dep_since, ..}),
-                &attr::Stable {since: ref stab_since}) = (&stab.rustc_depr, &stab.level) {
+        if let (&Some(attr::RustcDeprecation {since: dep_since, ..}),
+                &attr::Stable {since: stab_since}) = (&stab.rustc_depr, &stab.level) {
             // Explicit version of iter::order::lt to handle parse errors properly
-            for (dep_v, stab_v) in dep_since.split(".").zip(stab_since.split(".")) {
+            for (dep_v, stab_v) in
+                    dep_since.as_str().split(".").zip(stab_since.as_str().split(".")) {
                 if let (Ok(dep_v), Ok(stab_v)) = (dep_v.parse::<u64>(), stab_v.parse()) {
                     match dep_v.cmp(&stab_v) {
                         Ordering::Less => {
@@ -356,7 +356,7 @@ impl<'a, 'tcx> Index<'tcx> {
 /// features and possibly prints errors. Returns a list of all
 /// features used.
 pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
-                                          -> FxHashMap<InternedString, attr::StabilityLevel> {
+                                          -> FxHashMap<Symbol, attr::StabilityLevel> {
     let _task = tcx.dep_graph.in_task(DepNode::StabilityCheck);
     let ref active_lib_features = tcx.sess.features.borrow().declared_lib_features;

@@ -376,8 +376,8 @@ pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)

 struct Checker<'a, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    active_features: FxHashSet<InternedString>,
-    used_features: FxHashMap<InternedString, attr::StabilityLevel>,
+    active_features: FxHashSet<Symbol>,
+    used_features: FxHashMap<Symbol, attr::StabilityLevel>,
     // Within a block where feature gate checking can be skipped.
     in_skip_block: u32,
 }
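The feature-tracking set and map are now keyed by `Symbol`. Presumably (an assumption about intent, not stated in the commit) this makes repeated lookups cheaper, since equality and hashing operate on a small integer handle instead of string contents. A toy sketch of the same shape using std collections:

    use std::collections::{HashMap, HashSet};

    #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
    struct Sym(u32);

    fn main() {
        // Feature names are interned once; afterwards the set and map hash and
        // compare a single u32 instead of re-hashing string contents each time.
        let simd = Sym(0);
        let box_syntax = Sym(1);

        let mut active_features: HashSet<Sym> = HashSet::new();
        active_features.insert(simd);

        let mut used_features: HashMap<Sym, &'static str> = HashMap::new();
        used_features.insert(simd, "stable");
        used_features.insert(box_syntax, "unstable");

        assert!(active_features.contains(&simd));
        assert_eq!(used_features.get(&box_syntax), Some(&"unstable"));
    }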
@@ -407,10 +407,10 @@ impl<'a, 'tcx> Checker<'a, 'tcx> {
         if !self.active_features.contains(feature) {
             let msg = match *reason {
                 Some(ref r) => format!("use of unstable library feature '{}': {}",
-                                       &feature, &r),
+                                       &feature.as_str(), &r),
                 None => format!("use of unstable library feature '{}'", &feature)
             };
-            emit_feature_err(&self.tcx.sess.parse_sess, &feature, span,
+            emit_feature_err(&self.tcx.sess.parse_sess, &feature.as_str(), span,
                              GateIssue::Library(Some(issue)), &msg);
         }
     }
@@ -735,10 +735,10 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
 /// were expected to be library features), and the list of features used from
 /// libraries, identify activated features that don't exist and error about them.
 pub fn check_unused_or_stable_features(sess: &Session,
-                                       lib_features_used: &FxHashMap<InternedString,
+                                       lib_features_used: &FxHashMap<Symbol,
                                                                      attr::StabilityLevel>) {
     let ref declared_lib_features = sess.features.borrow().declared_lib_features;
-    let mut remaining_lib_features: FxHashMap<InternedString, Span>
+    let mut remaining_lib_features: FxHashMap<Symbol, Span>
         = declared_lib_features.clone().into_iter().collect();

     fn format_stable_since_msg(version: &str) -> String {
@@ -746,7 +746,7 @@ pub fn check_unused_or_stable_features(sess: &Session,
     }

     for &(ref stable_lang_feature, span) in &sess.features.borrow().declared_stable_lang_features {
-        let version = find_lang_feature_accepted_version(stable_lang_feature.deref())
+        let version = find_lang_feature_accepted_version(&stable_lang_feature.as_str())
             .expect("unexpectedly couldn't find version feature was stabilized");
         sess.add_lint(lint::builtin::STABLE_FEATURES,
                       ast::CRATE_NODE_ID,
@@ -761,7 +761,7 @@ pub fn check_unused_or_stable_features(sess: &Session,
                 sess.add_lint(lint::builtin::STABLE_FEATURES,
                               ast::CRATE_NODE_ID,
                               span,
-                              format_stable_since_msg(version.deref()));
+                              format_stable_since_msg(&version.as_str()));
             }
         }
         None => ( /* used but undeclared, handled during the previous ast visit */ )

@@ -16,7 +16,7 @@ use middle::lang_items;

 use rustc_back::PanicStrategy;
 use syntax::ast;
-use syntax::symbol::InternedString;
+use syntax::symbol::Symbol;
 use syntax_pos::Span;
 use hir::intravisit::Visitor;
 use hir::intravisit;
@@ -55,10 +55,10 @@ pub fn check_crate(krate: &hir::Crate,
     verify(sess, items);
 }

-pub fn link_name(attrs: &[ast::Attribute]) -> Option<InternedString> {
+pub fn link_name(attrs: &[ast::Attribute]) -> Option<Symbol> {
     lang_items::extract(attrs).and_then(|name| {
-        $(if &name[..] == stringify!($name) {
-            Some(InternedString::new(stringify!($sym)))
+        $(if name == stringify!($name) {
+            Some(Symbol::intern(stringify!($sym)))
         } else)* {
             None
         }
@@ -126,7 +126,7 @@ impl<'a> Context<'a> {
 impl<'a, 'v> Visitor<'v> for Context<'a> {
     fn visit_foreign_item(&mut self, i: &hir::ForeignItem) {
         if let Some(lang_item) = lang_items::extract(&i.attrs) {
-            self.register(&lang_item, i.span);
+            self.register(&lang_item.as_str(), i.span);
         }
         intravisit::walk_foreign_item(self, i)
     }

@@ -26,7 +26,7 @@ use middle::cstore;

 use syntax::ast::{self, IntTy, UintTy};
 use syntax::parse;
-use syntax::symbol::{Symbol, InternedString};
+use syntax::symbol::Symbol;
 use syntax::feature_gate::UnstableFeatures;

 use errors::{ColorConfig, FatalError, Handler};
@@ -927,8 +927,6 @@ pub fn default_lib_output() -> CrateType {
 }

 pub fn default_configuration(sess: &Session) -> ast::CrateConfig {
-    use syntax::symbol::intern_and_get_ident as intern;
-
     let end = &sess.target.target.target_endian;
     let arch = &sess.target.target.arch;
     let wordsz = &sess.target.target.target_pointer_width;
@@ -938,24 +936,24 @@ pub fn default_configuration(sess: &Session) -> ast::CrateConfig {
     let max_atomic_width = sess.target.target.max_atomic_width();

     let fam = if let Some(ref fam) = sess.target.target.options.target_family {
-        intern(fam)
+        Symbol::intern(fam)
     } else if sess.target.target.options.is_like_windows {
-        InternedString::new("windows")
+        Symbol::intern("windows")
     } else {
-        InternedString::new("unix")
+        Symbol::intern("unix")
     };

     let mut ret = HashSet::new();
     // Target bindings.
-    ret.insert((Symbol::intern("target_os"), Some(intern(os))));
-    ret.insert((Symbol::intern("target_family"), Some(fam.clone())));
-    ret.insert((Symbol::intern("target_arch"), Some(intern(arch))));
-    ret.insert((Symbol::intern("target_endian"), Some(intern(end))));
-    ret.insert((Symbol::intern("target_pointer_width"), Some(intern(wordsz))));
-    ret.insert((Symbol::intern("target_env"), Some(intern(env))));
-    ret.insert((Symbol::intern("target_vendor"), Some(intern(vendor))));
-    if &fam == "windows" || &fam == "unix" {
-        ret.insert((Symbol::intern(&fam), None));
+    ret.insert((Symbol::intern("target_os"), Some(Symbol::intern(os))));
+    ret.insert((Symbol::intern("target_family"), Some(fam)));
+    ret.insert((Symbol::intern("target_arch"), Some(Symbol::intern(arch))));
+    ret.insert((Symbol::intern("target_endian"), Some(Symbol::intern(end))));
+    ret.insert((Symbol::intern("target_pointer_width"), Some(Symbol::intern(wordsz))));
+    ret.insert((Symbol::intern("target_env"), Some(Symbol::intern(env))));
+    ret.insert((Symbol::intern("target_vendor"), Some(Symbol::intern(vendor))));
+    if fam == "windows" || fam == "unix" {
+        ret.insert((fam, None));
     }
     if sess.target.target.options.has_elf_tls {
         ret.insert((Symbol::intern("target_thread_local"), None));
@@ -963,9 +961,9 @@ pub fn default_configuration(sess: &Session) -> ast::CrateConfig {
     for &i in &[8, 16, 32, 64, 128] {
         if i <= max_atomic_width {
             let s = i.to_string();
-            ret.insert((Symbol::intern("target_has_atomic"), Some(intern(&s))));
+            ret.insert((Symbol::intern("target_has_atomic"), Some(Symbol::intern(&s))));
             if &s == wordsz {
-                ret.insert((Symbol::intern("target_has_atomic"), Some(intern("ptr"))));
+                ret.insert((Symbol::intern("target_has_atomic"), Some(Symbol::intern("ptr"))));
             }
         }
     }

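Two details of `default_configuration` are worth calling out: the family symbol is reused without `.clone()` because the handle is Copy, and it is compared directly against string literals (`fam == "windows"`). A self-contained model of both behaviours; the `Sym` type and its `PartialEq<&str>` impl below are illustrative assumptions, not the real `Symbol` API:

    use std::collections::HashSet;

    // Toy stand-in: the real Symbol wraps an interner index, not the text itself.
    #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
    struct Sym(&'static str);

    impl PartialEq<&str> for Sym {
        fn eq(&self, other: &&str) -> bool {
            self.0 == *other
        }
    }

    fn main() {
        let fam = Sym("unix");
        let mut cfg: HashSet<(Sym, Option<Sym>)> = HashSet::new();
        cfg.insert((Sym("target_family"), Some(fam))); // Copy: fam is reused below
        if fam == "windows" || fam == "unix" {
            cfg.insert((fam, None));
        }
        assert!(cfg.contains(&(Sym("unix"), None)));
    }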
@@ -28,7 +28,7 @@ use syntax::json::JsonEmitter;
 use syntax::feature_gate;
 use syntax::parse;
 use syntax::parse::ParseSess;
-use syntax::symbol::{Symbol, InternedString};
+use syntax::symbol::Symbol;
 use syntax::{ast, codemap};
 use syntax::feature_gate::AttributeType;
 use syntax_pos::{Span, MultiSpan};
@@ -89,7 +89,7 @@ pub struct Session {
     // forms a unique global identifier for the crate. It is used to allow
     // multiple crates with the same name to coexist. See the
     // trans::back::symbol_names module for more information.
-    pub crate_disambiguator: RefCell<InternedString>,
+    pub crate_disambiguator: RefCell<Symbol>,
     pub features: RefCell<feature_gate::Features>,

     /// The maximum recursion limit for potentially infinitely recursive
@@ -129,8 +129,8 @@ pub struct PerfStats {
 }

 impl Session {
-    pub fn local_crate_disambiguator(&self) -> InternedString {
-        self.crate_disambiguator.borrow().clone()
+    pub fn local_crate_disambiguator(&self) -> Symbol {
+        *self.crate_disambiguator.borrow()
     }
     pub fn struct_span_warn<'a, S: Into<MultiSpan>>(&'a self,
                                                     sp: S,
@@ -610,7 +610,7 @@ pub fn build_session_(sopts: config::Options,
        plugin_attributes: RefCell::new(Vec::new()),
        crate_types: RefCell::new(Vec::new()),
        dependency_formats: RefCell::new(FxHashMap()),
-       crate_disambiguator: RefCell::new(Symbol::intern("").as_str()),
+       crate_disambiguator: RefCell::new(Symbol::intern("")),
        features: RefCell::new(feature_gate::Features::new()),
        recursion_limit: Cell::new(64),
        next_node_id: Cell::new(NodeId::new(1)),

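`local_crate_disambiguator` now returns the handle by value: since `Symbol` is Copy, `*self.crate_disambiguator.borrow()` copies it out of the `RefCell` and the previous `.clone()` disappears. A minimal stand-alone version of that pattern:

    use std::cell::RefCell;

    #[derive(Copy, Clone, PartialEq, Debug)]
    struct Sym(u32);

    struct Session {
        crate_disambiguator: RefCell<Sym>,
    }

    impl Session {
        // Copy types can be moved out of a borrow with `*`; no .clone() needed.
        fn local_crate_disambiguator(&self) -> Sym {
            *self.crate_disambiguator.borrow()
        }
    }

    fn main() {
        let sess = Session { crate_disambiguator: RefCell::new(Sym(0)) };
        *sess.crate_disambiguator.borrow_mut() = Sym(42);
        assert_eq!(sess.local_crate_disambiguator(), Sym(42));
    }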
@@ -246,12 +246,13 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
         let err_sp = item.meta().span.substitute_dummy(span);
         let def = self.tcx.lookup_trait_def(trait_ref.def_id);
         let trait_str = def.trait_ref.to_string();
-        if let Some(ref istring) = item.value_str() {
+        if let Some(istring) = item.value_str() {
+            let istring = &*istring.as_str();
             let generic_map = def.generics.types.iter().map(|param| {
                 (param.name.as_str().to_string(),
                  trait_ref.substs.type_for_def(param).to_string())
             }).collect::<FxHashMap<String, String>>();
-            let parser = Parser::new(&istring);
+            let parser = Parser::new(istring);
             let mut errored = false;
             let err: String = parser.filter_map(|p| {
                 match p {

@@ -49,7 +49,7 @@ use std::rc::Rc;
 use std::iter;
 use syntax::ast::{self, Name, NodeId};
 use syntax::attr;
-use syntax::symbol::{InternedString, intern_and_get_ident, keywords};
+use syntax::symbol::{Symbol, keywords};

 use hir;

@@ -561,7 +561,7 @@ pub struct GlobalCtxt<'tcx> {

     /// The definite name of the current crate after taking into account
     /// attributes, commandline parameters, etc.
-    pub crate_name: InternedString,
+    pub crate_name: Symbol,

     /// Data layout specification for the current target.
     pub data_layout: TargetDataLayout,
@@ -574,7 +574,7 @@ pub struct GlobalCtxt<'tcx> {

     /// Map from function to the `#[derive]` mode that it's defining. Only used
     /// by `proc-macro` crates.
-    pub derive_macros: RefCell<NodeMap<InternedString>>,
+    pub derive_macros: RefCell<NodeMap<Symbol>>,
 }

 impl<'tcx> GlobalCtxt<'tcx> {
@@ -588,15 +588,15 @@ impl<'tcx> GlobalCtxt<'tcx> {
 }

 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
-    pub fn crate_name(self, cnum: CrateNum) -> InternedString {
+    pub fn crate_name(self, cnum: CrateNum) -> Symbol {
         if cnum == LOCAL_CRATE {
-            self.crate_name.clone()
+            self.crate_name
         } else {
             self.sess.cstore.crate_name(cnum)
         }
     }

-    pub fn original_crate_name(self, cnum: CrateNum) -> InternedString {
+    pub fn original_crate_name(self, cnum: CrateNum) -> Symbol {
         if cnum == LOCAL_CRATE {
             self.crate_name.clone()
         } else {
@@ -604,7 +604,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
         }
     }

-    pub fn crate_disambiguator(self, cnum: CrateNum) -> InternedString {
+    pub fn crate_disambiguator(self, cnum: CrateNum) -> Symbol {
         if cnum == LOCAL_CRATE {
             self.sess.local_crate_disambiguator()
         } else {
@@ -835,7 +835,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
        custom_coerce_unsized_kinds: RefCell::new(DefIdMap()),
        cast_kinds: RefCell::new(NodeMap()),
        fragment_infos: RefCell::new(DefIdMap()),
-       crate_name: intern_and_get_ident(crate_name),
+       crate_name: Symbol::intern(crate_name),
        data_layout: data_layout,
        layout_cache: RefCell::new(FxHashMap()),
        layout_depth: Cell::new(0),

@@ -94,14 +94,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
                 if let Some(extern_crate_def_id) = opt_extern_crate {
                     self.push_item_path(buffer, extern_crate_def_id);
                 } else {
-                    buffer.push(&self.crate_name(cnum));
+                    buffer.push(&self.crate_name(cnum).as_str());
                 }
             }
         }
         RootMode::Absolute => {
             // In absolute mode, just write the crate name
             // unconditionally.
-            buffer.push(&self.original_crate_name(cnum));
+            buffer.push(&self.original_crate_name(cnum).as_str());
         }
     }
 }
@@ -126,7 +126,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
                 return true;
             }
             None => {
-                buffer.push(&self.crate_name(cur_def.krate));
+                buffer.push(&self.crate_name(cur_def.krate).as_str());
                 cur_path.iter().rev().map(|segment| buffer.push(&segment.as_str())).count();
                 return true;
             }

@@ -2344,7 +2344,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
         if let Some(id) = self.map.as_local_node_id(id) {
             self.map.name(id)
         } else if id.index == CRATE_DEF_INDEX {
-            Symbol::intern(&self.sess.cstore.original_crate_name(id.krate))
+            self.sess.cstore.original_crate_name(id.krate)
         } else {
             let def_key = self.sess.cstore.def_key(id);
             // The name of a StructCtor is that of its struct parent.
@@ -2747,7 +2747,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {

     /// Looks up the span of `impl_did` if the impl is local; otherwise returns `Err`
     /// with the name of the crate containing the impl.
-    pub fn span_of_impl(self, impl_did: DefId) -> Result<Span, InternedString> {
+    pub fn span_of_impl(self, impl_did: DefId) -> Result<Span, Symbol> {
         if impl_did.is_local() {
             let node_id = self.map.as_local_node_id(impl_did).unwrap();
             Ok(self.map.span(node_id))

@@ -1221,7 +1221,7 @@ fn lit_to_const<'a, 'tcx>(lit: &ast::LitKind,
     use syntax::ast::*;
     use syntax::ast::LitIntType::*;
     match *lit {
-        LitKind::Str(ref s, _) => Ok(Str((*s).clone())),
+        LitKind::Str(ref s, _) => Ok(Str(s.as_str())),
         LitKind::ByteStr(ref data) => Ok(ByteStr(data.clone())),
         LitKind::Byte(n) => Ok(Integral(U8(n))),
         LitKind::Int(n, Signed(ity)) => {
@@ -1249,15 +1249,15 @@ fn lit_to_const<'a, 'tcx>(lit: &ast::LitKind,
             infer(Infer(n), tcx, &ty::TyUint(ity)).map(Integral)
         },

-        LitKind::Float(ref n, fty) => {
-            parse_float(n, Some(fty)).map(Float)
+        LitKind::Float(n, fty) => {
+            parse_float(&n.as_str(), Some(fty)).map(Float)
         }
-        LitKind::FloatUnsuffixed(ref n) => {
+        LitKind::FloatUnsuffixed(n) => {
             let fty_hint = match ty_hint.map(|t| &t.sty) {
                 Some(&ty::TyFloat(fty)) => Some(fty),
                 _ => None
             };
-            parse_float(n, fty_hint).map(Float)
+            parse_float(&n.as_str(), fty_hint).map(Float)
         }
         LitKind::Bool(b) => Ok(Bool(b)),
         LitKind::Char(c) => Ok(Char(c)),

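Float literals now carry their textual form as a `Symbol`, so constant evaluation copies the handle out of the pattern (no more `ref n`) and parses through `.as_str()`. A toy model of that flow, with a hypothetical local interner standing in for the real one:

    // Toy stand-in: the literal's text lives in an interner; the AST node only
    // stores a Copy handle, and numeric parsing goes through the text.
    #[derive(Copy, Clone)]
    struct Sym(usize);

    struct Interner(Vec<String>);

    impl Interner {
        fn as_str(&self, sym: Sym) -> &str {
            &self.0[sym.0]
        }
    }

    fn main() {
        let interner = Interner(vec!["2.5".to_string()]);
        let lit = Sym(0);                       // copied out of the AST by value
        let value: f64 = interner.as_str(lit).parse().unwrap();
        assert_eq!(value, 2.5);
    }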
@@ -561,8 +561,7 @@ pub fn phase_2_configure_and_expand<'a, F>(sess: &Session,
     *sess.features.borrow_mut() = features;

     *sess.crate_types.borrow_mut() = collect_crate_types(sess, &krate.attrs);
-    *sess.crate_disambiguator.borrow_mut() =
-        Symbol::intern(&compute_crate_disambiguator(sess)).as_str();
+    *sess.crate_disambiguator.borrow_mut() = Symbol::intern(&compute_crate_disambiguator(sess));

     time(time_passes, "recursion limit", || {
         middle::recursion_limit::update_recursion_limit(sess, &krate);
@@ -1105,7 +1104,7 @@ pub fn phase_6_link_output(sess: &Session,
                            outputs: &OutputFilenames) {
     time(sess.time_passes(),
          "linking",
-         || link::link_binary(sess, trans, outputs, &trans.link.crate_name));
+         || link::link_binary(sess, trans, outputs, &trans.link.crate_name.as_str()));
 }

 fn escape_dep_filename(filename: &str) -> String {

@@ -13,7 +13,7 @@ use llvm::LLVMRustHasFeature;
 use rustc::session::Session;
 use rustc_trans::back::write::create_target_machine;
 use syntax::feature_gate::UnstableFeatures;
-use syntax::symbol::{Symbol, intern_and_get_ident as intern};
+use syntax::symbol::Symbol;
 use libc::c_char;

 // WARNING: the features must be known to LLVM or the feature
@@ -44,7 +44,7 @@ pub fn add_configuration(cfg: &mut ast::CrateConfig, sess: &Session) {
     for feat in whitelist {
         assert_eq!(feat.chars().last(), Some('\0'));
         if unsafe { LLVMRustHasFeature(target_machine, feat.as_ptr() as *const c_char) } {
-            cfg.insert((tf, Some(intern(&feat[..feat.len() - 1]))));
+            cfg.insert((tf, Some(Symbol::intern(&feat[..feat.len() - 1]))));
         }
     }

@@ -73,6 +73,6 @@ pub fn add_configuration(cfg: &mut ast::CrateConfig, sess: &Session) {
     }

     if crt_static {
-        cfg.insert((tf, Some(intern("crt-static"))));
+        cfg.insert((tf, Some(Symbol::intern("crt-static"))));
     }
 }

@@ -172,8 +172,8 @@ impl<'a, 'tcx> HashItemsVisitor<'a, 'tcx> {

         let crate_disambiguator = self.tcx.sess.local_crate_disambiguator();
         "crate_disambiguator".hash(&mut crate_state);
-        crate_disambiguator.len().hash(&mut crate_state);
-        crate_disambiguator.hash(&mut crate_state);
+        crate_disambiguator.as_str().len().hash(&mut crate_state);
+        crate_disambiguator.as_str().hash(&mut crate_state);

         // add each item (in some deterministic order) to the overall
         // crate hash.

@@ -84,8 +84,8 @@ impl DefIdDirectory {
         assert_eq!(old_info.krate, krate);
         let old_name: &str = &old_info.name;
         let old_disambiguator: &str = &old_info.disambiguator;
-        let new_name: &str = &tcx.crate_name(krate);
-        let new_disambiguator: &str = &tcx.crate_disambiguator(krate);
+        let new_name: &str = &tcx.crate_name(krate).as_str();
+        let new_disambiguator: &str = &tcx.crate_disambiguator(krate).as_str();
         old_name == new_name && old_disambiguator == new_disambiguator
     }
 }
@@ -99,8 +99,8 @@ impl DefIdDirectory {
         let new_krates: HashMap<_, _> =
             once(LOCAL_CRATE)
                 .chain(tcx.sess.cstore.crates())
-                .map(|krate| (make_key(&tcx.crate_name(krate),
-                                       &tcx.crate_disambiguator(krate)), krate))
+                .map(|krate| (make_key(&tcx.crate_name(krate).as_str(),
+                                       &tcx.crate_disambiguator(krate).as_str()), krate))
                 .collect();

         let ids = self.paths.iter()

@@ -48,7 +48,6 @@ use rustc::hir::def_id::DefId;
 use rustc::hir::itemlikevisit::ItemLikeVisitor;
 use syntax::ast::{self, Attribute, NestedMetaItem};
 use rustc_data_structures::fx::{FxHashSet, FxHashMap};
-use syntax::symbol::Symbol;
 use syntax_pos::Span;
 use rustc::ty::TyCtxt;
 use ich::Fingerprint;
@@ -286,7 +285,7 @@ fn check_config(tcx: TyCtxt, attr: &ast::Attribute) -> bool {

 fn expect_associated_value(tcx: TyCtxt, item: &NestedMetaItem) -> ast::Name {
     if let Some(value) = item.value_str() {
-        Symbol::intern(&value)
+        value
     } else {
         let msg = if let Some(name) = item.name() {
             format!("associated value expected for `{}`", name)

@@ -604,7 +604,7 @@ fn string_to_timestamp(s: &str) -> Result<SystemTime, ()> {
 }

 fn crate_path_tcx(tcx: TyCtxt, cnum: CrateNum) -> PathBuf {
-    crate_path(tcx.sess, &tcx.crate_name(cnum), &tcx.crate_disambiguator(cnum))
+    crate_path(tcx.sess, &tcx.crate_name(cnum).as_str(), &tcx.crate_disambiguator(cnum).as_str())
 }

 /// Finds the session directory containing the correct metadata hashes file for

@@ -241,8 +241,8 @@ impl LateLintPass for NonSnakeCase {
             .and_then(|at| at.value_str().map(|s| (at, s)));
         if let Some(ref name) = cx.tcx.sess.opts.crate_name {
             self.check_snake_case(cx, "crate", name, None);
-        } else if let Some((attr, ref name)) = attr_crate_name {
-            self.check_snake_case(cx, "crate", name, Some(attr.span));
+        } else if let Some((attr, name)) = attr_crate_name {
+            self.check_snake_case(cx, "crate", &name.as_str(), Some(attr.span));
         }
     }

@@ -45,6 +45,7 @@ use std::collections::HashSet;
 use syntax::ast;
 use syntax::attr;
 use syntax::feature_gate::{AttributeGate, AttributeType, Stability, deprecated_attributes};
+use syntax::symbol::Symbol;
 use syntax_pos::Span;

 use rustc::hir::{self, PatKind};
@@ -633,9 +634,9 @@ impl Deprecated {
              stability: &Option<&attr::Stability>,
              deprecation: &Option<stability::DeprecationEntry>) {
         // Deprecated attributes apply in-crate and cross-crate.
-        if let Some(&attr::Stability{rustc_depr: Some(attr::RustcDeprecation{ref reason, ..}), ..})
+        if let Some(&attr::Stability{rustc_depr: Some(attr::RustcDeprecation{reason, ..}), ..})
             = *stability {
-            output(cx, DEPRECATED, span, Some(&reason))
+            output(cx, DEPRECATED, span, Some(reason))
         } else if let Some(ref depr_entry) = *deprecation {
             if let Some(parent_depr) = cx.tcx.lookup_deprecation_entry(self.parent_def(cx)) {
                 if parent_depr.same_origin(depr_entry) {
@@ -643,10 +644,10 @@ impl Deprecated {
                }
            }

-           output(cx, DEPRECATED, span, depr_entry.attr.note.as_ref().map(|x| &**x))
+           output(cx, DEPRECATED, span, depr_entry.attr.note)
        }

-   fn output(cx: &LateContext, lint: &'static Lint, span: Span, note: Option<&str>) {
+   fn output(cx: &LateContext, lint: &'static Lint, span: Span, note: Option<Symbol>) {
        let msg = if let Some(note) = note {
            format!("use of deprecated item: {}", note)
        } else {

@@ -219,9 +219,9 @@ impl LateLintPass for TypeLimits {
             ty::TyFloat(t) => {
                 let (min, max) = float_ty_range(t);
                 let lit_val: f64 = match lit.node {
-                    ast::LitKind::Float(ref v, _) |
-                    ast::LitKind::FloatUnsuffixed(ref v) => {
-                        match v.parse() {
+                    ast::LitKind::Float(v, _) |
+                    ast::LitKind::FloatUnsuffixed(v) => {
+                        match v.as_str().parse() {
                             Ok(f) => f,
                             Err(_) => return,
                         }

@@ -162,7 +162,7 @@ impl LateLintPass for UnusedResults {
         // check for #[must_use="..."]
         if let Some(s) = attr.value_str() {
             msg.push_str(": ");
-            msg.push_str(&s);
+            msg.push_str(&s.as_str());
         }
         cx.span_lint(UNUSED_MUST_USE, sp, &msg);
         return true;

@@ -37,7 +37,7 @@ use syntax::abi::Abi;
 use syntax::attr;
 use syntax::ext::base::SyntaxExtension;
 use syntax::feature_gate::{self, GateIssue};
-use syntax::symbol::{Symbol, InternedString};
+use syntax::symbol::Symbol;
 use syntax_pos::{Span, DUMMY_SP};
 use log;

@@ -52,7 +52,7 @@ pub struct CrateLoader<'a> {
     cstore: &'a CStore,
     next_crate_num: CrateNum,
     foreign_item_map: FxHashMap<String, Vec<ast::NodeId>>,
-    local_crate_name: String,
+    local_crate_name: Symbol,
 }

 fn dump_crates(cstore: &CStore) {
@@ -70,8 +70,8 @@ fn dump_crates(cstore: &CStore) {

 #[derive(Debug)]
 struct ExternCrateInfo {
-    ident: String,
-    name: String,
+    ident: Symbol,
+    name: Symbol,
     id: ast::NodeId,
     dep_kind: DepKind,
 }
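`ExternCrateInfo` and the `CrateLoader` now move crate names around as `Symbol` by value instead of `String`/`&str`, which is what removes the `.to_string()` calls and the borrow plumbing in the later hunks. A small stand-alone sketch of the shape of that API (toy types, not the real ones):

    #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
    struct Sym(u32);

    // Crate names travel by value: no borrows to thread through, no String clones.
    #[derive(Debug)]
    struct ExternCrateInfo {
        ident: Sym,
        name: Sym,
    }

    fn resolve_crate(info: &ExternCrateInfo) -> (Sym, Sym) {
        // Copying the handles out is free; previously this needed `.to_string()`
        // or borrowed `&str` parameters with explicit lifetimes.
        (info.ident, info.name)
    }

    fn main() {
        let info = ExternCrateInfo { ident: Sym(3), name: Sym(4) };
        assert_eq!(resolve_crate(&info), (Sym(3), Sym(4)));
    }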
@ -80,7 +80,7 @@ fn register_native_lib(sess: &Session,
|
||||||
cstore: &CStore,
|
cstore: &CStore,
|
||||||
span: Option<Span>,
|
span: Option<Span>,
|
||||||
lib: NativeLibrary) {
|
lib: NativeLibrary) {
|
||||||
if lib.name.is_empty() {
|
if lib.name.as_str().is_empty() {
|
||||||
match span {
|
match span {
|
||||||
Some(span) => {
|
Some(span) => {
|
||||||
struct_span_err!(sess, span, E0454,
|
struct_span_err!(sess, span, E0454,
|
||||||
|
@ -147,7 +147,7 @@ impl<'a> CrateLoader<'a> {
|
||||||
cstore: cstore,
|
cstore: cstore,
|
||||||
next_crate_num: cstore.next_crate_num(),
|
next_crate_num: cstore.next_crate_num(),
|
||||||
foreign_item_map: FxHashMap(),
|
foreign_item_map: FxHashMap(),
|
||||||
local_crate_name: local_crate_name.to_owned(),
|
local_crate_name: Symbol::intern(local_crate_name),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -160,12 +160,12 @@ impl<'a> CrateLoader<'a> {
|
||||||
Some(name) => {
|
Some(name) => {
|
||||||
validate_crate_name(Some(self.sess), &name.as_str(),
|
validate_crate_name(Some(self.sess), &name.as_str(),
|
||||||
Some(i.span));
|
Some(i.span));
|
||||||
name.to_string()
|
name
|
||||||
}
|
}
|
||||||
None => i.ident.to_string(),
|
None => i.ident.name,
|
||||||
};
|
};
|
||||||
Some(ExternCrateInfo {
|
Some(ExternCrateInfo {
|
||||||
ident: i.ident.to_string(),
|
ident: i.ident.name,
|
||||||
name: name,
|
name: name,
|
||||||
id: i.id,
|
id: i.id,
|
||||||
dep_kind: if attr::contains_name(&i.attrs, "no_link") {
|
dep_kind: if attr::contains_name(&i.attrs, "no_link") {
|
||||||
|
@ -179,7 +179,7 @@ impl<'a> CrateLoader<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn existing_match(&self, name: &str, hash: Option<&Svh>, kind: PathKind)
|
fn existing_match(&self, name: Symbol, hash: Option<&Svh>, kind: PathKind)
|
||||||
-> Option<CrateNum> {
|
-> Option<CrateNum> {
|
||||||
let mut ret = None;
|
let mut ret = None;
|
||||||
self.cstore.iter_crate_data(|cnum, data| {
|
self.cstore.iter_crate_data(|cnum, data| {
|
||||||
|
@ -201,7 +201,7 @@ impl<'a> CrateLoader<'a> {
|
||||||
// `source` stores paths which are normalized which may be different
|
// `source` stores paths which are normalized which may be different
|
||||||
// from the strings on the command line.
|
// from the strings on the command line.
|
||||||
let source = self.cstore.used_crate_source(cnum);
|
let source = self.cstore.used_crate_source(cnum);
|
||||||
if let Some(locs) = self.sess.opts.externs.get(name) {
|
if let Some(locs) = self.sess.opts.externs.get(&*name.as_str()) {
|
||||||
let found = locs.iter().any(|l| {
|
let found = locs.iter().any(|l| {
|
||||||
let l = fs::canonicalize(l).ok();
|
let l = fs::canonicalize(l).ok();
|
||||||
source.dylib.as_ref().map(|p| &p.0) == l.as_ref() ||
|
source.dylib.as_ref().map(|p| &p.0) == l.as_ref() ||
|
||||||
|
@ -233,7 +233,7 @@ impl<'a> CrateLoader<'a> {
|
||||||
root: &CrateRoot) {
|
root: &CrateRoot) {
|
||||||
// Check for (potential) conflicts with the local crate
|
// Check for (potential) conflicts with the local crate
|
||||||
if self.local_crate_name == root.name &&
|
if self.local_crate_name == root.name &&
|
||||||
self.sess.local_crate_disambiguator() == &root.disambiguator[..] {
|
self.sess.local_crate_disambiguator() == root.disambiguator {
|
||||||
span_fatal!(self.sess, span, E0519,
|
span_fatal!(self.sess, span, E0519,
|
||||||
"the current crate is indistinguishable from one of its \
|
"the current crate is indistinguishable from one of its \
|
||||||
dependencies: it has the same crate-name `{}` and was \
|
                           dependencies: it has the same crate-name `{}` and was \
@@ -258,8 +258,8 @@ impl<'a> CrateLoader<'a> {

     fn register_crate(&mut self,
                       root: &Option<CratePaths>,
-                      ident: &str,
-                      name: &str,
+                      ident: Symbol,
+                      name: Symbol,
                       span: Span,
                       lib: Library,
                       dep_kind: DepKind)
@@ -290,7 +290,7 @@ impl<'a> CrateLoader<'a> {
         let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, span, dep_kind);

         let cmeta = Rc::new(cstore::CrateMetadata {
-            name: name.to_string(),
+            name: name,
             extern_crate: Cell::new(None),
             key_map: metadata.load_key_map(crate_root.index),
             proc_macros: crate_root.macro_derive_registrar.map(|_| {
@@ -314,8 +314,8 @@ impl<'a> CrateLoader<'a> {

     fn resolve_crate(&mut self,
                      root: &Option<CratePaths>,
-                     ident: &str,
-                     name: &str,
+                     ident: Symbol,
+                     name: Symbol,
                      hash: Option<&Svh>,
                      span: Span,
                      path_kind: PathKind,
@@ -456,13 +456,12 @@ impl<'a> CrateLoader<'a> {
        let deps = crate_root.crate_deps.decode(metadata);
        let map: FxHashMap<_, _> = deps.enumerate().map(|(crate_num, dep)| {
            debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash);
-           let dep_name = &dep.name.as_str();
            let dep_kind = match dep_kind {
                DepKind::MacrosOnly => DepKind::MacrosOnly,
                _ => dep.kind,
            };
            let (local_cnum, ..) = self.resolve_crate(
-               root, dep_name, dep_name, Some(&dep.hash), span, PathKind::Dependency, dep_kind,
+               root, dep.name, dep.name, Some(&dep.hash), span, PathKind::Dependency, dep_kind,
            );
            (CrateNum::new(crate_num + 1), local_cnum)
        }).collect();
@@ -482,13 +481,11 @@ impl<'a> CrateLoader<'a> {
        let target_triple = &self.sess.opts.target_triple[..];
        let is_cross = target_triple != config::host_triple();
        let mut target_only = false;
-       let ident = info.ident.clone();
-       let name = info.name.clone();
        let mut locate_ctxt = locator::Context {
            sess: self.sess,
            span: span,
-           ident: &ident[..],
-           crate_name: &name[..],
+           ident: info.ident,
+           crate_name: info.name,
            hash: None,
            filesearch: self.sess.host_filesearch(PathKind::Crate),
            target: &self.sess.host,
@@ -604,8 +601,8 @@ impl<'a> CrateLoader<'a> {
    pub fn find_plugin_registrar(&mut self, span: Span, name: &str)
                                 -> Option<(PathBuf, Svh, DefIndex)> {
        let ekrate = self.read_extension_crate(span, &ExternCrateInfo {
-           name: name.to_string(),
-           ident: name.to_string(),
+           name: Symbol::intern(name),
+           ident: Symbol::intern(name),
            id: ast::DUMMY_NODE_ID,
            dep_kind: DepKind::MacrosOnly,
        });
@@ -642,7 +639,7 @@ impl<'a> CrateLoader<'a> {
        let libs = self.cstore.get_used_libraries();
        for (foreign_lib, list) in self.foreign_item_map.iter() {
            let is_static = libs.borrow().iter().any(|lib| {
-               *foreign_lib == lib.name && lib.kind == cstore::NativeStatic
+               lib.name == &**foreign_lib && lib.kind == cstore::NativeStatic
            });
            if is_static {
                for id in list {
@@ -705,8 +702,8 @@ impl<'a> CrateLoader<'a> {
        // in terms of everyone has a compatible panic runtime format, that's
        // performed later as part of the `dependency_format` module.
        let name = match desired_strategy {
-           PanicStrategy::Unwind => "panic_unwind",
-           PanicStrategy::Abort => "panic_abort",
+           PanicStrategy::Unwind => Symbol::intern("panic_unwind"),
+           PanicStrategy::Abort => Symbol::intern("panic_abort"),
        };
        info!("panic runtime not found -- loading {}", name);

@@ -788,9 +785,9 @@ impl<'a> CrateLoader<'a> {
        // * Staticlibs and Rust dylibs use system malloc
        // * Rust dylibs used as dependencies to rust use jemalloc
        let name = if need_lib_alloc && !self.sess.opts.cg.prefer_dynamic {
-           &self.sess.target.target.options.lib_allocation_crate
+           Symbol::intern(&self.sess.target.target.options.lib_allocation_crate)
        } else {
-           &self.sess.target.target.options.exe_allocation_crate
+           Symbol::intern(&self.sess.target.target.options.exe_allocation_crate)
        };
        let dep_kind = DepKind::Implicit;
        let (cnum, data) =
@@ -852,8 +849,8 @@ impl<'a> CrateLoader<'a> {
 impl<'a> CrateLoader<'a> {
    pub fn preprocess(&mut self, krate: &ast::Crate) {
        for attr in krate.attrs.iter().filter(|m| m.name() == "link_args") {
-           if let Some(ref linkarg) = attr.value_str() {
-               self.cstore.add_used_link_args(&linkarg);
+           if let Some(linkarg) = attr.value_str() {
+               self.cstore.add_used_link_args(&linkarg.as_str());
            }
        }
    }
@@ -866,7 +863,7 @@ impl<'a> CrateLoader<'a> {
        // First, add all of the custom #[link_args] attributes
        for m in i.attrs.iter().filter(|a| a.check_name("link_args")) {
            if let Some(linkarg) = m.value_str() {
-               self.cstore.add_used_link_args(&linkarg);
+               self.cstore.add_used_link_args(&linkarg.as_str());
            }
        }

@@ -878,7 +875,7 @@ impl<'a> CrateLoader<'a> {
        };
        let kind = items.iter().find(|k| {
            k.check_name("kind")
-       }).and_then(|a| a.value_str());
+       }).and_then(|a| a.value_str()).map(Symbol::as_str);
        let kind = match kind.as_ref().map(|s| &s[..]) {
            Some("static") => cstore::NativeStatic,
            Some("dylib") => cstore::NativeUnknown,
@@ -900,7 +897,7 @@ impl<'a> CrateLoader<'a> {
                struct_span_err!(self.sess, m.span, E0459,
                                 "#[link(...)] specified without `name = \"foo\"`")
                    .span_label(m.span, &format!("missing `name` argument")).emit();
-               InternedString::new("foo")
+               Symbol::intern("foo")
            }
        };
        let cfg = items.iter().find(|k| {
@@ -910,7 +907,7 @@ impl<'a> CrateLoader<'a> {
            list[0].meta_item().unwrap().clone()
        });
        let lib = NativeLibrary {
-           name: n.to_string(),
+           name: n,
            kind: kind,
            cfg: cfg,
        };
@@ -941,7 +938,7 @@ impl<'a> middle::cstore::CrateLoader for CrateLoader<'a> {

        for &(ref name, kind) in &self.sess.opts.libs {
            let lib = NativeLibrary {
-               name: name.clone(),
+               name: Symbol::intern(name),
                kind: kind,
                cfg: None,
            };
@@ -959,7 +956,7 @@ impl<'a> middle::cstore::CrateLoader for CrateLoader<'a> {

        let info = self.extract_crate_info(item).unwrap();
        let (cnum, ..) = self.resolve_crate(
-           &None, &info.ident, &info.name, None, item.span, PathKind::Crate, info.dep_kind,
+           &None, info.ident, info.name, None, item.span, PathKind::Crate, info.dep_kind,
        );

        let def_id = definitions.opt_local_def_id(item.id).unwrap();
@@ -29,6 +29,7 @@ use std::path::PathBuf;
 use flate::Bytes;
 use syntax::{ast, attr};
 use syntax::ext::base::SyntaxExtension;
+use syntax::symbol::Symbol;
 use syntax_pos;

 pub use rustc::middle::cstore::{NativeLibrary, LinkagePreference};
@@ -58,7 +59,7 @@ pub struct ImportedFileMap {
 }

 pub struct CrateMetadata {
-    pub name: String,
+    pub name: Symbol,

     /// Information about the extern crate that caused this crate to
     /// be loaded. If this is `None`, then the crate was injected
@@ -213,7 +214,7 @@ impl CStore {
    }

    pub fn add_used_library(&self, lib: NativeLibrary) {
-       assert!(!lib.name.is_empty());
+       assert!(!lib.name.as_str().is_empty());
        self.used_libraries.borrow_mut().push(lib);
    }

@@ -249,14 +250,14 @@ impl CStore {
 }

 impl CrateMetadata {
-    pub fn name(&self) -> &str {
-        &self.root.name
+    pub fn name(&self) -> Symbol {
+        self.root.name
     }
     pub fn hash(&self) -> Svh {
         self.root.hash
     }
-    pub fn disambiguator(&self) -> &str {
-        &self.root.disambiguator
+    pub fn disambiguator(&self) -> Symbol {
+        self.root.disambiguator
     }

     pub fn is_staged_api(&self) -> bool {
@@ -32,7 +32,7 @@ use std::path::PathBuf;
 use syntax::ast;
 use syntax::attr;
 use syntax::parse::new_parser_from_source_str;
-use syntax::symbol::{InternedString, intern_and_get_ident};
+use syntax::symbol::Symbol;
 use syntax_pos::mk_sp;
 use rustc::hir::svh::Svh;
 use rustc_back::target::Target;
@@ -263,14 +263,14 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore {
        self.get_crate_data(cnum).panic_strategy()
    }

-   fn crate_name(&self, cnum: CrateNum) -> InternedString
+   fn crate_name(&self, cnum: CrateNum) -> Symbol
    {
-       intern_and_get_ident(&self.get_crate_data(cnum).name[..])
+       self.get_crate_data(cnum).name
    }

-   fn original_crate_name(&self, cnum: CrateNum) -> InternedString
+   fn original_crate_name(&self, cnum: CrateNum) -> Symbol
    {
-       intern_and_get_ident(&self.get_crate_data(cnum).name())
+       self.get_crate_data(cnum).name()
    }

    fn extern_crate(&self, cnum: CrateNum) -> Option<ExternCrate>
@@ -283,9 +283,9 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore {
        self.get_crate_hash(cnum)
    }

-   fn crate_disambiguator(&self, cnum: CrateNum) -> InternedString
+   fn crate_disambiguator(&self, cnum: CrateNum) -> Symbol
    {
-       intern_and_get_ident(&self.get_crate_data(cnum).disambiguator())
+       self.get_crate_data(cnum).disambiguator()
    }

    fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option<DefId>
@@ -1119,7 +1119,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
        let deps = get_ordered_deps(self.cstore);
        self.lazy_seq(deps.iter().map(|&(_, ref dep)| {
            CrateDep {
-               name: Symbol::intern(dep.name()),
+               name: dep.name(),
                hash: dep.hash(),
                kind: dep.dep_kind.get(),
            }
@@ -1279,10 +1279,10 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
        let is_proc_macro = tcx.sess.crate_types.borrow().contains(&CrateTypeProcMacro);
        let root = self.lazy(&CrateRoot {
            rustc_version: rustc_version(),
-           name: link_meta.crate_name.clone(),
+           name: link_meta.crate_name,
            triple: tcx.sess.opts.target_triple.clone(),
            hash: link_meta.crate_hash,
-           disambiguator: tcx.sess.local_crate_disambiguator().to_string(),
+           disambiguator: tcx.sess.local_crate_disambiguator(),
            panic_strategy: tcx.sess.panic_strategy(),
            plugin_registrar_fn: tcx.sess
                .plugin_registrar_fn
@@ -227,6 +227,7 @@ use rustc_llvm as llvm;
 use rustc_llvm::{False, ObjectFile, mk_section_iter};
 use rustc_llvm::archive_ro::ArchiveRO;
 use errors::DiagnosticBuilder;
+use syntax::symbol::Symbol;
 use syntax_pos::Span;
 use rustc_back::target::Target;

@@ -249,8 +250,8 @@ pub struct CrateMismatch {
 pub struct Context<'a> {
     pub sess: &'a Session,
     pub span: Span,
-    pub ident: &'a str,
-    pub crate_name: &'a str,
+    pub ident: Symbol,
+    pub crate_name: Symbol,
     pub hash: Option<&'a Svh>,
     // points to either self.sess.target.target or self.sess.host, must match triple
     pub target: &'a Target,
@@ -422,7 +423,7 @@ impl<'a> Context<'a> {
        // must be loaded via -L plus some filtering.
        if self.hash.is_none() {
            self.should_match_name = false;
-           if let Some(s) = self.sess.opts.externs.get(self.crate_name) {
+           if let Some(s) = self.sess.opts.externs.get(&self.crate_name.as_str()) {
                return self.find_commandline_library(s.iter());
            }
            self.should_match_name = true;
@@ -533,7 +534,7 @@ impl<'a> Context<'a> {
            if let Some((ref p, _)) = lib.rlib {
                err.note(&format!("path: {}", p.display()));
            }
-           note_crate_name(&mut err, &lib.metadata.get_root().name);
+           note_crate_name(&mut err, &lib.metadata.get_root().name.as_str());
        }
        err.emit();
        None
@@ -22,6 +22,7 @@ use rustc_back::PanicStrategy;

 use rustc_serialize as serialize;
 use syntax::{ast, attr};
+use syntax::symbol::Symbol;
 use syntax_pos::{self, Span};

 use std::marker::PhantomData;
@@ -163,10 +164,10 @@ pub enum LazyState {
 #[derive(RustcEncodable, RustcDecodable)]
 pub struct CrateRoot {
     pub rustc_version: String,
-    pub name: String,
+    pub name: Symbol,
     pub triple: String,
     pub hash: hir::svh::Svh,
-    pub disambiguator: String,
+    pub disambiguator: Symbol,
     pub panic_strategy: PanicStrategy,
     pub plugin_registrar_fn: Option<DefIndex>,
     pub macro_derive_registrar: Option<DefIndex>,
@@ -57,7 +57,7 @@ use syntax::ext::hygiene::{Mark, SyntaxContext};
 use syntax::ast::{self, FloatTy};
 use syntax::ast::{CRATE_NODE_ID, Name, NodeId, Ident, SpannedIdent, IntTy, UintTy};
 use syntax::ext::base::SyntaxExtension;
-use syntax::symbol::{Symbol, InternedString, keywords};
+use syntax::symbol::{Symbol, keywords};
 use syntax::util::lev_distance::find_best_match_for_name;

 use syntax::visit::{self, FnKind, Visitor};
@@ -90,7 +90,7 @@ mod resolve_imports;

 enum SuggestionType {
     Macro(String),
-    Function(InternedString),
+    Function(Symbol),
     NotFound,
 }

@@ -2891,7 +2891,7 @@ impl<'a> Resolver<'a> {
            .flat_map(|rib| rib.bindings.keys().map(|ident| &ident.name));

        if let Some(found) = find_best_match_for_name(names, name, None) {
-           if name != found {
+           if found != name {
                return SuggestionType::Function(found);
            }
        } SuggestionType::NotFound
@@ -120,7 +120,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> {
            }
        };
        result.push(CrateData {
-           name: (&self.tcx.sess.cstore.crate_name(n)[..]).to_owned(),
+           name: self.tcx.sess.cstore.crate_name(n).to_string(),
            number: n.as_u32(),
            span: span,
        });
@@ -734,11 +734,11 @@ fn docs_for_attrs(attrs: &[Attribute]) -> String {

    for attr in attrs {
        if attr.name() == doc {
-           if let Some(ref val) = attr.value_str() {
+           if let Some(val) = attr.value_str() {
                if attr.is_sugared_doc {
-                   result.push_str(&strip_doc_comment_decoration(val));
+                   result.push_str(&strip_doc_comment_decoration(&val.as_str()));
                } else {
-                   result.push_str(val);
+                   result.push_str(&val.as_str());
                }
                result.push('\n');
            }
@@ -88,7 +88,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
        AsmDialect::Intel => llvm::AsmDialect::Intel,
    };

-   let asm = CString::new(ia.asm.as_bytes()).unwrap();
+   let asm = CString::new(ia.asm.as_str().as_bytes()).unwrap();
    let constraint_cstr = CString::new(all_constraints).unwrap();
    let r = InlineAsmCall(bcx,
                          asm.as_ptr(),
@@ -29,7 +29,6 @@

 use rustc::ty::TyCtxt;
 use syntax::ast;
-use syntax::symbol::Symbol;

 use {ModuleSource, ModuleTranslation};

@@ -117,7 +116,7 @@ impl<'a, 'tcx> AssertModuleSource<'a, 'tcx> {
        for item in attr.meta_item_list().unwrap_or(&[]) {
            if item.check_name(name) {
                if let Some(value) = item.value_str() {
-                   return Symbol::intern(&value);
+                   return value;
                } else {
                    self.tcx.sess.span_fatal(
                        item.span,
@@ -44,6 +44,7 @@ use std::str;
 use flate;
 use syntax::ast;
 use syntax::attr;
+use syntax::symbol::Symbol;
 use syntax_pos::Span;

 // RLIB LLVM-BYTECODE OBJECT LAYOUT
@@ -93,8 +94,8 @@ pub fn find_crate_name(sess: Option<&Session>,

    if let Some(sess) = sess {
        if let Some(ref s) = sess.opts.crate_name {
-           if let Some((attr, ref name)) = attr_crate_name {
-               if *s != &name[..] {
+           if let Some((attr, name)) = attr_crate_name {
+               if name != &**s {
                    let msg = format!("--crate-name and #[crate_name] are \
                                       required to match, but `{}` != `{}`",
                                      s, name);
@@ -130,7 +131,7 @@ pub fn build_link_meta(incremental_hashes_map: &IncrementalHashesMap,
                       name: &str)
                       -> LinkMeta {
    let r = LinkMeta {
-       crate_name: name.to_owned(),
+       crate_name: Symbol::intern(name),
        crate_hash: Svh::new(incremental_hashes_map[&DepNode::Krate].to_smaller_hash()),
    };
    info!("{:?}", r);
@@ -429,7 +430,7 @@ fn link_rlib<'a>(sess: &'a Session,
            NativeLibraryKind::NativeFramework |
            NativeLibraryKind::NativeUnknown => continue,
        }
-       ab.add_native_library(&lib.name);
+       ab.add_native_library(&lib.name.as_str());
    }

    // After adding all files to the archive, we need to update the
@@ -615,7 +616,7 @@ fn link_staticlib(sess: &Session, objects: &[PathBuf], out_filename: &Path,
        let skip_object_files = native_libs.iter().any(|lib| {
            lib.kind == NativeLibraryKind::NativeStatic && !relevant_lib(sess, lib)
        });
-       ab.add_rlib(path, &name, sess.lto(), skip_object_files).unwrap();
+       ab.add_rlib(path, &name.as_str(), sess.lto(), skip_object_files).unwrap();

        all_native_libs.extend(sess.cstore.native_libraries(cnum));
    });
@@ -934,15 +935,15 @@ fn add_local_native_libraries(cmd: &mut Linker, sess: &Session) {
            // don't otherwise explicitly reference them. This can occur for
            // libraries which are just providing bindings, libraries with generic
            // functions, etc.
-           cmd.link_whole_staticlib(&l.name, &search_path);
+           cmd.link_whole_staticlib(&l.name.as_str(), &search_path);
        }

        cmd.hint_dynamic();

        for lib in others {
            match lib.kind {
-               NativeLibraryKind::NativeUnknown => cmd.link_dylib(&lib.name),
-               NativeLibraryKind::NativeFramework => cmd.link_framework(&lib.name),
+               NativeLibraryKind::NativeUnknown => cmd.link_dylib(&lib.name.as_str()),
+               NativeLibraryKind::NativeFramework => cmd.link_framework(&lib.name.as_str()),
                NativeLibraryKind::NativeStatic => bug!(),
            }
        }
@@ -1185,8 +1186,8 @@ fn add_upstream_native_libraries(cmd: &mut Linker, sess: &Session) {
                continue
            }
            match lib.kind {
-               NativeLibraryKind::NativeUnknown => cmd.link_dylib(&lib.name),
-               NativeLibraryKind::NativeFramework => cmd.link_framework(&lib.name),
+               NativeLibraryKind::NativeUnknown => cmd.link_dylib(&lib.name.as_str()),
+               NativeLibraryKind::NativeFramework => cmd.link_framework(&lib.name.as_str()),

                // ignore statically included native libraries here as we've
                // already included them when we included the rust library
@@ -113,7 +113,7 @@ use rustc::hir::map::definitions::{DefPath, DefPathData};
 use rustc::util::common::record_time;

 use syntax::attr;
-use syntax::symbol::{Symbol, InternedString, intern_and_get_ident};
+use syntax::symbol::{Symbol, InternedString};

 fn get_symbol_hash<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,

@@ -288,7 +288,7 @@ pub fn exported_name_from_type_and_prefix<'a, 'tcx>(scx: &SharedCrateContext<'a,
        krate: LOCAL_CRATE,
    };
    let hash = get_symbol_hash(scx, &empty_def_path, t, None);
-   let path = [intern_and_get_ident(prefix)];
+   let path = [Symbol::intern(prefix).as_str()];
    mangle(path.iter().cloned(), &hash)
 }

@@ -1128,11 +1128,11 @@ pub fn set_link_section(ccx: &CrateContext,
                        llval: ValueRef,
                        attrs: &[ast::Attribute]) {
    if let Some(sect) = attr::first_attr_value_str_by_name(attrs, "link_section") {
-       if contains_null(&sect) {
+       if contains_null(&sect.as_str()) {
            ccx.sess().fatal(&format!("Illegal null byte in link_section value: `{}`", &sect));
        }
        unsafe {
-           let buf = CString::new(sect.as_bytes()).unwrap();
+           let buf = CString::new(sect.as_str().as_bytes()).unwrap();
            llvm::LLVMSetSection(llval, buf.as_ptr());
        }
    }
@@ -123,7 +123,7 @@ pub fn get_static(ccx: &CrateContext, def_id: DefId) -> ValueRef {
            // extern "C" fn() from being non-null, so we can't just declare a
            // static and call it a day. Some linkages (like weak) will make it such
            // that the static actually has a null value.
-           let linkage = match base::llvm_linkage_by_name(&name) {
+           let linkage = match base::llvm_linkage_by_name(&name.as_str()) {
                Some(linkage) => linkage,
                None => {
                    ccx.sess().span_fatal(span, "invalid linkage specified");
@@ -808,7 +808,7 @@ pub fn compile_unit_metadata(scc: &SharedCrateContext,
    };

    fn fallback_path(scc: &SharedCrateContext) -> CString {
-       CString::new(scc.link_meta().crate_name.clone()).unwrap()
+       CString::new(scc.link_meta().crate_name.to_string()).unwrap()
    }
 }

@@ -35,7 +35,7 @@ pub fn mangled_name_of_item(ccx: &CrateContext, def_id: DefId, extra: &str) -> S
    }

    let name = match def_key.disambiguated_data.data {
-       DefPathData::CrateRoot => ccx.tcx().crate_name(def_id.krate),
+       DefPathData::CrateRoot => ccx.tcx().crate_name(def_id.krate).as_str(),
        data => data.as_interned_str()
    };

@@ -64,7 +64,7 @@ pub fn item_namespace(ccx: &CrateContext, def_id: DefId) -> DIScope {
    });

    let namespace_name = match def_key.disambiguated_data.data {
-       DefPathData::CrateRoot => ccx.tcx().crate_name(def_id.krate),
+       DefPathData::CrateRoot => ccx.tcx().crate_name(def_id.krate).as_str(),
        data => data.as_interned_str()
    };

@@ -156,7 +156,7 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                                          qualified: bool,
                                          output: &mut String) {
    if qualified {
-       output.push_str(&cx.tcx().crate_name(def_id.krate));
+       output.push_str(&cx.tcx().crate_name(def_id.krate).as_str());
        for path_element in cx.tcx().def_path(def_id).data {
            output.push_str("::");
            output.push_str(&path_element.data.as_interned_str());
@@ -30,7 +30,7 @@ use rustc::ty::{self, Ty};
 use Disr;
 use rustc::hir;
 use syntax::ast;
-use syntax::symbol::intern_and_get_ident;
+use syntax::symbol::Symbol;

 use rustc::session::Session;
 use syntax_pos::{Span, DUMMY_SP};
@@ -208,7 +208,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
        }
        (_, "type_name") => {
            let tp_ty = substs.type_at(0);
-           let ty_name = intern_and_get_ident(&tp_ty.to_string());
+           let ty_name = Symbol::intern(&tp_ty.to_string()).as_str();
            C_str_slice(ccx, ty_name)
        }
        (_, "type_id") => {
@@ -30,7 +30,7 @@ use glue;
 use type_::Type;

 use rustc_data_structures::fx::FxHashMap;
-use syntax::symbol::intern_and_get_ident;
+use syntax::symbol::Symbol;

 use super::{MirContext, LocalRef};
 use super::analyze::CleanupKind;
@@ -321,7 +321,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {

        // Get the location information.
        let loc = bcx.sess().codemap().lookup_char_pos(span.lo);
-       let filename = intern_and_get_ident(&loc.file.name);
+       let filename = Symbol::intern(&loc.file.name).as_str();
        let filename = C_str_slice(bcx.ccx(), filename);
        let line = C_u32(bcx.ccx(), loc.line as u32);

@@ -351,7 +351,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
                               const_err)
            }
            mir::AssertMessage::Math(ref err) => {
-               let msg_str = intern_and_get_ident(err.description());
+               let msg_str = Symbol::intern(err.description()).as_str();
                let msg_str = C_str_slice(bcx.ccx(), msg_str);
                let msg_file_line = C_struct(bcx.ccx(),
                                             &[msg_str, filename, line],
@@ -132,7 +132,7 @@ use std::sync::Arc;
 use std::collections::hash_map::DefaultHasher;
 use symbol_map::SymbolMap;
 use syntax::ast::NodeId;
-use syntax::symbol::{InternedString, intern_and_get_ident};
+use syntax::symbol::{Symbol, InternedString};
 use trans_item::TransItem;
 use util::nodemap::{FxHashMap, FxHashSet};

@@ -272,7 +272,7 @@ pub fn partition<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
    // If the partitioning should produce a fixed count of codegen units, merge
    // until that count is reached.
    if let PartitioningStrategy::FixedUnitCount(count) = strategy {
-       merge_codegen_units(&mut initial_partitioning, count, &tcx.crate_name[..]);
+       merge_codegen_units(&mut initial_partitioning, count, &tcx.crate_name.as_str());

        debug_dump(scx, "POST MERGING:", initial_partitioning.codegen_units.iter());
    }
@@ -523,7 +523,7 @@ fn compute_codegen_unit_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
    let mut mod_path = String::with_capacity(64);

    let def_path = tcx.def_path(def_id);
-   mod_path.push_str(&tcx.crate_name(def_path.krate));
+   mod_path.push_str(&tcx.crate_name(def_path.krate).as_str());

    for part in tcx.def_path(def_id)
                   .data
@@ -542,14 +542,11 @@
        mod_path.push_str(".volatile");
    }

-   return intern_and_get_ident(&mod_path[..]);
+   return Symbol::intern(&mod_path[..]).as_str();
 }

 fn numbered_codegen_unit_name(crate_name: &str, index: usize) -> InternedString {
-   intern_and_get_ident(&format!("{}{}{}",
-                                 crate_name,
-                                 NUMBERED_CODEGEN_UNIT_MARKER,
-                                 index)[..])
+   Symbol::intern(&format!("{}{}{}", crate_name, NUMBERED_CODEGEN_UNIT_MARKER, index)).as_str()
 }

 fn debug_dump<'a, 'b, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
@@ -285,7 +285,7 @@ impl<'a, 'tcx> TransItem<'tcx> {

        let attributes = tcx.get_attrs(def_id);
        if let Some(name) = attr::first_attr_value_str_by_name(&attributes, "linkage") {
-           if let Some(linkage) = base::llvm_linkage_by_name(&name) {
+           if let Some(linkage) = base::llvm_linkage_by_name(&name.as_str()) {
                Some(linkage)
            } else {
                let span = tcx.map.span_if_local(def_id);
@@ -531,7 +531,7 @@ impl<'a, 'tcx> DefPathBasedNames<'a, 'tcx> {

        // some_crate::
        if !(self.omit_local_crate_name && def_id.is_local()) {
-           output.push_str(&self.tcx.crate_name(def_path.krate));
+           output.push_str(&self.tcx.crate_name(def_path.krate).as_str());
            output.push_str("::");
        }

@@ -931,7 +931,8 @@ fn check_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
    if let Some(ref attr) = item.attrs.iter().find(|a| {
        a.check_name("rustc_on_unimplemented")
    }) {
-       if let Some(ref istring) = attr.value_str() {
+       if let Some(istring) = attr.value_str() {
+           let istring = istring.as_str();
            let parser = Parser::new(&istring);
            let types = &generics.types;
            for token in parser {
@@ -3027,7 +3028,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
    fn suggest_field_name(variant: ty::VariantDef<'tcx>,
                          field: &Spanned<ast::Name>,
                          skip : Vec<InternedString>)
-                         -> Option<InternedString> {
+                         -> Option<Symbol> {
        let name = field.node.as_str();
        let names = variant.fields.iter().filter_map(|field| {
            // ignore already set fields and private fields from non-local crates
@@ -23,7 +23,7 @@ use abi::Abi;
 use ext::hygiene::SyntaxContext;
 use print::pprust;
 use ptr::P;
-use symbol::{Symbol, keywords, InternedString};
+use symbol::{Symbol, keywords};
 use tokenstream::{TokenTree};

 use std::collections::HashSet;
@@ -451,7 +451,7 @@ pub struct WhereEqPredicate {

 /// The set of MetaItems that define the compilation environment of the crate,
 /// used to drive conditional compilation
-pub type CrateConfig = HashSet<(Name, Option<InternedString>)>;
+pub type CrateConfig = HashSet<(Name, Option<Symbol>)>;

 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct Crate {
@@ -1098,7 +1098,7 @@ pub enum LitIntType {
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub enum LitKind {
     /// A string literal (`"foo"`)
-    Str(InternedString, StrStyle),
+    Str(Symbol, StrStyle),
     /// A byte string (`b"foo"`)
     ByteStr(Rc<Vec<u8>>),
     /// A byte char (`b'f'`)
@@ -1108,9 +1108,9 @@ pub enum LitKind {
     /// An integer literal (`1`)
     Int(u64, LitIntType),
     /// A float literal (`1f64` or `1E10f64`)
-    Float(InternedString, FloatTy),
+    Float(Symbol, FloatTy),
     /// A float literal without a suffix (`1.0 or 1.0E10`)
-    FloatUnsuffixed(InternedString),
+    FloatUnsuffixed(Symbol),
     /// A boolean literal
     Bool(bool),
 }
@@ -1442,7 +1442,7 @@ pub enum AsmDialect {
 /// E.g. `"={eax}"(result)` as in `asm!("mov eax, 2" : "={eax}"(result) : : : "intel")``
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct InlineAsmOutput {
-    pub constraint: InternedString,
+    pub constraint: Symbol,
     pub expr: P<Expr>,
     pub is_rw: bool,
     pub is_indirect: bool,
@@ -1453,11 +1453,11 @@ pub struct InlineAsmOutput {
 /// E.g. `asm!("NOP");`
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct InlineAsm {
-    pub asm: InternedString,
+    pub asm: Symbol,
     pub asm_str_style: StrStyle,
     pub outputs: Vec<InlineAsmOutput>,
-    pub inputs: Vec<(InternedString, P<Expr>)>,
-    pub clobbers: Vec<InternedString>,
+    pub inputs: Vec<(Symbol, P<Expr>)>,
+    pub clobbers: Vec<Symbol>,
     pub volatile: bool,
     pub alignstack: bool,
     pub dialect: AsmDialect,
@@ -25,7 +25,7 @@ use feature_gate::{Features, GatedCfg};
 use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use parse::ParseSess;
 use ptr::P;
-use symbol::{self, Symbol, InternedString};
+use symbol::Symbol;
 use util::ThinVec;

 use std::cell::{RefCell, Cell};
@@ -140,7 +140,7 @@ impl NestedMetaItem {

    /// Gets the string value if self is a MetaItem and the MetaItem is a
    /// MetaItemKind::NameValue variant containing a string, otherwise None.
-   pub fn value_str(&self) -> Option<InternedString> {
+   pub fn value_str(&self) -> Option<Symbol> {
        self.meta_item().and_then(|meta_item| meta_item.value_str())
    }

@@ -195,7 +195,7 @@ impl Attribute {

    pub fn name(&self) -> Name { self.meta().name() }

-   pub fn value_str(&self) -> Option<InternedString> {
+   pub fn value_str(&self) -> Option<Symbol> {
        self.meta().value_str()
    }

@@ -222,7 +222,7 @@ impl MetaItem {
        self.name
    }

-   pub fn value_str(&self) -> Option<InternedString> {
+   pub fn value_str(&self) -> Option<Symbol> {
        match self.node {
            MetaItemKind::NameValue(ref v) => {
                match v.node {
@@ -279,8 +279,7 @@ impl Attribute {
        let comment = self.value_str().unwrap();
        let meta = mk_name_value_item_str(
            Symbol::intern("doc"),
-           symbol::intern_and_get_ident(&strip_doc_comment_decoration(
-               &comment)));
+           Symbol::intern(&strip_doc_comment_decoration(&comment.as_str())));
        if self.style == ast::AttrStyle::Outer {
            f(&mk_attr_outer(self.id, meta))
        } else {
@@ -294,7 +293,7 @@ impl Attribute {

 /* Constructors */

-pub fn mk_name_value_item_str(name: Name, value: InternedString) -> MetaItem {
+pub fn mk_name_value_item_str(name: Name, value: Symbol) -> MetaItem {
    let value_lit = dummy_spanned(ast::LitKind::Str(value, ast::StrStyle::Cooked));
    mk_spanned_name_value_item(DUMMY_SP, name, value_lit)
 }
@@ -383,9 +382,9 @@ pub fn mk_doc_attr_outer(id: AttrId, item: MetaItem, is_sugared_doc: bool) -> At
    }
 }

-pub fn mk_sugared_doc_attr(id: AttrId, text: InternedString, lo: BytePos, hi: BytePos)
+pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, lo: BytePos, hi: BytePos)
                           -> Attribute {
-   let style = doc_comment_style(&text);
+   let style = doc_comment_style(&text.as_str());
    let lit = spanned(lo, hi, ast::LitKind::Str(text, ast::StrStyle::Cooked));
    Attribute {
        id: id,
@@ -416,14 +415,13 @@ pub fn contains_name(attrs: &[Attribute], name: &str) -> bool {
    })
 }

-pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str)
-                                    -> Option<InternedString> {
+pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str) -> Option<Symbol> {
    attrs.iter()
        .find(|at| at.check_name(name))
        .and_then(|at| at.value_str())
 }

-pub fn last_meta_item_value_str_by_name(items: &[MetaItem], name: &str) -> Option<InternedString> {
+pub fn last_meta_item_value_str_by_name(items: &[MetaItem], name: &str) -> Option<Symbol> {
    items.iter()
        .rev()
        .find(|mi| mi.check_name(name))
@@ -432,12 +430,12 @@ pub fn last_meta_item_value_str_by_name(items: &[MetaItem], name: &str) -> Optio

 /* Higher-level applications */

-pub fn find_crate_name(attrs: &[Attribute]) -> Option<InternedString> {
+pub fn find_crate_name(attrs: &[Attribute]) -> Option<Symbol> {
    first_attr_value_str_by_name(attrs, "crate_name")
 }

 /// Find the value of #[export_name=*] attribute and check its validity.
-pub fn find_export_name_attr(diag: &Handler, attrs: &[Attribute]) -> Option<InternedString> {
+pub fn find_export_name_attr(diag: &Handler, attrs: &[Attribute]) -> Option<Symbol> {
    attrs.iter().fold(None, |ia,attr| {
        if attr.check_name("export_name") {
            if let s@Some(_) = attr.value_str() {
@@ -555,7 +553,7 @@ pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Feat
 #[derive(RustcEncodable, RustcDecodable, Clone, Debug, PartialEq, Eq, Hash)]
 pub struct Stability {
     pub level: StabilityLevel,
-    pub feature: InternedString,
+    pub feature: Symbol,
     pub rustc_depr: Option<RustcDeprecation>,
 }

@@ -563,20 +561,20 @@ pub struct Stability {
 #[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Debug, Eq, Hash)]
 pub enum StabilityLevel {
     // Reason for the current stability level and the relevant rust-lang issue
-    Unstable { reason: Option<InternedString>, issue: u32 },
-    Stable { since: InternedString },
+    Unstable { reason: Option<Symbol>, issue: u32 },
+    Stable { since: Symbol },
 }

 #[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Debug, Eq, Hash)]
 pub struct RustcDeprecation {
-    pub since: InternedString,
-    pub reason: InternedString,
+    pub since: Symbol,
+    pub reason: Symbol,
 }

 #[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Debug, Eq, Hash)]
 pub struct Deprecation {
-    pub since: Option<InternedString>,
-    pub note: Option<InternedString>,
+    pub since: Option<Symbol>,
+    pub note: Option<Symbol>,
 }

 impl StabilityLevel {
@@ -602,7 +600,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
            mark_used(attr);

            if let Some(metas) = attr.meta_item_list() {
-               let get = |meta: &MetaItem, item: &mut Option<InternedString>| {
+               let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
                    if item.is_some() {
                        handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.name()));
                        return false
@@ -693,7 +691,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler,
                        level: Unstable {
                            reason: reason,
                            issue: {
-                               if let Ok(issue) = issue.parse() {
+                               if let Ok(issue) = issue.as_str().parse() {
                                    issue
                                } else {
                                    span_err!(diagnostic, attr.span(), E0545,
@@ -804,7 +802,7 @@ fn find_deprecation_generic<'a, I>(diagnostic: &Handler,
        }

        depr = if let Some(metas) = attr.meta_item_list() {
-           let get = |meta: &MetaItem, item: &mut Option<InternedString>| {
+           let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
                if item.is_some() {
                    handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.name()));
                    return false
@@ -195,11 +195,11 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
    let (count, expr) =
        with_registered_diagnostics(|diagnostics| {
            let descriptions: Vec<P<ast::Expr>> =
-               diagnostics.iter().filter_map(|(code, info)| {
+               diagnostics.iter().filter_map(|(&code, info)| {
                    info.description.map(|description| {
                        ecx.expr_tuple(span, vec![
-                           ecx.expr_str(span, code.as_str()),
-                           ecx.expr_str(span, description.as_str())
+                           ecx.expr_str(span, code),
+                           ecx.expr_str(span, description)
                        ])
                    })
                }).collect();
@@ -21,7 +21,7 @@ use fold::{self, Folder};
 use parse::{self, parser};
 use parse::token;
 use ptr::P;
-use symbol::{Symbol, InternedString};
+use symbol::Symbol;
 use util::small_vector::SmallVector;

 use std::path::PathBuf;
@@ -754,7 +754,7 @@ impl<'a> ExtCtxt<'a> {
 /// emitting `err_msg` if `expr` is not a string literal. This does not stop
 /// compilation on error, merely emits a non-fatal error and returns None.
 pub fn expr_to_spanned_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &str)
-                              -> Option<Spanned<(InternedString, ast::StrStyle)>> {
+                              -> Option<Spanned<(Symbol, ast::StrStyle)>> {
    // Update `expr.span`'s expn_id now in case expr is an `include!` macro invocation.
    let expr = expr.map(|mut expr| {
        expr.span.expn_id = cx.backtrace();
@@ -765,7 +765,7 @@ pub fn expr_to_spanned_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &st
    let expr = cx.expander().fold_expr(expr);
    match expr.node {
        ast::ExprKind::Lit(ref l) => match l.node {
-           ast::LitKind::Str(ref s, style) => return Some(respan(expr.span, (s.clone(), style))),
+           ast::LitKind::Str(s, style) => return Some(respan(expr.span, (s, style))),
            _ => cx.span_err(l.span, err_msg)
        },
        _ => cx.span_err(expr.span, err_msg)
@@ -774,7 +774,7 @@ pub fn expr_to_spanned_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &st
 }

 pub fn expr_to_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &str)
-                      -> Option<(InternedString, ast::StrStyle)> {
+                      -> Option<(Symbol, ast::StrStyle)> {
    expr_to_spanned_string(cx, expr, err_msg).map(|s| s.node)
 }

@@ -15,7 +15,7 @@ use syntax_pos::{Span, DUMMY_SP, Pos};
 use codemap::{dummy_spanned, respan, Spanned};
 use ext::base::ExtCtxt;
 use ptr::P;
-use symbol::{intern_and_get_ident, keywords, InternedString};
+use symbol::{Symbol, keywords};

 // Transitional reexports so qquote can find the paths it is looking for
 mod syntax {
@@ -149,7 +149,7 @@ pub trait AstBuilder {
    fn expr_vec(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr>;
    fn expr_vec_ng(&self, sp: Span) -> P<ast::Expr>;
    fn expr_vec_slice(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr>;
-   fn expr_str(&self, sp: Span, s: InternedString) -> P<ast::Expr>;
+   fn expr_str(&self, sp: Span, s: Symbol) -> P<ast::Expr>;

    fn expr_some(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr>;
    fn expr_none(&self, sp: Span) -> P<ast::Expr>;
@@ -158,7 +158,7 @@ pub trait AstBuilder {

    fn expr_tuple(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr>;

-   fn expr_fail(&self, span: Span, msg: InternedString) -> P<ast::Expr>;
+   fn expr_fail(&self, span: Span, msg: Symbol) -> P<ast::Expr>;
    fn expr_unreachable(&self, span: Span) -> P<ast::Expr>;

    fn expr_ok(&self, span: Span, expr: P<ast::Expr>) -> P<ast::Expr>;
@@ -755,7 +755,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
    fn expr_vec_slice(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
        self.expr_addr_of(sp, self.expr_vec(sp, exprs))
    }
-   fn expr_str(&self, sp: Span, s: InternedString) -> P<ast::Expr> {
+   fn expr_str(&self, sp: Span, s: Symbol) -> P<ast::Expr> {
        self.expr_lit(sp, ast::LitKind::Str(s, ast::StrStyle::Cooked))
    }

@ -785,9 +785,9 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
|
||||||
self.expr(sp, ast::ExprKind::Tup(exprs))
|
self.expr(sp, ast::ExprKind::Tup(exprs))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expr_fail(&self, span: Span, msg: InternedString) -> P<ast::Expr> {
|
fn expr_fail(&self, span: Span, msg: Symbol) -> P<ast::Expr> {
|
||||||
let loc = self.codemap().lookup_char_pos(span.lo);
|
let loc = self.codemap().lookup_char_pos(span.lo);
|
||||||
let expr_file = self.expr_str(span, intern_and_get_ident(&loc.file.name));
|
let expr_file = self.expr_str(span, Symbol::intern(&loc.file.name));
|
||||||
let expr_line = self.expr_u32(span, loc.line as u32);
|
let expr_line = self.expr_u32(span, loc.line as u32);
|
||||||
let expr_file_line_tuple = self.expr_tuple(span, vec![expr_file, expr_line]);
|
let expr_file_line_tuple = self.expr_tuple(span, vec![expr_file, expr_line]);
|
||||||
let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple);
|
let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple);
|
||||||
|
@ -800,9 +800,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expr_unreachable(&self, span: Span) -> P<ast::Expr> {
|
fn expr_unreachable(&self, span: Span) -> P<ast::Expr> {
|
||||||
self.expr_fail(span,
|
self.expr_fail(span, Symbol::intern("internal error: entered unreachable code"))
|
||||||
InternedString::new(
|
|
||||||
"internal error: entered unreachable code"))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expr_ok(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> {
|
fn expr_ok(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> {
|
||||||
|
|
|
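The pattern in the hunks above repeats throughout the commit: InternedString values, which carry the string data and were passed around with `ref` patterns and `.clone()`, give way to Symbol, a small Copy handle into a string interner that is converted back to text with `.as_str()` only where a `&str` is actually needed. A minimal self-contained sketch of that idea, assuming a toy `Sym` type and a thread-local table (an illustration only, not rustc's actual syntax::symbol implementation):

use std::cell::RefCell;
use std::collections::HashMap;

// Hypothetical stand-in for a Symbol: a Copy index into an interner.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct Sym(u32);

thread_local! {
    static INTERNER: RefCell<(Vec<String>, HashMap<String, u32>)> =
        RefCell::new((Vec::new(), HashMap::new()));
}

impl Sym {
    // Intern a string, returning the same index for equal strings.
    fn intern(s: &str) -> Sym {
        INTERNER.with(|i| {
            let mut interner = i.borrow_mut();
            if let Some(&idx) = interner.1.get(s) {
                return Sym(idx);
            }
            let idx = interner.0.len() as u32;
            interner.0.push(s.to_string());
            interner.1.insert(s.to_string(), idx);
            Sym(idx)
        })
    }

    // Look the string back up; an owned copy keeps the sketch simple.
    fn as_str(&self) -> String {
        INTERNER.with(|i| i.borrow().0[self.0 as usize].clone())
    }
}

fn main() {
    let a = Sym::intern("f32");
    let b = Sym::intern("f32");
    assert_eq!(a, b);              // cheap comparison by index
    let c = a;                     // Copy: no .clone(), no ref patterns needed
    assert_eq!(c.as_str(), "f32"); // convert back only where text is needed
}

Because the handle is just an index, equality is an integer compare and copies are free, which is what lets the rest of the diff drop `ref` and `.clone()` at most call sites.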
@@ -780,7 +780,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
         if inline_module {
             if let Some(path) = attr::first_attr_value_str_by_name(&item.attrs, "path") {
                 self.cx.current_expansion.no_noninline_mod = false;
-                module.directory.push(&*path);
+                module.directory.push(&*path.as_str());
             } else {
                 module.directory.push(&*item.ident.name.as_str());
             }

@@ -33,7 +33,7 @@ pub mod rt {
     use parse::{self, token, classify};
     use ptr::P;
     use std::rc::Rc;
-    use symbol;
+    use symbol::Symbol;

     use tokenstream::{self, TokenTree};

@@ -239,8 +239,7 @@ pub mod rt {

     impl ToTokens for str {
         fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
-            let lit = ast::LitKind::Str(
-                symbol::intern_and_get_ident(self), ast::StrStyle::Cooked);
+            let lit = ast::LitKind::Str(Symbol::intern(self), ast::StrStyle::Cooked);
             dummy_spanned(lit).to_tokens(cx)
         }
     }

@@ -538,7 +537,7 @@ fn id_ext(s: &str) -> ast::Ident {

 // Lift an ident to the expr that evaluates to that ident.
 fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P<ast::Expr> {
-    let e_str = cx.expr_str(sp, ident.name.as_str());
+    let e_str = cx.expr_str(sp, ident.name);
     cx.expr_method_call(sp,
                         cx.expr_ident(sp, id_ext("ext_cx")),
                         id_ext("ident_of"),

@@ -547,7 +546,7 @@ fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P<ast::Expr> {

 // Lift a name to the expr that evaluates to that name
 fn mk_name(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P<ast::Expr> {
-    let e_str = cx.expr_str(sp, ident.name.as_str());
+    let e_str = cx.expr_str(sp, ident.name);
     cx.expr_method_call(sp,
                         cx.expr_ident(sp, id_ext("ext_cx")),
                         id_ext("name_of"),

@@ -17,7 +17,7 @@ use parse::token;
 use parse;
 use print::pprust;
 use ptr::P;
-use symbol;
+use symbol::Symbol;
 use tokenstream;
 use util::small_vector::SmallVector;

@@ -61,14 +61,13 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])

     let topmost = cx.expansion_cause();
     let loc = cx.codemap().lookup_char_pos(topmost.lo);
-    let filename = symbol::intern_and_get_ident(&loc.file.name);
-    base::MacEager::expr(cx.expr_str(topmost, filename))
+    base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name)))
 }

 pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
                         -> Box<base::MacResult+'static> {
     let s = pprust::tts_to_string(tts);
-    base::MacEager::expr(cx.expr_str(sp, symbol::intern_and_get_ident(&s)))
+    base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&s)))
 }

 pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])

@@ -77,7 +76,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
     let mod_path = &cx.current_expansion.module.mod_path;
     let string = mod_path.iter().map(|x| x.to_string()).collect::<Vec<String>>().join("::");

-    base::MacEager::expr(cx.expr_str(sp, symbol::intern_and_get_ident(&string)))
+    base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&string)))
 }

 /// include! : parse the given file as an expr

@@ -142,10 +141,9 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
             // Add this input file to the code map to make it available as
             // dependency information
             let filename = format!("{}", file.display());
-            let interned = symbol::intern_and_get_ident(&src);
             cx.codemap().new_filemap_and_lines(&filename, None, &src);

-            base::MacEager::expr(cx.expr_str(sp, interned))
+            base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&src)))
         }
         Err(_) => {
             cx.span_err(sp,

@@ -33,7 +33,7 @@ use syntax_pos::Span;
 use errors::{DiagnosticBuilder, Handler};
 use visit::{self, FnKind, Visitor};
 use parse::ParseSess;
-use symbol::InternedString;
+use symbol::Symbol;

 use std::ascii::AsciiExt;
 use std::env;

@@ -59,9 +59,9 @@ macro_rules! declare_features {
 /// A set of features to be used by later passes.
 pub struct Features {
     /// #![feature] attrs for stable language features, for error reporting
-    pub declared_stable_lang_features: Vec<(InternedString, Span)>,
+    pub declared_stable_lang_features: Vec<(Symbol, Span)>,
     /// #![feature] attrs for non-language (library) features
-    pub declared_lib_features: Vec<(InternedString, Span)>,
+    pub declared_lib_features: Vec<(Symbol, Span)>,
     $(pub $feature: bool),+
 }

@@ -1121,9 +1121,8 @@ impl<'a> Visitor for PostExpansionVisitor<'a> {
     }

     fn visit_foreign_item(&mut self, i: &ast::ForeignItem) {
-        let links_to_llvm = match attr::first_attr_value_str_by_name(&i.attrs,
-                                                                     "link_name") {
-            Some(val) => val.starts_with("llvm."),
+        let links_to_llvm = match attr::first_attr_value_str_by_name(&i.attrs, "link_name") {
+            Some(val) => val.as_str().starts_with("llvm."),
             _ => false
         };
         if links_to_llvm {

@@ -1351,7 +1350,7 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute]) -> F
         Some(list) => {
             for mi in list {
                 let name = if let Some(word) = mi.word() {
-                    word.name().as_str()
+                    word.name()
                 } else {
                     span_err!(span_handler, mi.span, E0556,
                               "malformed feature, expected just one word");
@@ -48,12 +48,8 @@ impl<'a> Parser<'a> {
                     just_parsed_doc_comment = false;
                 }
                 token::DocComment(s) => {
-                    let attr = ::attr::mk_sugared_doc_attr(
-                        attr::mk_attr_id(),
-                        self.id_to_interned_str(ast::Ident::with_empty_ctxt(s)),
-                        self.span.lo,
-                        self.span.hi
-                    );
+                    let Span { lo, hi, .. } = self.span;
+                    let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, lo, hi);
                     if attr.style != ast::AttrStyle::Outer {
                         let mut err = self.fatal("expected outer doc comment");
                         err.note("inner doc comments like this (starting with \

@@ -175,8 +171,7 @@ impl<'a> Parser<'a> {
             token::DocComment(s) => {
                 // we need to get the position of this token before we bump.
                 let Span { lo, hi, .. } = self.span;
-                let str = self.id_to_interned_str(ast::Ident::with_empty_ctxt(s));
-                let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), str, lo, hi);
+                let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, lo, hi);
                 if attr.style == ast::AttrStyle::Inner {
                     attrs.push(attr);
                     self.bump();

@@ -18,7 +18,7 @@ use feature_gate::UnstableFeatures;
 use parse::parser::Parser;
 use ptr::P;
 use str::char_at;
-use symbol::{self, InternedString};
+use symbol::Symbol;
 use tokenstream;

 use std::cell::RefCell;

@@ -372,13 +372,18 @@ fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
     s[1..].chars().all(|c| '0' <= c && c <= '9')
 }

-fn filtered_float_lit(data: InternedString, suffix: Option<&str>,
-                      sd: &Handler, sp: Span) -> ast::LitKind {
+fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, sd: &Handler, sp: Span)
+                      -> ast::LitKind {
     debug!("filtered_float_lit: {}, {:?}", data, suffix);
-    match suffix.as_ref().map(|s| &**s) {
-        Some("f32") => ast::LitKind::Float(data, ast::FloatTy::F32),
-        Some("f64") => ast::LitKind::Float(data, ast::FloatTy::F64),
-        Some(suf) => {
+    let suffix = match suffix {
+        Some(suffix) => suffix,
+        None => return ast::LitKind::FloatUnsuffixed(data),
+    };
+
+    match &*suffix.as_str() {
+        "f32" => ast::LitKind::Float(data, ast::FloatTy::F32),
+        "f64" => ast::LitKind::Float(data, ast::FloatTy::F64),
+        suf => {
             if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
                 // if it looks like a width, lets try to be helpful.
                 sd.struct_span_err(sp, &format!("invalid width `{}` for float literal", &suf[1..]))

@@ -392,16 +397,13 @@ fn filtered_float_lit(data: InternedString, suffix: Option<&str>,

             ast::LitKind::FloatUnsuffixed(data)
         }
-        None => ast::LitKind::FloatUnsuffixed(data)
     }
 }
-pub fn float_lit(s: &str, suffix: Option<InternedString>,
-                 sd: &Handler, sp: Span) -> ast::LitKind {
+pub fn float_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> ast::LitKind {
     debug!("float_lit: {:?}, {:?}", s, suffix);
     // FIXME #2252: bounds checking float literals is deferred until trans
     let s = s.chars().filter(|&c| c != '_').collect::<String>();
-    let data = symbol::intern_and_get_ident(&s);
-    filtered_float_lit(data, suffix.as_ref().map(|s| &**s), sd, sp)
+    filtered_float_lit(Symbol::intern(&s), suffix, sd, sp)
 }

 /// Parse a string representing a byte literal into its final form. Similar to `char_lit`

@@ -496,11 +498,7 @@ pub fn byte_str_lit(lit: &str) -> Rc<Vec<u8>> {
     Rc::new(res)
 }

-pub fn integer_lit(s: &str,
-                   suffix: Option<InternedString>,
-                   sd: &Handler,
-                   sp: Span)
-                   -> ast::LitKind {
+pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> ast::LitKind {
     // s can only be ascii, byte indexing is fine

     let s2 = s.chars().filter(|&c| c != '_').collect::<String>();

@@ -522,16 +520,15 @@ pub fn integer_lit(s: &str,
     }

     // 1f64 and 2f32 etc. are valid float literals.
-    if let Some(ref suf) = suffix {
-        if looks_like_width_suffix(&['f'], suf) {
+    if let Some(suf) = suffix {
+        if looks_like_width_suffix(&['f'], &suf.as_str()) {
             match base {
                 16 => sd.span_err(sp, "hexadecimal float literal is not supported"),
                 8 => sd.span_err(sp, "octal float literal is not supported"),
                 2 => sd.span_err(sp, "binary float literal is not supported"),
                 _ => ()
             }
-            let ident = symbol::intern_and_get_ident(&s);
-            return filtered_float_lit(ident, Some(&suf), sd, sp)
+            return filtered_float_lit(Symbol::intern(&s), Some(suf), sd, sp)
         }
     }

@@ -539,9 +536,9 @@ pub fn integer_lit(s: &str,
         s = &s[2..];
     }

-    if let Some(ref suf) = suffix {
-        if suf.is_empty() { sd.span_bug(sp, "found empty literal suffix in Some")}
-        ty = match &**suf {
+    if let Some(suf) = suffix {
+        if suf.as_str().is_empty() { sd.span_bug(sp, "found empty literal suffix in Some")}
+        ty = match &*suf.as_str() {
             "isize" => ast::LitIntType::Signed(ast::IntTy::Is),
             "i8" => ast::LitIntType::Signed(ast::IntTy::I8),
             "i16" => ast::LitIntType::Signed(ast::IntTy::I16),

@@ -552,7 +549,7 @@ pub fn integer_lit(s: &str,
             "u16" => ast::LitIntType::Unsigned(ast::UintTy::U16),
             "u32" => ast::LitIntType::Unsigned(ast::UintTy::U32),
             "u64" => ast::LitIntType::Unsigned(ast::UintTy::U64),
-            _ => {
+            suf => {
                 // i<digits> and u<digits> look like widths, so lets
                 // give an error message along those lines
                 if looks_like_width_suffix(&['i', 'u'], suf) {
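The literal helpers above now accept the suffix as Option<Symbol> and turn it into a &str once, at the match site (`match &*suffix.as_str() { ... }`), instead of threading Option<&str> through every caller. A rough standalone analogue of that dispatch, using a plain String in place of the interned handle and invented names (FloatWidth, parse_float_suffix):

#[derive(Debug, PartialEq)]
enum FloatWidth {
    F32,
    F64,
    Unsuffixed,
}

// Hypothetical analogue of filtered_float_lit's suffix dispatch: the suffix
// arrives as an opaque handle (here just an owned String) and is turned into
// a &str exactly once, at the match.
fn parse_float_suffix(suffix: Option<String>) -> Result<FloatWidth, String> {
    let suffix = match suffix {
        Some(suffix) => suffix,
        None => return Ok(FloatWidth::Unsuffixed),
    };

    match &*suffix {
        "f32" => Ok(FloatWidth::F32),
        "f64" => Ok(FloatWidth::F64),
        // anything else that looks like `f<digits>` gets a targeted error
        suf if suf.len() >= 2
            && suf.starts_with('f')
            && suf[1..].chars().all(|c| c.is_ascii_digit()) =>
        {
            Err(format!("invalid width `{}` for float literal", &suf[1..]))
        }
        suf => Err(format!("invalid suffix `{}` for float literal", suf)),
    }
}

fn main() {
    assert_eq!(parse_float_suffix(None), Ok(FloatWidth::Unsuffixed));
    assert_eq!(parse_float_suffix(Some("f64".to_string())), Ok(FloatWidth::F64));
    assert!(parse_float_suffix(Some("f16".to_string())).is_err());
}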
@@ -55,7 +55,7 @@ use print::pprust;
 use ptr::P;
 use parse::PResult;
 use tokenstream::{self, Delimited, SequenceRepetition, TokenTree};
-use symbol::{self, Symbol, keywords, InternedString};
+use symbol::{Symbol, keywords};
 use util::ThinVec;

 use std::collections::HashSet;

@@ -999,10 +999,6 @@ impl<'a> Parser<'a> {
         &self.sess.span_diagnostic
     }

-    pub fn id_to_interned_str(&mut self, id: Ident) -> InternedString {
-        id.name.as_str()
-    }
-
     /// Is the current token one of the keywords that signals a bare function
     /// type?
     pub fn token_is_bare_fn_keyword(&mut self) -> bool {

@@ -1524,34 +1520,28 @@ impl<'a> Parser<'a> {
                     // float literals, so all the handling is done
                     // internally.
                     token::Integer(s) => {
-                        (false, parse::integer_lit(&s.as_str(),
-                                                   suf.as_ref().map(|s| s.as_str()),
-                                                   &self.sess.span_diagnostic,
-                                                   self.span))
+                        let diag = &self.sess.span_diagnostic;
+                        (false, parse::integer_lit(&s.as_str(), suf, diag, self.span))
                     }
                     token::Float(s) => {
-                        (false, parse::float_lit(&s.as_str(),
-                                                 suf.as_ref().map(|s| s.as_str()),
-                                                 &self.sess.span_diagnostic,
-                                                 self.span))
+                        let diag = &self.sess.span_diagnostic;
+                        (false, parse::float_lit(&s.as_str(), suf, diag, self.span))
                     }

                     token::Str_(s) => {
-                        (true,
-                         LitKind::Str(symbol::intern_and_get_ident(&parse::str_lit(&s.as_str())),
-                                      ast::StrStyle::Cooked))
+                        let s = Symbol::intern(&parse::str_lit(&s.as_str()));
+                        (true, LitKind::Str(s, ast::StrStyle::Cooked))
                     }
                     token::StrRaw(s, n) => {
-                        (true,
-                         LitKind::Str(
-                             symbol::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
-                             ast::StrStyle::Raw(n)))
+                        let s = Symbol::intern(&parse::raw_str_lit(&s.as_str()));
+                        (true, LitKind::Str(s, ast::StrStyle::Raw(n)))
+                    }
+                    token::ByteStr(i) => {
+                        (true, LitKind::ByteStr(parse::byte_str_lit(&i.as_str())))
+                    }
+                    token::ByteStrRaw(i, _) => {
+                        (true, LitKind::ByteStr(Rc::new(i.to_string().into_bytes())))
                     }
-                    token::ByteStr(i) =>
-                        (true, LitKind::ByteStr(parse::byte_str_lit(&i.as_str()))),
-                    token::ByteStrRaw(i, _) =>
-                        (true,
-                         LitKind::ByteStr(Rc::new(i.to_string().into_bytes()))),
                 };

                 if suffix_illegal {

@@ -5303,17 +5293,16 @@ impl<'a> Parser<'a> {

     fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) -> Restrictions {
         if let Some(path) = ::attr::first_attr_value_str_by_name(attrs, "path") {
-            self.directory.push(&*path);
+            self.directory.push(&*path.as_str());
             self.restrictions - Restrictions::NO_NONINLINE_MOD
         } else {
-            let default_path = self.id_to_interned_str(id);
-            self.directory.push(&*default_path);
+            self.directory.push(&*id.name.as_str());
             self.restrictions
         }
     }

     pub fn submod_path_from_attr(attrs: &[ast::Attribute], dir_path: &Path) -> Option<PathBuf> {
-        ::attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&*d))
+        ::attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&*d.as_str()))
     }

     /// Returns either a path to a module, or .

@@ -6127,26 +6116,17 @@ impl<'a> Parser<'a> {
         })
     }

-    pub fn parse_optional_str(&mut self)
-                              -> Option<(InternedString,
-                                         ast::StrStyle,
-                                         Option<ast::Name>)> {
+    pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
         let ret = match self.token {
-            token::Literal(token::Str_(s), suf) => {
-                let s = self.id_to_interned_str(ast::Ident::with_empty_ctxt(s));
-                (s, ast::StrStyle::Cooked, suf)
-            }
-            token::Literal(token::StrRaw(s, n), suf) => {
-                let s = self.id_to_interned_str(ast::Ident::with_empty_ctxt(s));
-                (s, ast::StrStyle::Raw(n), suf)
-            }
+            token::Literal(token::Str_(s), suf) => (s, ast::StrStyle::Cooked, suf),
+            token::Literal(token::StrRaw(s, n), suf) => (s, ast::StrStyle::Raw(n), suf),
             _ => return None
         };
         self.bump();
         Some(ret)
     }

-    pub fn parse_str(&mut self) -> PResult<'a, (InternedString, StrStyle)> {
+    pub fn parse_str(&mut self) -> PResult<'a, (Symbol, StrStyle)> {
         match self.parse_optional_str() {
             Some((s, style, suf)) => {
                 let sp = self.prev_span;

@@ -630,7 +630,7 @@ pub trait PrintState<'a> {
             _ => ()
         }
         match lit.node {
-            ast::LitKind::Str(ref st, style) => self.print_string(&st, style),
+            ast::LitKind::Str(st, style) => self.print_string(&st.as_str(), style),
             ast::LitKind::Byte(byte) => {
                 let mut res = String::from("b'");
                 res.extend(ascii::escape_default(byte).map(|c| c as char));

@@ -664,7 +664,7 @@ pub trait PrintState<'a> {
                                              &f,
                                              t.ty_to_string()))
             }
-            ast::LitKind::FloatUnsuffixed(ref f) => word(self.writer(), &f[..]),
+            ast::LitKind::FloatUnsuffixed(ref f) => word(self.writer(), &f.as_str()),
             ast::LitKind::Bool(val) => {
                 if val { word(self.writer(), "true") } else { word(self.writer(), "false") }
             }

@@ -752,7 +752,7 @@ pub trait PrintState<'a> {
         }
         try!(self.maybe_print_comment(attr.span.lo));
         if attr.is_sugared_doc {
-            try!(word(self.writer(), &attr.value_str().unwrap()));
+            try!(word(self.writer(), &attr.value_str().unwrap().as_str()));
             hardbreak(self.writer())
         } else {
             match attr.style {

@@ -2220,19 +2220,18 @@ impl<'a> State<'a> {
             ast::ExprKind::InlineAsm(ref a) => {
                 try!(word(&mut self.s, "asm!"));
                 try!(self.popen());
-                try!(self.print_string(&a.asm, a.asm_str_style));
+                try!(self.print_string(&a.asm.as_str(), a.asm_str_style));
                 try!(self.word_space(":"));

-                try!(self.commasep(Inconsistent, &a.outputs,
-                                   |s, out| {
-                    let mut ch = out.constraint.chars();
+                try!(self.commasep(Inconsistent, &a.outputs, |s, out| {
+                    let constraint = out.constraint.as_str();
+                    let mut ch = constraint.chars();
                     match ch.next() {
                         Some('=') if out.is_rw => {
                             try!(s.print_string(&format!("+{}", ch.as_str()),
                                                 ast::StrStyle::Cooked))
                         }
-                        _ => try!(s.print_string(&out.constraint,
-                                                 ast::StrStyle::Cooked))
+                        _ => try!(s.print_string(&constraint, ast::StrStyle::Cooked))
                     }
                     try!(s.popen());
                     try!(s.print_expr(&out.expr));

@@ -2242,9 +2241,8 @@ impl<'a> State<'a> {
                 try!(space(&mut self.s));
                 try!(self.word_space(":"));

-                try!(self.commasep(Inconsistent, &a.inputs,
-                                   |s, &(ref co, ref o)| {
-                    try!(s.print_string(&co, ast::StrStyle::Cooked));
+                try!(self.commasep(Inconsistent, &a.inputs, |s, &(co, ref o)| {
+                    try!(s.print_string(&co.as_str(), ast::StrStyle::Cooked));
                     try!(s.popen());
                     try!(s.print_expr(&o));
                     try!(s.pclose());

@@ -2255,7 +2253,7 @@ impl<'a> State<'a> {

                 try!(self.commasep(Inconsistent, &a.clobbers,
                                    |s, co| {
-                    try!(s.print_string(&co, ast::StrStyle::Cooked));
+                    try!(s.print_string(&co.as_str(), ast::StrStyle::Cooked));
                     Ok(())
                 }));

@@ -305,13 +305,6 @@ impl Encodable for InternedString {
     }
 }

-/// Interns and returns the string contents of an identifier, using the
-/// thread-local interner.
-#[inline]
-pub fn intern_and_get_ident(s: &str) -> InternedString {
-    Symbol::intern(s).as_str()
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;
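With intern_and_get_ident removed above, call sites intern directly with `Symbol::intern(&s)` and, where an API genuinely wants a `&str` (or `AsRef<Path>`), spell out the conversion as `&*sym.as_str()`; the `&*` suggests that `as_str()` returns a wrapper value that derefs to `str` rather than a bare `&str`. A small standalone model of why that spelling appears, with invented Handle/HandleStr types standing in for Symbol and the value its `as_str()` returns:

use std::ops::Deref;
use std::path::PathBuf;
use std::rc::Rc;

// Hypothetical stand-ins: Handle plays the role of the interned symbol, and
// HandleStr the role of the string value returned by as_str(), which derefs
// to str.
#[derive(Copy, Clone)]
struct Handle(u32);

struct HandleStr(Rc<str>);

impl Deref for HandleStr {
    type Target = str;
    fn deref(&self) -> &str {
        &self.0
    }
}

impl Handle {
    fn as_str(&self) -> HandleStr {
        // A real interner would look the index up; the table is faked here.
        let table = ["path", "llvm.memcpy"];
        HandleStr(Rc::from(table[self.0 as usize]))
    }
}

fn main() {
    let path_attr = Handle(0);
    let link_name = Handle(1);

    // An API that wants &str (or AsRef<Path>) needs an explicit conversion,
    // hence the `&*x.as_str()` spelling seen throughout the diff.
    let mut dir = PathBuf::from("src");
    dir.push(&*path_attr.as_str());

    assert!(link_name.as_str().starts_with("llvm."));
    assert_eq!(dir, PathBuf::from("src/path"));
}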
@@ -38,12 +38,12 @@ use parse::{token, ParseSess};
 use print::pprust;
 use ast::{self, Ident};
 use ptr::P;
-use symbol::{self, Symbol, keywords, InternedString};
+use symbol::{self, Symbol, keywords};
 use util::small_vector::SmallVector;

 enum ShouldPanic {
     No,
-    Yes(Option<InternedString>),
+    Yes(Option<Symbol>),
 }

 struct Test {

@@ -60,7 +60,7 @@ struct TestCtxt<'a> {
     path: Vec<Ident>,
     ext_cx: ExtCtxt<'a>,
     testfns: Vec<Test>,
-    reexport_test_harness_main: Option<InternedString>,
+    reexport_test_harness_main: Option<Symbol>,
     is_test_crate: bool,

     // top-level re-export submodule, filled out after folding is finished

@@ -267,7 +267,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt,

 fn generate_test_harness(sess: &ParseSess,
                          resolver: &mut Resolver,
-                         reexport_test_harness_main: Option<InternedString>,
+                         reexport_test_harness_main: Option<Symbol>,
                          krate: ast::Crate,
                          sd: &errors::Handler) -> ast::Crate {
     // Remove the entry points

@@ -548,9 +548,9 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
         vis: ast::Visibility::Public,
         span: DUMMY_SP,
     })).pop().unwrap();
-    let reexport = cx.reexport_test_harness_main.as_ref().map(|s| {
+    let reexport = cx.reexport_test_harness_main.map(|s| {
         // building `use <ident> = __test::main`
-        let reexport_ident = Ident::from_str(&s);
+        let reexport_ident = Ident::with_empty_ctxt(s);

         let use_path =
             nospan(ast::ViewPathSimple(reexport_ident,

@@ -618,7 +618,7 @@ fn mk_tests(cx: &TestCtxt) -> P<ast::Item> {

 fn is_test_crate(krate: &ast::Crate) -> bool {
     match attr::find_crate_name(&krate.attrs) {
-        Some(ref s) if "test" == &s[..] => true,
+        Some(s) if "test" == &*s.as_str() => true,
         _ => false
     }
 }

@@ -664,7 +664,7 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P<ast::Expr> {

     // path to the #[test] function: "foo::bar::baz"
     let path_string = path_name_i(&path[..]);
-    let name_expr = ecx.expr_str(span, symbol::intern_and_get_ident(&path_string[..]));
+    let name_expr = ecx.expr_str(span, Symbol::intern(&path_string));

     // self::test::StaticTestName($name_expr)
     let name_expr = ecx.expr_call(span,

@@ -677,10 +677,10 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P<ast::Expr> {
     };
     let fail_expr = match test.should_panic {
         ShouldPanic::No => ecx.expr_path(should_panic_path("No")),
-        ShouldPanic::Yes(ref msg) => {
-            match *msg {
-                Some(ref msg) => {
-                    let msg = ecx.expr_str(span, msg.clone());
+        ShouldPanic::Yes(msg) => {
+            match msg {
+                Some(msg) => {
+                    let msg = ecx.expr_str(span, msg);
                     let path = should_panic_path("YesWithMessage");
                     ecx.expr_call(span, ecx.expr_path(path), vec![msg])
                 }
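The test-harness changes above lean on the new handle being Copy: `ShouldPanic::Yes(msg)` is matched by value, the Option is mapped without `.as_ref()`, and the `msg.clone()` disappears. The same effect shown with an invented Copy handle type (Tag here is hypothetical):

// Hypothetical Copy handle standing in for the interned symbol.
#[derive(Copy, Clone, Debug, PartialEq)]
struct Tag(u32);

enum ShouldPanic {
    No,
    Yes(Option<Tag>),
}

fn describe(sp: &ShouldPanic) -> String {
    match *sp {
        ShouldPanic::No => "no panic expected".to_string(),
        // With a Copy payload the value can be matched and moved out by value;
        // a non-Copy payload here would have required `ref msg` and `.clone()`.
        ShouldPanic::Yes(msg) => match msg {
            Some(msg) => format!("panic expected with message tag {:?}", msg),
            None => "panic expected".to_string(),
        },
    }
}

fn main() {
    let cases = [ShouldPanic::No, ShouldPanic::Yes(Some(Tag(7)))];
    for case in &cases {
        println!("{}", describe(case));
    }
    // Option<T: Copy> can also be mapped without .as_ref():
    let reexport: Option<Tag> = Some(Tag(1));
    let doubled = reexport.map(|t| Tag(t.0 * 2));
    assert_eq!(doubled, Some(Tag(2)));
}

This is why the diff can replace `cx.reexport_test_harness_main.as_ref().map(|s| ...)` with a plain `.map(|s| ...)`: copying the handle is as cheap as copying an integer.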
@@ -34,7 +34,7 @@ use parse::lexer;
 use parse;
 use parse::token::{self, Token, Lit, Nonterminal};
 use print::pprust;
-use symbol::{self, Symbol};
+use symbol::Symbol;

 use std::fmt;
 use std::iter::*;

@@ -296,7 +296,7 @@ impl TokenTree {
     pub fn maybe_str(&self) -> Option<ast::Lit> {
         match *self {
             TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => {
-                let l = LitKind::Str(symbol::intern_and_get_ident(&parse::str_lit(&s.as_str())),
+                let l = LitKind::Str(Symbol::intern(&parse::str_lit(&s.as_str())),
                                      ast::StrStyle::Cooked);
                 Some(Spanned {
                     node: l,

@@ -304,7 +304,7 @@ impl TokenTree {
                 })
             }
             TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => {
-                let l = LitKind::Str(symbol::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
+                let l = LitKind::Str(Symbol::intern(&parse::raw_str_lit(&s.as_str())),
                                      ast::StrStyle::Raw(n));
                 Some(Spanned {
                     node: l,

@@ -8,9 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

-use ast::Name;
 use std::cmp;
-use symbol::InternedString;
+use symbol::Symbol;

 /// To find the Levenshtein distance between two strings
 pub fn lev_distance(a: &str, b: &str) -> usize {

@@ -48,14 +47,14 @@ pub fn lev_distance(a: &str, b: &str) -> usize {
 /// to one-third of the given word
 pub fn find_best_match_for_name<'a, T>(iter_names: T,
                                        lookup: &str,
-                                       dist: Option<usize>) -> Option<InternedString>
-    where T: Iterator<Item = &'a Name> {
+                                       dist: Option<usize>) -> Option<Symbol>
+    where T: Iterator<Item = &'a Symbol> {
     let max_dist = dist.map_or_else(|| cmp::max(lookup.len(), 3) / 3, |d| d);
     iter_names
-    .filter_map(|name| {
+    .filter_map(|&name| {
         let dist = lev_distance(lookup, &name.as_str());
         match dist <= max_dist { // filter the unwanted cases
-            true => Some((name.as_str(), dist)),
+            true => Some((name, dist)),
             false => None,
         }
     })
@@ -19,7 +19,7 @@ use syntax::ext::base::*;
 use syntax::feature_gate;
 use syntax::parse::{self, token};
 use syntax::ptr::P;
-use syntax::symbol::{self, Symbol, InternedString};
+use syntax::symbol::Symbol;
 use syntax::ast::AsmDialect;
 use syntax_pos::Span;
 use syntax::tokenstream;

@@ -73,7 +73,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
         })
         .unwrap_or(tts.len());
     let mut p = cx.new_parser_from_tts(&tts[first_colon..]);
-    let mut asm = InternedString::new("");
+    let mut asm = Symbol::intern("");
     let mut asm_str_style = None;
     let mut outputs = Vec::new();
     let mut inputs = Vec::new();

@@ -135,11 +135,12 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
                 // It's the opposite of '=&' which means that the memory
                 // cannot be shared with any other operand (usually when
                 // a register is clobbered early.)
-                let mut ch = constraint.chars();
+                let constraint_str = constraint.as_str();
+                let mut ch = constraint_str.chars();
                 let output = match ch.next() {
                     Some('=') => None,
                     Some('+') => {
-                        Some(symbol::intern_and_get_ident(&format!("={}", ch.as_str())))
+                        Some(Symbol::intern(&format!("={}", ch.as_str())))
                     }
                     _ => {
                         cx.span_err(span, "output operand constraint lacks '=' or '+'");

@@ -148,9 +149,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
                 };

                 let is_rw = output.is_some();
-                let is_indirect = constraint.contains("*");
+                let is_indirect = constraint_str.contains("*");
                 outputs.push(ast::InlineAsmOutput {
-                    constraint: output.unwrap_or(constraint.clone()),
+                    constraint: output.unwrap_or(constraint),
                     expr: out,
                     is_rw: is_rw,
                     is_indirect: is_indirect,

@@ -166,9 +167,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,

                 let (constraint, _str_style) = panictry!(p.parse_str());

-                if constraint.starts_with("=") {
+                if constraint.as_str().starts_with("=") {
                     cx.span_err(p.prev_span, "input operand constraint contains '='");
-                } else if constraint.starts_with("+") {
+                } else if constraint.as_str().starts_with("+") {
                     cx.span_err(p.prev_span, "input operand constraint contains '+'");
                 }

@@ -190,7 +191,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,

                 if OPTIONS.iter().any(|&opt| s == opt) {
                     cx.span_warn(p.prev_span, "expected a clobber, found an option");
-                } else if s.starts_with("{") || s.ends_with("}") {
+                } else if s.as_str().starts_with("{") || s.as_str().ends_with("}") {
                     cx.span_err(p.prev_span, "clobber should not be surrounded by braces");
                 }

@@ -251,7 +252,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
     MacEager::expr(P(ast::Expr {
         id: ast::DUMMY_NODE_ID,
         node: ast::ExprKind::InlineAsm(P(ast::InlineAsm {
-            asm: symbol::intern_and_get_ident(&asm),
+            asm: asm,
             asm_str_style: asm_str_style.unwrap(),
             outputs: outputs,
             inputs: inputs,

@@ -11,7 +11,7 @@
 use syntax::ast;
 use syntax::ext::base;
 use syntax::ext::build::AstBuilder;
-use syntax::symbol::intern_and_get_ident;
+use syntax::symbol::Symbol;
 use syntax_pos;
 use syntax::tokenstream;

@@ -33,7 +33,7 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
             ast::LitKind::Str(ref s, _) |
             ast::LitKind::Float(ref s, _) |
             ast::LitKind::FloatUnsuffixed(ref s) => {
-                accumulator.push_str(&s);
+                accumulator.push_str(&s.as_str());
             }
             ast::LitKind::Char(c) => {
                 accumulator.push(c);

@@ -57,5 +57,5 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
             }
         }
     }
-    base::MacEager::expr(cx.expr_str(sp, intern_and_get_ident(&accumulator[..])))
+    base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&accumulator)))
 }

@@ -68,8 +68,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<E

     // We want to make sure we have the expn_id set so that we can use unstable methods
     let span = Span { expn_id: cx.backtrace(), ..span };
-    let name = cx.expr_lit(span,
-                           ast::LitKind::Str(ident.name.as_str(), ast::StrStyle::Cooked));
+    let name = cx.expr_lit(span, ast::LitKind::Str(ident.name, ast::StrStyle::Cooked));
     let builder = Ident::from_str("builder");
     let builder_expr = cx.expr_ident(span, builder.clone());

@@ -107,7 +106,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<E

             for field in fields {
                 let name = cx.expr_lit(field.span,
-                                       ast::LitKind::Str(field.name.unwrap().name.as_str(),
+                                       ast::LitKind::Str(field.name.unwrap().name,
                                                          ast::StrStyle::Cooked));

                 // Use double indirection to make sure this works for unsized types

@@ -19,8 +19,7 @@ use syntax::ast::{Expr, MetaItem, Mutability};
 use syntax::ext::base::{Annotatable, ExtCtxt};
 use syntax::ext::build::AstBuilder;
 use syntax::ptr::P;
-use syntax::symbol::InternedString;
-use syntax::symbol::intern_and_get_ident;
+use syntax::symbol::Symbol;
 use syntax_pos::Span;

 pub fn expand_deriving_rustc_decodable(cx: &mut ExtCtxt,

@@ -131,7 +130,7 @@ fn decodable_substructure(cx: &mut ExtCtxt,
             cx.expr_method_call(trait_span,
                                 decoder,
                                 cx.ident_of("read_struct"),
-                                vec![cx.expr_str(trait_span, substr.type_ident.name.as_str()),
+                                vec![cx.expr_str(trait_span, substr.type_ident.name),
                                      cx.expr_usize(trait_span, nfields),
                                      cx.lambda1(trait_span, result, blkarg)])
         }

@@ -143,7 +142,7 @@ fn decodable_substructure(cx: &mut ExtCtxt,
             let rvariant_arg = cx.ident_of("read_enum_variant_arg");

             for (i, &(ident, v_span, ref parts)) in fields.iter().enumerate() {
-                variants.push(cx.expr_str(v_span, ident.name.as_str()));
+                variants.push(cx.expr_str(v_span, ident.name));

                 let path = cx.path(trait_span, vec![substr.type_ident, ident]);
                 let decoded = decode_static_fields(cx, v_span, path, parts, |cx, span, _, field| {

@@ -175,7 +174,7 @@ fn decodable_substructure(cx: &mut ExtCtxt,
             cx.expr_method_call(trait_span,
                                 decoder,
                                 cx.ident_of("read_enum"),
-                                vec![cx.expr_str(trait_span, substr.type_ident.name.as_str()),
+                                vec![cx.expr_str(trait_span, substr.type_ident.name),
                                      cx.lambda1(trait_span, result, blkarg)])
         }
         _ => cx.bug("expected StaticEnum or StaticStruct in derive(Decodable)"),

@@ -191,7 +190,7 @@ fn decode_static_fields<F>(cx: &mut ExtCtxt,
                            fields: &StaticFields,
                            mut getarg: F)
                            -> P<Expr>
-    where F: FnMut(&mut ExtCtxt, Span, InternedString, usize) -> P<Expr>
+    where F: FnMut(&mut ExtCtxt, Span, Symbol, usize) -> P<Expr>
 {
     match *fields {
         Unnamed(ref fields, is_tuple) => {

@@ -202,7 +201,7 @@ fn decode_static_fields<F>(cx: &mut ExtCtxt,
                 let fields = fields.iter()
                     .enumerate()
                     .map(|(i, &span)| {
-                        getarg(cx, span, intern_and_get_ident(&format!("_field{}", i)), i)
+                        getarg(cx, span, Symbol::intern(&format!("_field{}", i)), i)
                     })
                     .collect();

@@ -214,7 +213,7 @@ fn decode_static_fields<F>(cx: &mut ExtCtxt,
                 let fields = fields.iter()
                     .enumerate()
                     .map(|(i, &(ident, span))| {
-                        let arg = getarg(cx, span, ident.name.as_str(), i);
+                        let arg = getarg(cx, span, ident.name, i);
                         cx.field_imm(span, ident, arg)
                     })
                     .collect();

@@ -96,7 +96,7 @@ use syntax::ast::{Expr, ExprKind, MetaItem, Mutability};
 use syntax::ext::base::{Annotatable, ExtCtxt};
 use syntax::ext::build::AstBuilder;
 use syntax::ptr::P;
-use syntax::symbol::intern_and_get_ident;
+use syntax::symbol::Symbol;
 use syntax_pos::Span;

 pub fn expand_deriving_rustc_encodable(cx: &mut ExtCtxt,

@@ -192,8 +192,8 @@ fn encodable_substructure(cx: &mut ExtCtxt,
             let mut stmts = Vec::new();
             for (i, &FieldInfo { name, ref self_, span, .. }) in fields.iter().enumerate() {
                 let name = match name {
-                    Some(id) => id.name.as_str(),
-                    None => intern_and_get_ident(&format!("_field{}", i)),
+                    Some(id) => id.name,
+                    None => Symbol::intern(&format!("_field{}", i)),
                 };
                 let self_ref = cx.expr_addr_of(span, self_.clone());
                 let enc = cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]);

@@ -226,7 +226,7 @@ fn encodable_substructure(cx: &mut ExtCtxt,
             cx.expr_method_call(trait_span,
                                 encoder,
                                 cx.ident_of("emit_struct"),
-                                vec![cx.expr_str(trait_span, substr.type_ident.name.as_str()),
+                                vec![cx.expr_str(trait_span, substr.type_ident.name),
                                      cx.expr_usize(trait_span, fields.len()),
                                      blk])
         }

@@ -265,7 +265,7 @@ fn encodable_substructure(cx: &mut ExtCtxt,
             }

             let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg);
-            let name = cx.expr_str(trait_span, variant.node.name.name.as_str());
+            let name = cx.expr_str(trait_span, variant.node.name.name);
             let call = cx.expr_method_call(trait_span,
                                            blkencoder,
                                            cx.ident_of("emit_enum_variant"),

@@ -277,8 +277,7 @@ fn encodable_substructure(cx: &mut ExtCtxt,
             let ret = cx.expr_method_call(trait_span,
                                           encoder,
                                           cx.ident_of("emit_enum"),
-                                          vec![cx.expr_str(trait_span,
-                                                           substr.type_ident.name.as_str()),
+                                          vec![cx.expr_str(trait_span ,substr.type_ident.name),
                                                blk]);
             cx.expr_block(cx.block(trait_span, vec![me, cx.stmt_expr(ret)]))
         }

@@ -17,7 +17,7 @@ use syntax::ast;
 use syntax::ext::base::*;
 use syntax::ext::base;
 use syntax::ext::build::AstBuilder;
-use syntax::symbol::intern_and_get_ident;
+use syntax::symbol::Symbol;
 use syntax_pos::Span;
 use syntax::tokenstream;

@@ -32,7 +32,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt,
         Some(v) => v,
     };

-    let e = match env::var(&var[..]) {
+    let e = match env::var(&*var.as_str()) {
         Err(..) => {
             cx.expr_path(cx.path_all(sp,
                                      true,

@@ -49,7 +49,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt,
         Ok(s) => {
             cx.expr_call_global(sp,
                                 cx.std_path(&["option", "Option", "Some"]),
-                                vec![cx.expr_str(sp, intern_and_get_ident(&s[..]))])
+                                vec![cx.expr_str(sp, Symbol::intern(&s))])
         }
     };
     MacEager::expr(e)

@@ -73,7 +73,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt,
         Some((v, _style)) => v,
     };
     let msg = match exprs.next() {
-        None => intern_and_get_ident(&format!("environment variable `{}` not defined", var)),
+        None => Symbol::intern(&format!("environment variable `{}` not defined", var)),
         Some(second) => {
             match expr_to_string(cx, second, "expected string literal") {
                 None => return DummyResult::expr(sp),

@@ -87,12 +87,12 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt,
         return DummyResult::expr(sp);
     }

-    let e = match env::var(&var[..]) {
+    let e = match env::var(&*var.as_str()) {
         Err(_) => {
-            cx.span_err(sp, &msg);
+            cx.span_err(sp, &msg.as_str());
             cx.expr_usize(sp, 0)
         }
-        Ok(s) => cx.expr_str(sp, intern_and_get_ident(&s)),
+        Ok(s) => cx.expr_str(sp, Symbol::intern(&s)),
     };
     MacEager::expr(e)
 }

@@ -19,7 +19,7 @@ use syntax::ext::base;
 use syntax::ext::build::AstBuilder;
 use syntax::parse::token;
 use syntax::ptr::P;
-use syntax::symbol::{self, keywords};
+use syntax::symbol::{Symbol, keywords};
 use syntax_pos::{Span, DUMMY_SP};
 use syntax::tokenstream;

@@ -370,7 +370,7 @@ impl<'a, 'b> Context<'a, 'b> {
     /// Translate the accumulated string literals to a literal expression
     fn trans_literal_string(&mut self) -> P<ast::Expr> {
         let sp = self.fmtsp;
-        let s = symbol::intern_and_get_ident(&self.literal);
+        let s = Symbol::intern(&self.literal);
         self.literal.clear();
         self.ecx.expr_str(sp, s)
     }

@@ -728,7 +728,8 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt,
         fmtsp: fmt.span,
     };

-    let mut parser = parse::Parser::new(&fmt.node.0);
+    let fmt_str = &*fmt.node.0.as_str();
+    let mut parser = parse::Parser::new(fmt_str);
     let mut pieces = vec![];

     loop {

@@ -809,7 +810,6 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt,
     // Decide if we want to look for foreign formatting directives.
     if args_used < args_unused {
         use super::format_foreign as foreign;
-        let fmt_str = &fmt.node.0[..];

         // The set of foreign substitutions we've explained. This prevents spamming the user
         // with `%d should be written as {}` over and over again.

@@ -290,10 +290,10 @@ fn mk_registrar(cx: &mut ExtCtxt,
     let register_custom_derive = Ident::from_str("register_custom_derive");
     let stmts = custom_derives.iter().map(|cd| {
         let path = cx.path_global(cd.span, vec![cd.function_name]);
-        let trait_name = cx.expr_str(cd.span, cd.trait_name.as_str());
+        let trait_name = cx.expr_str(cd.span, cd.trait_name);
         let attrs = cx.expr_vec_slice(
             span,
-            cd.attrs.iter().map(|s| cx.expr_str(cd.span, s.as_str())).collect::<Vec<_>>()
+            cd.attrs.iter().map(|&s| cx.expr_str(cd.span, s)).collect::<Vec<_>>()
         );
         (path, trait_name, attrs)
     }).map(|(path, trait_name, attrs)| {