Make fields of Span private

parent 630e02f25b
commit 3da868dcb6

60 changed files with 316 additions and 349 deletions
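This commit makes the lo/hi/ctxt fields of syntax_pos::Span private, so every consumer is rewritten from direct field access and struct-literal construction to accessor and builder methods. As orientation for the hunks below, the API surface the call sites now rely on looks roughly like the following sketch; it is reconstructed from the call sites in this diff, not copied from syntax_pos, so the exact field layout, derives, and method bodies are assumptions:

    // Minimal sketch of the Span API implied by the call sites in this diff.
    // BytePos and SyntaxContext are simplified stand-ins for the real types.
    #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
    pub struct BytePos(pub u32);

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    pub struct SyntaxContext(u32);

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    pub struct Span {
        lo: BytePos,         // no longer public
        hi: BytePos,         // no longer public
        ctxt: SyntaxContext, // no longer public
    }

    impl Span {
        // Replaces struct-literal construction: Span { lo, hi, ctxt }.
        pub fn new(lo: BytePos, hi: BytePos, ctxt: SyntaxContext) -> Span {
            Span { lo, hi, ctxt }
        }
        // Read accessors replacing direct field reads (span.lo -> span.lo()).
        pub fn lo(self) -> BytePos { self.lo }
        pub fn hi(self) -> BytePos { self.hi }
        pub fn ctxt(self) -> SyntaxContext { self.ctxt }
        // Builders replacing functional-update syntax and field assignment.
        pub fn with_lo(self, lo: BytePos) -> Span { Span { lo, ..self } }
        pub fn with_hi(self, hi: BytePos) -> Span { Span { hi, ..self } }
        pub fn with_ctxt(self, ctxt: SyntaxContext) -> Span { Span { ctxt, ..self } }
    }

With that in mind, the hunks below follow a handful of mechanical patterns: span.lo becomes span.lo(), Span { lo, hi, ctxt } becomes Span::new(lo, hi, ctxt), and Span { hi: x, ..sp } becomes sp.with_hi(x).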
@@ -89,10 +89,7 @@ impl FromStr for TokenStream {
             // notify the expansion info that it is unhygienic
             let mark = Mark::fresh(mark);
             mark.set_expn_info(expn_info);
-            let span = syntax_pos::Span {
-                ctxt: SyntaxContext::empty().apply_mark(mark),
-                ..call_site
-            };
+            let span = call_site.with_ctxt(SyntaxContext::empty().apply_mark(mark));
             let stream = parse::parse_stream_from_source_str(name, src, sess, Some(span));
             Ok(__internal::token_stream_wrap(stream))
         })
@@ -177,10 +174,10 @@ pub struct Span(syntax_pos::Span);
 #[unstable(feature = "proc_macro", issue = "38356")]
 impl Default for Span {
     fn default() -> Span {
-        ::__internal::with_sess(|(_, mark)| Span(syntax_pos::Span {
-            ctxt: SyntaxContext::empty().apply_mark(mark),
-            ..mark.expn_info().unwrap().call_site
-        }))
+        ::__internal::with_sess(|(_, mark)| {
+            let call_site = mark.expn_info().unwrap().call_site;
+            Span(call_site.with_ctxt(SyntaxContext::empty().apply_mark(mark)))
+        })
     }
 }

@@ -570,7 +567,7 @@ impl TokenTree {
             }).into();
         },
         TokenNode::Term(symbol) => {
-            let ident = ast::Ident { name: symbol.0, ctxt: self.span.0.ctxt };
+            let ident = ast::Ident { name: symbol.0, ctxt: self.span.0.ctxt() };
             let token =
                 if symbol.0.as_str().starts_with("'") { Lifetime(ident) } else { Ident(ident) };
             return TokenTree::Token(self.span.0, token).into();
@@ -425,8 +425,7 @@ impl<'a> LoweringContext<'a> {
         Symbol::gensym(s)
     }

-    fn allow_internal_unstable(&self, reason: CompilerDesugaringKind, mut span: Span)
-        -> Span
+    fn allow_internal_unstable(&self, reason: CompilerDesugaringKind, span: Span) -> Span
     {
         let mark = Mark::fresh(Mark::root());
         mark.set_expn_info(codemap::ExpnInfo {
@@ -438,8 +437,7 @@ impl<'a> LoweringContext<'a> {
                 allow_internal_unsafe: false,
             },
         });
-        span.ctxt = SyntaxContext::empty().apply_mark(mark);
-        span
+        span.with_ctxt(SyntaxContext::empty().apply_mark(mark))
     }

     fn with_catch_scope<T, F>(&mut self, catch_id: NodeId, f: F) -> T
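One consequence of the privatization is visible in allow_internal_unstable above: the function can no longer take mut span and assign span.ctxt = ... before returning it; it now returns a freshly built span via with_ctxt. Any other code that used to patch a span in place has to move to the same returning style, roughly like this hypothetical helper (the helper name is made up; Mark, SyntaxContext, and apply_mark are the types used elsewhere in the diff):

    // Illustrative only: attach a fresh expansion mark to a span by
    // building a new Span instead of mutating the old one.
    fn respanned_with_mark(span: Span, mark: Mark) -> Span {
        span.with_ctxt(SyntaxContext::empty().apply_mark(mark))
    }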
@@ -613,7 +611,7 @@ impl<'a> LoweringContext<'a> {
             TyKind::Slice(ref ty) => hir::TySlice(self.lower_ty(ty)),
             TyKind::Ptr(ref mt) => hir::TyPtr(self.lower_mt(mt)),
             TyKind::Rptr(ref region, ref mt) => {
-                let span = Span { hi: t.span.lo, ..t.span };
+                let span = t.span.with_hi(t.span.lo());
                 let lifetime = match *region {
                     Some(ref lt) => self.lower_lifetime(lt),
                     None => self.elided_lifetime(span)
@@ -1237,7 +1235,7 @@ impl<'a> LoweringContext<'a> {
                 name: self.lower_ident(match f.ident {
                     Some(ident) => ident,
                     // FIXME(jseyfried) positional field hygiene
-                    None => Ident { name: Symbol::intern(&index.to_string()), ctxt: f.span.ctxt },
+                    None => Ident { name: Symbol::intern(&index.to_string()), ctxt: f.span.ctxt() },
                 }),
                 vis: self.lower_visibility(&f.vis, None),
                 ty: self.lower_ty(&f.ty),
@@ -262,7 +262,7 @@ impl<'a> State<'a> {
                               indented: usize,
                               close_box: bool)
                               -> io::Result<()> {
-        self.maybe_print_comment(span.hi)?;
+        self.maybe_print_comment(span.hi())?;
         self.break_offset_if_not_bol(1, -(indented as isize))?;
         self.s.word("}")?;
         if close_box {
@@ -324,12 +324,12 @@ impl<'a> State<'a> {
         let len = elts.len();
         let mut i = 0;
         for elt in elts {
-            self.maybe_print_comment(get_span(elt).hi)?;
+            self.maybe_print_comment(get_span(elt).hi())?;
             op(self, elt)?;
             i += 1;
             if i < len {
                 self.s.word(",")?;
-                self.maybe_print_trailing_comment(get_span(elt), Some(get_span(&elts[i]).hi))?;
+                self.maybe_print_trailing_comment(get_span(elt), Some(get_span(&elts[i]).hi()))?;
                 self.space_if_not_bol()?;
             }
         }
@@ -368,7 +368,7 @@ impl<'a> State<'a> {
     }

     pub fn print_type(&mut self, ty: &hir::Ty) -> io::Result<()> {
-        self.maybe_print_comment(ty.span.lo)?;
+        self.maybe_print_comment(ty.span.lo())?;
         self.ibox(0)?;
         match ty.node {
             hir::TySlice(ref ty) => {
@@ -458,7 +458,7 @@ impl<'a> State<'a> {

     pub fn print_foreign_item(&mut self, item: &hir::ForeignItem) -> io::Result<()> {
         self.hardbreak_if_not_bol()?;
-        self.maybe_print_comment(item.span.lo)?;
+        self.maybe_print_comment(item.span.lo())?;
         self.print_outer_attributes(&item.attrs)?;
         match item.node {
             hir::ForeignItemFn(ref decl, ref arg_names, ref generics) => {
@@ -531,7 +531,7 @@ impl<'a> State<'a> {
     /// Pretty-print an item
     pub fn print_item(&mut self, item: &hir::Item) -> io::Result<()> {
         self.hardbreak_if_not_bol()?;
-        self.maybe_print_comment(item.span.lo)?;
+        self.maybe_print_comment(item.span.lo())?;
         self.print_outer_attributes(&item.attrs)?;
         self.ann.pre(self, NodeItem(item))?;
         match item.node {
@@ -797,7 +797,7 @@ impl<'a> State<'a> {
         self.bopen()?;
         for v in variants {
             self.space_if_not_bol()?;
-            self.maybe_print_comment(v.span.lo)?;
+            self.maybe_print_comment(v.span.lo())?;
             self.print_outer_attributes(&v.node.attrs)?;
             self.ibox(indent_unit)?;
             self.print_variant(v)?;
@@ -842,7 +842,7 @@ impl<'a> State<'a> {
         if struct_def.is_tuple() {
             self.popen()?;
             self.commasep(Inconsistent, struct_def.fields(), |s, field| {
-                s.maybe_print_comment(field.span.lo)?;
+                s.maybe_print_comment(field.span.lo())?;
                 s.print_outer_attributes(&field.attrs)?;
                 s.print_visibility(&field.vis)?;
                 s.print_type(&field.ty)
@@ -863,7 +863,7 @@ impl<'a> State<'a> {

         for field in struct_def.fields() {
             self.hardbreak_if_not_bol()?;
-            self.maybe_print_comment(field.span.lo)?;
+            self.maybe_print_comment(field.span.lo())?;
             self.print_outer_attributes(&field.attrs)?;
             self.print_visibility(&field.vis)?;
             self.print_name(field.name)?;
@@ -908,7 +908,7 @@ impl<'a> State<'a> {
     pub fn print_trait_item(&mut self, ti: &hir::TraitItem) -> io::Result<()> {
         self.ann.pre(self, NodeSubItem(ti.id))?;
         self.hardbreak_if_not_bol()?;
-        self.maybe_print_comment(ti.span.lo)?;
+        self.maybe_print_comment(ti.span.lo())?;
         self.print_outer_attributes(&ti.attrs)?;
         match ti.node {
             hir::TraitItemKind::Const(ref ty, default) => {
@@ -938,7 +938,7 @@ impl<'a> State<'a> {
     pub fn print_impl_item(&mut self, ii: &hir::ImplItem) -> io::Result<()> {
         self.ann.pre(self, NodeSubItem(ii.id))?;
         self.hardbreak_if_not_bol()?;
-        self.maybe_print_comment(ii.span.lo)?;
+        self.maybe_print_comment(ii.span.lo())?;
         self.print_outer_attributes(&ii.attrs)?;
         self.print_defaultness(ii.defaultness)?;

@@ -962,7 +962,7 @@ impl<'a> State<'a> {
     }

     pub fn print_stmt(&mut self, st: &hir::Stmt) -> io::Result<()> {
-        self.maybe_print_comment(st.span.lo)?;
+        self.maybe_print_comment(st.span.lo())?;
         match st.node {
             hir::StmtDecl(ref decl, _) => {
                 self.print_decl(&decl)?;
@@ -1017,7 +1017,7 @@ impl<'a> State<'a> {
             hir::PopUnsafeBlock(..) => self.word_space("pop_unsafe")?,
             hir::DefaultBlock => (),
         }
-        self.maybe_print_comment(blk.span.lo)?;
+        self.maybe_print_comment(blk.span.lo())?;
         self.ann.pre(self, NodeBlock(blk))?;
         self.bopen()?;

@@ -1030,7 +1030,7 @@ impl<'a> State<'a> {
             Some(ref expr) => {
                 self.space_if_not_bol()?;
                 self.print_expr(&expr)?;
-                self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi))?;
+                self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi()))?;
             }
             _ => (),
         }
@@ -1228,7 +1228,7 @@ impl<'a> State<'a> {
     }

     pub fn print_expr(&mut self, expr: &hir::Expr) -> io::Result<()> {
-        self.maybe_print_comment(expr.span.lo)?;
+        self.maybe_print_comment(expr.span.lo())?;
         self.print_outer_attributes(&expr.attrs)?;
         self.ibox(indent_unit)?;
         self.ann.pre(self, NodeExpr(expr))?;
@@ -1480,7 +1480,7 @@ impl<'a> State<'a> {
     }

     pub fn print_decl(&mut self, decl: &hir::Decl) -> io::Result<()> {
-        self.maybe_print_comment(decl.span.lo)?;
+        self.maybe_print_comment(decl.span.lo())?;
         match decl.node {
             hir::DeclLocal(ref loc) => {
                 self.space_if_not_bol()?;
@@ -1523,7 +1523,7 @@ impl<'a> State<'a> {
                           path: &hir::Path,
                           colons_before_params: bool)
                           -> io::Result<()> {
-        self.maybe_print_comment(path.span.lo)?;
+        self.maybe_print_comment(path.span.lo())?;

         for (i, segment) in path.segments.iter().enumerate() {
             if i > 0 {
@@ -1641,7 +1641,7 @@ impl<'a> State<'a> {
     }

     pub fn print_pat(&mut self, pat: &hir::Pat) -> io::Result<()> {
-        self.maybe_print_comment(pat.span.lo)?;
+        self.maybe_print_comment(pat.span.lo())?;
         self.ann.pre(self, NodePat(pat))?;
         // Pat isn't normalized, but the beauty of it
         // is that it doesn't matter
@@ -1897,7 +1897,7 @@ impl<'a> State<'a> {
         match decl.output {
             hir::Return(ref ty) => {
                 self.print_type(&ty)?;
-                self.maybe_print_comment(ty.span.lo)
+                self.maybe_print_comment(ty.span.lo())
             }
             hir::DefaultReturn(..) => unreachable!(),
         }
@@ -2074,7 +2074,7 @@ impl<'a> State<'a> {
         self.end()?;

         match decl.output {
-            hir::Return(ref output) => self.maybe_print_comment(output.span.lo),
+            hir::Return(ref output) => self.maybe_print_comment(output.span.lo()),
             _ => Ok(()),
         }
     }
@@ -2124,13 +2124,13 @@ impl<'a> State<'a> {
         if (*cmnt).style != comments::Trailing {
             return Ok(());
         }
-        let span_line = cm.lookup_char_pos(span.hi);
+        let span_line = cm.lookup_char_pos(span.hi());
         let comment_line = cm.lookup_char_pos((*cmnt).pos);
         let mut next = (*cmnt).pos + BytePos(1);
         if let Some(p) = next_pos {
             next = p;
         }
-        if span.hi < (*cmnt).pos && (*cmnt).pos < next &&
+        if span.hi() < (*cmnt).pos && (*cmnt).pos < next &&
             span_line.line == comment_line.line {
             self.print_comment(cmnt)?;
         }
@@ -253,17 +253,17 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for Span {
         // If this is not an empty or invalid span, we want to hash the last
         // position that belongs to it, as opposed to hashing the first
         // position past it.
-        let span_hi = if self.hi > self.lo {
+        let span_hi = if self.hi() > self.lo() {
             // We might end up in the middle of a multibyte character here,
             // but that's OK, since we are not trying to decode anything at
             // this position.
-            self.hi - ::syntax_pos::BytePos(1)
+            self.hi() - ::syntax_pos::BytePos(1)
         } else {
-            self.hi
+            self.hi()
         };

         {
-            let loc1 = hcx.codemap().byte_pos_to_line_and_col(self.lo);
+            let loc1 = hcx.codemap().byte_pos_to_line_and_col(self.lo());
             let loc1 = loc1.as_ref()
                            .map(|&(ref fm, line, col)| (&fm.name[..], line, col.to_usize()))
                            .unwrap_or(("???", 0, 0));
@@ -296,7 +296,7 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for Span {
             }
         }

-        if self.ctxt == SyntaxContext::empty() {
+        if self.ctxt() == SyntaxContext::empty() {
             0u8.hash_stable(hcx, hasher);
         } else {
             1u8.hash_stable(hcx, hasher);
@@ -118,7 +118,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
         fn explain_span<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
                                         heading: &str, span: Span)
                                         -> (String, Option<Span>) {
-            let lo = tcx.sess.codemap().lookup_char_pos_adj(span.lo);
+            let lo = tcx.sess.codemap().lookup_char_pos_adj(span.lo());
            (format!("the {} at {}:{}", heading, lo.line, lo.col.to_usize() + 1),
             Some(span))
        }
@@ -180,7 +180,7 @@ impl CodeExtent {
                     // (This is the special case aluded to in the
                     // doc-comment for this method)
                     let stmt_span = blk.stmts[r.first_statement_index as usize].span;
-                    Some(Span { lo: stmt_span.hi, hi: blk.span.hi, ctxt: stmt_span.ctxt })
+                    Some(Span::new(stmt_span.hi(), blk.span.hi(), stmt_span.ctxt()))
                 }
             }
         }
@@ -82,10 +82,7 @@ impl<'a> Folder for ExpandAllocatorDirectives<'a> {
                 allow_internal_unsafe: false,
             }
         });
-        let span = Span {
-            ctxt: SyntaxContext::empty().apply_mark(mark),
-            ..item.span
-        };
+        let span = item.span.with_ctxt(SyntaxContext::empty().apply_mark(mark));
         let ecfg = ExpansionConfig::default(name.to_string());
         let mut f = AllocFnFactory {
             span,
@@ -183,8 +183,8 @@ impl EmitterWriter {
                     continue;
                 }

-                let lo = cm.lookup_char_pos(span_label.span.lo);
-                let mut hi = cm.lookup_char_pos(span_label.span.hi);
+                let lo = cm.lookup_char_pos(span_label.span.lo());
+                let mut hi = cm.lookup_char_pos(span_label.span.hi());

                 // Watch out for "empty spans". If we get a span like 6..6, we
                 // want to just display a `^` at 6, so convert that to
@@ -683,7 +683,7 @@ impl EmitterWriter {
         if let Some(ref cm) = self.cm {
             for primary_span in msp.primary_spans() {
                 if primary_span != &DUMMY_SP {
-                    let hi = cm.lookup_char_pos(primary_span.hi);
+                    let hi = cm.lookup_char_pos(primary_span.hi());
                     if hi.line > max {
                         max = hi.line;
                     }
@@ -691,7 +691,7 @@ impl EmitterWriter {
             }
             for span_label in msp.span_labels() {
                 if span_label.span != DUMMY_SP {
-                    let hi = cm.lookup_char_pos(span_label.span.hi);
+                    let hi = cm.lookup_char_pos(span_label.span.hi());
                     if hi.line > max {
                         max = hi.line;
                     }
@@ -914,7 +914,7 @@ impl EmitterWriter {
         let (primary_lo, cm) = if let (Some(cm), Some(ref primary_span)) =
             (self.cm.as_ref(), msp.primary_span().as_ref()) {
             if primary_span != &&DUMMY_SP {
-                (cm.lookup_char_pos(primary_span.lo), cm)
+                (cm.lookup_char_pos(primary_span.lo()), cm)
             } else {
                 emit_to_destination(&buffer.render(), level, &mut self.dst)?;
                 return Ok(());
@@ -1091,7 +1091,7 @@ impl EmitterWriter {
                           Some(Style::HeaderMsg));

            let suggestions = suggestion.splice_lines(cm.borrow());
-           let span_start_pos = cm.lookup_char_pos(primary_sub.span.lo);
+           let span_start_pos = cm.lookup_char_pos(primary_sub.span.lo());
            let line_start = span_start_pos.line;
            draw_col_separator_no_space(&mut buffer, 1, max_line_num_len + 1);
            let mut row_num = 2;
@@ -148,16 +148,12 @@ impl CodeSuggestion {

         // Assumption: all spans are in the same file, and all spans
         // are disjoint. Sort in ascending order.
-        primary_spans.sort_by_key(|sp| sp.0.lo);
+        primary_spans.sort_by_key(|sp| sp.0.lo());

         // Find the bounding span.
-        let lo = primary_spans.iter().map(|sp| sp.0.lo).min().unwrap();
-        let hi = primary_spans.iter().map(|sp| sp.0.hi).min().unwrap();
-        let bounding_span = Span {
-            lo,
-            hi,
-            ctxt: NO_EXPANSION,
-        };
+        let lo = primary_spans.iter().map(|sp| sp.0.lo()).min().unwrap();
+        let hi = primary_spans.iter().map(|sp| sp.0.hi()).min().unwrap();
+        let bounding_span = Span::new(lo, hi, NO_EXPANSION);
         let lines = cm.span_to_lines(bounding_span).unwrap();
         assert!(!lines.lines.is_empty());

@@ -171,14 +167,14 @@ impl CodeSuggestion {
         //
         // Finally push the trailing line segment of the last span
         let fm = &lines.file;
-        let mut prev_hi = cm.lookup_char_pos(bounding_span.lo);
+        let mut prev_hi = cm.lookup_char_pos(bounding_span.lo());
         prev_hi.col = CharPos::from_usize(0);

         let mut prev_line = fm.get_line(lines.lines[0].line_index);
         let mut bufs = vec![(String::new(), false); self.substitutions()];

         for (sp, substitutes) in primary_spans {
-            let cur_lo = cm.lookup_char_pos(sp.lo);
+            let cur_lo = cm.lookup_char_pos(sp.lo());
             for (&mut (ref mut buf, ref mut underline), substitute) in bufs.iter_mut()
                                                                           .zip(substitutes) {
                 if prev_hi.line == cur_lo.line {
@@ -208,7 +204,7 @@ impl CodeSuggestion {
                 }
                 buf.push_str(substitute);
             }
-            prev_hi = cm.lookup_char_pos(sp.hi);
+            prev_hi = cm.lookup_char_pos(sp.hi());
             prev_line = fm.get_line(prev_hi.line - 1);
         }
         for &mut (ref mut buf, _) in &mut bufs {
@@ -369,7 +369,7 @@ impl CrateStore for cstore::CStore {
         let source_name = format!("<{} macros>", name);

         let filemap = sess.parse_sess.codemap().new_filemap(source_name, def.body);
-        let local_span = Span { lo: filemap.start_pos, hi: filemap.end_pos, ctxt: NO_EXPANSION };
+        let local_span = Span::new(filemap.start_pos, filemap.end_pos, NO_EXPANSION);
         let body = filemap_to_stream(&sess.parse_sess, filemap, None);

         // Mark the attrs as used
@@ -242,7 +242,7 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for DecodeContext<'a, 'tcx> {
         let sess = if let Some(sess) = self.sess {
             sess
         } else {
-            return Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION });
+            return Ok(Span::new(lo, hi, NO_EXPANSION));
         };

         let (lo, hi) = if lo > hi {
@@ -289,7 +289,7 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for DecodeContext<'a, 'tcx> {
         let lo = (lo - filemap.original_start_pos) + filemap.translated_filemap.start_pos;
         let hi = (hi - filemap.original_start_pos) + filemap.translated_filemap.start_pos;

-        Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION })
+        Ok(Span::new(lo, hi, NO_EXPANSION))
     }
 }

@@ -364,7 +364,7 @@ fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>,
         builder.args_and_body(block, &arguments, arg_extent, &body.value)
     }));
     // Attribute epilogue to function's closing brace
-    let fn_end = Span { lo: span.hi, ..span };
+    let fn_end = span.with_lo(span.hi());
     let source_info = builder.source_info(fn_end);
     let return_block = builder.return_block();
     builder.cfg.terminate(block, source_info,
@@ -636,7 +636,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
         let tcx = self.hir.tcx();
         let extent_span = extent.span(&tcx.hir).unwrap();
         // Attribute scope exit drops to scope's closing brace
-        let scope_end = Span { lo: extent_span.hi, .. extent_span};
+        let scope_end = extent_span.with_lo(extent_span.hi());
         scope.drops.push(DropData {
             span: scope_end,
             location: lvalue.clone(),
@@ -477,7 +477,7 @@ struct NamePrivacyVisitor<'a, 'tcx: 'a> {
 impl<'a, 'tcx> NamePrivacyVisitor<'a, 'tcx> {
     // Checks that a field is accessible.
     fn check_field(&mut self, span: Span, def: &'tcx ty::AdtDef, field: &'tcx ty::FieldDef) {
-        let ident = Ident { ctxt: span.ctxt.modern(), ..keywords::Invalid.ident() };
+        let ident = Ident { ctxt: span.ctxt().modern(), ..keywords::Invalid.ident() };
         let def_id = self.tcx.adjust_ident(ident, def.did, self.current_item).1;
         if !def.is_enum() && !field.vis.is_accessible_from(def_id, self.tcx) {
             struct_span_err!(self.tcx.sess, span, E0451, "field `{}` of {} `{}` is private",
@@ -606,9 +606,7 @@ impl<'tcx> Visitor<'tcx> for UsePlacementFinder {
                 // don't suggest placing a use before the prelude
                 // import or other generated ones
                 if item.span == DUMMY_SP {
-                    let mut span = item.span;
-                    span.hi = span.lo;
-                    self.span = Some(span);
+                    self.span = Some(item.span.with_hi(item.span.lo()));
                     self.found_use = true;
                     return;
                 }
@@ -617,9 +615,7 @@ impl<'tcx> Visitor<'tcx> for UsePlacementFinder {
             ItemKind::ExternCrate(_) => {}
             // but place them before the first other item
             _ => if self.span.map_or(true, |span| item.span < span ) {
-                let mut span = item.span;
-                span.hi = span.lo;
-                self.span = Some(span);
+                self.span = Some(item.span.with_hi(item.span.lo()));
             },
         }
     }
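Both arms of UsePlacementFinder above use the same idiom: collapse an item's span to a zero-length span at its starting position, which previously took three statements (copy the span, assign span.hi = span.lo, store it). With the builder methods it becomes a single expression. As a small illustration, the idiom wrapped in a helper whose name is made up here:

    // Zero-length span at the start of `sp` (illustrative helper name).
    fn start_point(sp: Span) -> Span {
        sp.with_hi(sp.lo())
    }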
@@ -1732,7 +1728,7 @@ impl<'a> Resolver<'a> {

     fn resolve_self(&mut self, ctxt: &mut SyntaxContext, module: Module<'a>) -> Module<'a> {
         let mut module = self.get_module(module.normal_ancestor_id);
-        while module.span.ctxt.modern() != *ctxt {
+        while module.span.ctxt().modern() != *ctxt {
             let parent = module.parent.unwrap_or_else(|| self.macro_def_scope(ctxt.remove_mark()));
             module = self.get_module(parent.normal_ancestor_id);
         }
@@ -2659,8 +2655,8 @@ impl<'a> Resolver<'a> {
                         sp = sp.next_point();
                         if let Ok(snippet) = cm.span_to_snippet(sp.to(sp.next_point())) {
                             debug!("snippet {:?}", snippet);
-                            let line_sp = cm.lookup_char_pos(sp.hi).line;
-                            let line_base_sp = cm.lookup_char_pos(base_span.lo).line;
+                            let line_sp = cm.lookup_char_pos(sp.hi()).line;
+                            let line_base_sp = cm.lookup_char_pos(base_span.lo()).line;
                             debug!("{:?} {:?}", line_sp, line_base_sp);
                             if snippet == ":" {
                                 err.span_label(base_span,
@@ -3360,7 +3356,7 @@ impl<'a> Resolver<'a> {
         for &(trait_name, binding) in traits.as_ref().unwrap().iter() {
             let module = binding.module().unwrap();
             let mut ident = ident;
-            if ident.ctxt.glob_adjust(module.expansion, binding.span.ctxt.modern()).is_none() {
+            if ident.ctxt.glob_adjust(module.expansion, binding.span.ctxt().modern()).is_none() {
                 continue
             }
             if self.resolve_ident_in_module_unadjusted(module, ident, ns, false, false, module.span)
@@ -3586,7 +3582,7 @@ impl<'a> Resolver<'a> {
                        new_binding: &NameBinding,
                        old_binding: &NameBinding) {
         // Error on the second of two conflicting names
-        if old_binding.span.lo > new_binding.span.lo {
+        if old_binding.span.lo() > new_binding.span.lo() {
             return self.report_conflict(parent, ident, ns, old_binding, new_binding);
         }

@@ -237,7 +237,7 @@ impl<'a> Resolver<'a> {
         }
         let module = unwrap_or!(directive.imported_module.get(), return Err(Undetermined));
         let (orig_current_module, mut ident) = (self.current_module, ident.modern());
-        match ident.ctxt.glob_adjust(module.expansion, directive.span.ctxt.modern()) {
+        match ident.ctxt.glob_adjust(module.expansion, directive.span.ctxt().modern()) {
             Some(Some(def)) => self.current_module = self.macro_def_scope(def),
             Some(None) => {}
             None => continue,
@@ -398,7 +398,7 @@ impl<'a> Resolver<'a> {
         for directive in module.glob_importers.borrow_mut().iter() {
             let mut ident = ident.modern();
             let scope = match ident.ctxt.reverse_glob_adjust(module.expansion,
-                                                             directive.span.ctxt.modern()) {
+                                                             directive.span.ctxt().modern()) {
                 Some(Some(def)) => self.macro_def_scope(def),
                 Some(None) => directive.parent,
                 None => continue,
@@ -800,7 +800,7 @@ impl<'a, 'b:'a> ImportResolver<'a, 'b> {
         }).collect::<Vec<_>>();
         for ((mut ident, ns), binding) in bindings {
             let scope = match ident.ctxt.reverse_glob_adjust(module.expansion,
                                                              directive.span.ctxt.modern()) {
-                                                             directive.span.ctxt.modern()) {
+                                                             directive.span.ctxt().modern()) {
                 Some(Some(def)) => self.macro_def_scope(def),
                 Some(None) => self.current_module,
                 None => continue,
|
|
@ -91,13 +91,13 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> {
|
||||||
use rls_span::{Row, Column};
|
use rls_span::{Row, Column};
|
||||||
|
|
||||||
let cm = self.tcx.sess.codemap();
|
let cm = self.tcx.sess.codemap();
|
||||||
let start = cm.lookup_char_pos(span.lo);
|
let start = cm.lookup_char_pos(span.lo());
|
||||||
let end = cm.lookup_char_pos(span.hi);
|
let end = cm.lookup_char_pos(span.hi());
|
||||||
|
|
||||||
SpanData {
|
SpanData {
|
||||||
file_name: start.file.name.clone().into(),
|
file_name: start.file.name.clone().into(),
|
||||||
byte_start: span.lo.0,
|
byte_start: span.lo().0,
|
||||||
byte_end: span.hi.0,
|
byte_end: span.hi().0,
|
||||||
line_start: Row::new_one_indexed(start.line as u32),
|
line_start: Row::new_one_indexed(start.line as u32),
|
||||||
line_end: Row::new_one_indexed(end.line as u32),
|
line_end: Row::new_one_indexed(end.line as u32),
|
||||||
column_start: Column::new_one_indexed(start.col.0 as u32 + 1),
|
column_start: Column::new_one_indexed(start.col.0 as u32 + 1),
|
||||||
|
@ -117,7 +117,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
let lo_loc = self.span_utils.sess.codemap().lookup_char_pos(span.lo);
|
let lo_loc = self.span_utils.sess.codemap().lookup_char_pos(span.lo());
|
||||||
result.push(ExternalCrateData {
|
result.push(ExternalCrateData {
|
||||||
name: self.tcx.sess.cstore.crate_name(n).to_string(),
|
name: self.tcx.sess.cstore.crate_name(n).to_string(),
|
||||||
num: n.as_u32(),
|
num: n.as_u32(),
|
||||||
|
@ -999,7 +999,7 @@ fn escape(s: String) -> String {
|
||||||
// Helper function to determine if a span came from a
|
// Helper function to determine if a span came from a
|
||||||
// macro expansion or syntax extension.
|
// macro expansion or syntax extension.
|
||||||
fn generated_code(span: Span) -> bool {
|
fn generated_code(span: Span) -> bool {
|
||||||
span.ctxt != NO_EXPANSION || span == DUMMY_SP
|
span.ctxt() != NO_EXPANSION || span == DUMMY_SP
|
||||||
}
|
}
|
||||||
|
|
||||||
// DefId::index is a newtype and so the JSON serialisation is ugly. Therefore
|
// DefId::index is a newtype and so the JSON serialisation is ugly. Therefore
|
||||||
|
|
|
@@ -192,7 +192,7 @@ impl<'a> SpanUtils<'a> {
             prev = next;
         }
         if angle_count != 0 || bracket_count != 0 {
-            let loc = self.sess.codemap().lookup_char_pos(span.lo);
+            let loc = self.sess.codemap().lookup_char_pos(span.lo());
             span_bug!(span,
                       "Mis-counted brackets when breaking path? Parsing '{}' \
                        in {}, line {}",
@@ -319,7 +319,7 @@ impl<'a> SpanUtils<'a> {
         };

         //If the span comes from a fake filemap, filter it.
-        if !self.sess.codemap().lookup_char_pos(parent.lo).file.is_real_file() {
+        if !self.sess.codemap().lookup_char_pos(parent.lo()).file.is_real_file() {
             return true;
         }

@@ -1371,7 +1371,7 @@ fn assert_symbols_are_distinct<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trans_i
             // Deterministically select one of the spans for error reporting
             let span = match (span1, span2) {
                 (Some(span1), Some(span2)) => {
-                    Some(if span1.lo.0 > span2.lo.0 {
+                    Some(if span1.lo().0 > span2.lo().0 {
                         span1
                     } else {
                         span2
@@ -49,7 +49,7 @@ pub fn create_DIArray(builder: DIBuilderRef, arr: &[DIDescriptor]) -> DIArray {

 /// Return syntax_pos::Loc corresponding to the beginning of the span
 pub fn span_start(cx: &CrateContext, span: Span) -> syntax_pos::Loc {
-    cx.sess().codemap().lookup_char_pos(span.lo)
+    cx.sess().codemap().lookup_char_pos(span.lo())
 }

 pub fn size_and_align_of(cx: &CrateContext, llvm_type: Type) -> (u64, u32) {
@@ -330,7 +330,7 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
         self.set_debug_loc(&bcx, terminator.source_info);

         // Get the location information.
-        let loc = bcx.sess().codemap().lookup_char_pos(span.lo);
+        let loc = bcx.sess().codemap().lookup_char_pos(span.lo());
         let filename = Symbol::intern(&loc.file.name).as_str();
         let filename = C_str_slice(bcx.ccx, filename);
         let line = C_u32(bcx.ccx, loc.line as u32);
@@ -129,23 +129,23 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
         // In order to have a good line stepping behavior in debugger, we overwrite debug
         // locations of macro expansions with that of the outermost expansion site
         // (unless the crate is being compiled with `-Z debug-macros`).
-        if source_info.span.ctxt == NO_EXPANSION ||
+        if source_info.span.ctxt() == NO_EXPANSION ||
            self.ccx.sess().opts.debugging_opts.debug_macros {
-            let scope = self.scope_metadata_for_loc(source_info.scope, source_info.span.lo);
+            let scope = self.scope_metadata_for_loc(source_info.scope, source_info.span.lo());
             (scope, source_info.span)
         } else {
             // Walk up the macro expansion chain until we reach a non-expanded span.
             // We also stop at the function body level because no line stepping can occur
             // at the level above that.
             let mut span = source_info.span;
-            while span.ctxt != NO_EXPANSION && span.ctxt != self.mir.span.ctxt {
-                if let Some(info) = span.ctxt.outer().expn_info() {
+            while span.ctxt() != NO_EXPANSION && span.ctxt() != self.mir.span.ctxt() {
+                if let Some(info) = span.ctxt().outer().expn_info() {
                     span = info.call_site;
                 } else {
                     break;
                 }
             }
-            let scope = self.scope_metadata_for_loc(source_info.scope, span.lo);
+            let scope = self.scope_metadata_for_loc(source_info.scope, span.lo());
             // Use span of the outermost expansion site, while keeping the original lexical scope.
             (scope, span)
         }
@@ -2486,9 +2486,9 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
                 err.span_label(def_s, "defined here");
             }
             if sugg_unit {
-                let mut sugg_span = sp.end_point();
+                let sugg_span = sp.end_point();
                 // remove closing `)` from the span
-                sugg_span.hi = sugg_span.lo;
+                let sugg_span = sugg_span.with_hi(sugg_span.lo());
                 err.span_suggestion(
                     sugg_span,
                     "expected the unit value `()`. You can create one with a pair of parenthesis",
@@ -3137,7 +3137,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
                 debug!("tuple struct named {:?}", base_t);
                 let ident = ast::Ident {
                     name: Symbol::intern(&idx.node.to_string()),
-                    ctxt: idx.span.ctxt.modern(),
+                    ctxt: idx.span.ctxt().modern(),
                 };
                 let (ident, def_scope) =
                     self.tcx.adjust_ident(ident, base_def.did, self.body_id);
@@ -4524,11 +4524,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
             return;
         }
         let original_span = original_sp(last_stmt.span, blk.span);
-        let span_semi = Span {
-            lo: original_span.hi - BytePos(1),
-            hi: original_span.hi,
-            ctxt: original_span.ctxt,
-        };
+        let span_semi = original_span.with_lo(original_span.hi() - BytePos(1));
         err.span_suggestion(span_semi, "consider removing this semicolon", "".to_string());
     }

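The span_semi rewrite above is the hunk whose arithmetic is easiest to misread: original_span.with_lo(original_span.hi() - BytePos(1)) keeps the old hi and moves lo to one byte before it, so the result covers exactly the trailing semicolon. With invented offsets, assuming a statement occupying bytes 10..16 whose last byte is the semicolon (BytePos subtraction behaves like plain u32 subtraction here):

    // Worked example with invented byte offsets.
    let original_span = Span::new(BytePos(10), BytePos(16), SyntaxContext::empty());
    let span_semi = original_span.with_lo(original_span.hi() - BytePos(1));
    assert_eq!((span_semi.lo(), span_semi.hi()), (BytePos(15), BytePos(16))); // the `;`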
@@ -457,8 +457,8 @@ impl Clean<Item> for doctree::Module {
         // the outer `mod` item for the source code.
         let whence = {
             let cm = cx.sess().codemap();
-            let outer = cm.lookup_char_pos(self.where_outer.lo);
-            let inner = cm.lookup_char_pos(self.where_inner.lo);
+            let outer = cm.lookup_char_pos(self.where_outer.lo());
+            let inner = cm.lookup_char_pos(self.where_inner.lo());
             if outer.file.start_pos == inner.file.start_pos {
                 // mod foo { ... }
                 self.where_outer
@@ -2251,8 +2251,8 @@ impl Clean<Span> for syntax_pos::Span {

         let cm = cx.sess().codemap();
         let filename = cm.span_to_filename(*self);
-        let lo = cm.lookup_char_pos(self.lo);
-        let hi = cm.lookup_char_pos(self.hi);
+        let lo = cm.lookup_char_pos(self.lo());
+        let hi = cm.lookup_char_pos(self.hi());
         Span {
             filename: filename.to_string(),
             loline: lo.line,
@@ -532,7 +532,7 @@ impl Collector {

     pub fn get_line(&self) -> usize {
         if let Some(ref codemap) = self.codemap {
-            let line = self.position.lo.to_usize();
+            let line = self.position.lo().to_usize();
             let line = codemap.lookup_char_pos(BytePos(line as u32)).line;
             if line > 0 { line - 1 } else { line }
         } else {
@@ -134,7 +134,7 @@ impl PathSegment {
     }
     pub fn crate_root(span: Span) -> Self {
         PathSegment {
-            identifier: Ident { ctxt: span.ctxt, ..keywords::CrateRoot.ident() },
+            identifier: Ident { ctxt: span.ctxt(), ..keywords::CrateRoot.ident() },
             span,
             parameters: None,
         }
@@ -1059,7 +1059,7 @@ impl MetaItem {
     fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
         where I: Iterator<Item = TokenTree>,
     {
-        let (mut span, name) = match tokens.next() {
+        let (span, name) = match tokens.next() {
             Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name),
             Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match nt.0 {
                 token::Nonterminal::NtIdent(ident) => (ident.span, ident.node.name),
@@ -1068,17 +1068,17 @@ impl MetaItem {
             },
             _ => return None,
         };
-        let list_closing_paren_pos = tokens.peek().map(|tt| tt.span().hi);
+        let list_closing_paren_pos = tokens.peek().map(|tt| tt.span().hi());
         let node = match MetaItemKind::from_tokens(tokens) {
             Some(node) => node,
             _ => return None,
         };
-        span.hi = match node {
-            MetaItemKind::NameValue(ref lit) => lit.span.hi,
-            MetaItemKind::List(..) => list_closing_paren_pos.unwrap_or(span.hi),
-            _ => span.hi,
+        let hi = match node {
+            MetaItemKind::NameValue(ref lit) => lit.span.hi(),
+            MetaItemKind::List(..) => list_closing_paren_pos.unwrap_or(span.hi()),
+            _ => span.hi(),
         };
-        Some(MetaItem { name: name, span: span, node: node })
+        Some(MetaItem { name, node, span: span.with_hi(hi) })
     }
 }

@@ -34,8 +34,8 @@ use errors::CodeMapper;
 /// otherwise return the call site span up to the `enclosing_sp` by
 /// following the `expn_info` chain.
 pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span {
-    let call_site1 = sp.ctxt.outer().expn_info().map(|ei| ei.call_site);
-    let call_site2 = enclosing_sp.ctxt.outer().expn_info().map(|ei| ei.call_site);
+    let call_site1 = sp.ctxt().outer().expn_info().map(|ei| ei.call_site);
+    let call_site2 = enclosing_sp.ctxt().outer().expn_info().map(|ei| ei.call_site);
     match (call_site1, call_site2) {
         (None, _) => sp,
         (Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp,
@@ -232,7 +232,7 @@ impl CodeMap {
     }

     pub fn mk_substr_filename(&self, sp: Span) -> String {
-        let pos = self.lookup_char_pos(sp.lo);
+        let pos = self.lookup_char_pos(sp.lo());
         (format!("<{}:{}:{}>",
                  pos.file.name,
                  pos.line,
@@ -299,18 +299,16 @@ impl CodeMap {
     /// * the lhs span needs to end on the same line the rhs span begins
     /// * the lhs span must start at or before the rhs span
     pub fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span> {
-        use std::cmp;
-
         // make sure we're at the same expansion id
-        if sp_lhs.ctxt != sp_rhs.ctxt {
+        if sp_lhs.ctxt() != sp_rhs.ctxt() {
             return None;
         }

-        let lhs_end = match self.lookup_line(sp_lhs.hi) {
+        let lhs_end = match self.lookup_line(sp_lhs.hi()) {
             Ok(x) => x,
             Err(_) => return None
         };
-        let rhs_begin = match self.lookup_line(sp_rhs.lo) {
+        let rhs_begin = match self.lookup_line(sp_rhs.lo()) {
             Ok(x) => x,
             Err(_) => return None
         };
@@ -321,12 +319,8 @@ impl CodeMap {
         }

         // ensure these follow the expected order and we don't overlap
-        if (sp_lhs.lo <= sp_rhs.lo) && (sp_lhs.hi <= sp_rhs.lo) {
-            Some(Span {
-                lo: cmp::min(sp_lhs.lo, sp_rhs.lo),
-                hi: cmp::max(sp_lhs.hi, sp_rhs.hi),
-                ctxt: sp_lhs.ctxt,
-            })
+        if (sp_lhs.lo() <= sp_rhs.lo()) && (sp_lhs.hi() <= sp_rhs.lo()) {
+            Some(sp_lhs.to(sp_rhs))
         } else {
             None
         }
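The interesting part of the merge_spans hunk is that the hand-written cmp::min/cmp::max construction (along with the use std::cmp; import) is replaced by sp_lhs.to(sp_rhs). Given the guard just above, which only merges when sp_lhs starts and ends no later than sp_rhs begins, this presumably yields the same result: a span running from sp_lhs.lo() to sp_rhs.hi() in the shared context. A sketch of what such a to combinator would do, inferred from this call site rather than taken from syntax_pos:

    use std::cmp;

    impl Span {
        // Assumed behaviour of Span::to, reconstructed from merge_spans.
        pub fn to(self, end: Span) -> Span {
            Span::new(cmp::min(self.lo(), end.lo()),
                      cmp::max(self.hi(), end.hi()),
                      self.ctxt())
        }
    }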
@@ -337,8 +331,8 @@ impl CodeMap {
             return "no-location".to_string();
         }

-        let lo = self.lookup_char_pos_adj(sp.lo);
-        let hi = self.lookup_char_pos_adj(sp.hi);
+        let lo = self.lookup_char_pos_adj(sp.lo());
+        let hi = self.lookup_char_pos_adj(sp.hi());
         return (format!("{}:{}:{}: {}:{}",
                         lo.filename,
                         lo.line,
@@ -348,19 +342,19 @@ impl CodeMap {
     }

     pub fn span_to_filename(&self, sp: Span) -> FileName {
-        self.lookup_char_pos(sp.lo).file.name.to_string()
+        self.lookup_char_pos(sp.lo()).file.name.to_string()
     }

     pub fn span_to_lines(&self, sp: Span) -> FileLinesResult {
         debug!("span_to_lines(sp={:?})", sp);

-        if sp.lo > sp.hi {
+        if sp.lo() > sp.hi() {
             return Err(SpanLinesError::IllFormedSpan(sp));
         }

-        let lo = self.lookup_char_pos(sp.lo);
+        let lo = self.lookup_char_pos(sp.lo());
         debug!("span_to_lines: lo={:?}", lo);
-        let hi = self.lookup_char_pos(sp.hi);
+        let hi = self.lookup_char_pos(sp.hi());
         debug!("span_to_lines: hi={:?}", hi);

         if lo.file.start_pos != hi.file.start_pos {
@@ -400,12 +394,12 @@ impl CodeMap {
     }

     pub fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError> {
-        if sp.lo > sp.hi {
+        if sp.lo() > sp.hi() {
             return Err(SpanSnippetError::IllFormedSpan(sp));
         }

-        let local_begin = self.lookup_byte_offset(sp.lo);
-        let local_end = self.lookup_byte_offset(sp.hi);
+        let local_begin = self.lookup_byte_offset(sp.lo());
+        let local_end = self.lookup_byte_offset(sp.hi());

         if local_begin.fm.start_pos != local_end.fm.start_pos {
             return Err(SpanSnippetError::DistinctSources(DistinctSources {
@@ -450,7 +444,7 @@ impl CodeMap {
             Ok(snippet) => {
                 let snippet = snippet.split(c).nth(0).unwrap_or("").trim_right();
                 if !snippet.is_empty() && !snippet.contains('\n') {
-                    Span { hi: BytePos(sp.lo.0 + snippet.len() as u32), ..sp }
+                    sp.with_hi(BytePos(sp.lo().0 + snippet.len() as u32))
                 } else {
                     sp
                 }
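The last hunk above trims a span so that it ends where the matched snippet ends: sp.with_hi(BytePos(sp.lo().0 + snippet.len() as u32)). With invented numbers, if sp starts at byte 100 and the snippet is 7 bytes long, the trimmed span covers bytes 100..107 while keeping sp's original context:

    // Worked example with invented offsets.
    let sp = Span::new(BytePos(100), BytePos(140), SyntaxContext::empty());
    let snippet = "foo_bar"; // 7 bytes
    let trimmed = sp.with_hi(BytePos(sp.lo().0 + snippet.len() as u32));
    assert_eq!((trimmed.lo(), trimmed.hi()), (BytePos(100), BytePos(107)));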
@ -752,7 +746,7 @@ mod tests {
|
||||||
fn t7() {
|
fn t7() {
|
||||||
// Test span_to_lines for a span ending at the end of filemap
|
// Test span_to_lines for a span ending at the end of filemap
|
||||||
let cm = init_code_map();
|
let cm = init_code_map();
|
||||||
let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION};
|
let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
|
||||||
let file_lines = cm.span_to_lines(span).unwrap();
|
let file_lines = cm.span_to_lines(span).unwrap();
|
||||||
|
|
||||||
assert_eq!(file_lines.file.name, "blork.rs");
|
assert_eq!(file_lines.file.name, "blork.rs");
|
||||||
|
@ -768,7 +762,7 @@ mod tests {
|
||||||
assert_eq!(input.len(), selection.len());
|
assert_eq!(input.len(), selection.len());
|
||||||
let left_index = selection.find('~').unwrap() as u32;
|
let left_index = selection.find('~').unwrap() as u32;
|
||||||
let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index);
|
let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index);
|
||||||
Span { lo: BytePos(left_index), hi: BytePos(right_index + 1), ctxt: NO_EXPANSION }
|
Span::new(BytePos(left_index), BytePos(right_index + 1), NO_EXPANSION)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Test span_to_snippet and span_to_lines for a span converting 3
|
/// Test span_to_snippet and span_to_lines for a span converting 3
|
||||||
|
@ -798,7 +792,7 @@ mod tests {
|
||||||
fn t8() {
|
fn t8() {
|
||||||
// Test span_to_snippet for a span ending at the end of filemap
|
// Test span_to_snippet for a span ending at the end of filemap
|
||||||
let cm = init_code_map();
|
let cm = init_code_map();
|
||||||
let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION};
|
let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
|
||||||
let snippet = cm.span_to_snippet(span);
|
let snippet = cm.span_to_snippet(span);
|
||||||
|
|
||||||
assert_eq!(snippet, Ok("second line".to_string()));
|
assert_eq!(snippet, Ok("second line".to_string()));
|
||||||
|
@ -808,7 +802,7 @@ mod tests {
|
||||||
fn t9() {
|
fn t9() {
|
||||||
// Test span_to_str for a span ending at the end of filemap
|
// Test span_to_str for a span ending at the end of filemap
|
||||||
let cm = init_code_map();
|
let cm = init_code_map();
|
||||||
let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION};
|
let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
|
||||||
let sstr = cm.span_to_string(span);
|
let sstr = cm.span_to_string(span);
|
||||||
|
|
||||||
assert_eq!(sstr, "blork.rs:2:1: 2:12");
|
assert_eq!(sstr, "blork.rs:2:1: 2:12");
|
||||||
|
@ -859,11 +853,11 @@ mod tests {
|
||||||
let lo = hi + offset;
|
let lo = hi + offset;
|
||||||
hi = lo + substring.len();
|
hi = lo + substring.len();
|
||||||
if i == n {
|
if i == n {
|
||||||
let span = Span {
|
let span = Span::new(
|
||||||
lo: BytePos(lo as u32 + file.start_pos.0),
|
BytePos(lo as u32 + file.start_pos.0),
|
||||||
hi: BytePos(hi as u32 + file.start_pos.0),
|
BytePos(hi as u32 + file.start_pos.0),
|
||||||
ctxt: NO_EXPANSION,
|
NO_EXPANSION,
|
||||||
};
|
);
|
||||||
assert_eq!(&self.span_to_snippet(span).unwrap()[..],
|
assert_eq!(&self.span_to_snippet(span).unwrap()[..],
|
||||||
substring);
|
substring);
|
||||||
return span;
|
return span;
|
||||||
|
|
|
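Most hunks in this commit follow the same mechanical pattern: direct field access on Span (sp.lo, sp.hi, sp.ctxt) becomes an accessor call (sp.lo(), sp.hi(), sp.ctxt()), struct literals become Span::new(..), and functional updates such as Span { ctxt: .., ..sp } become sp.with_ctxt(..). The following is a minimal standalone sketch of that accessor/builder shape, written only to illustrate the pattern; the toy BytePos, SyntaxContext and Span below are stand-ins and are not the real syntax_pos definitions.

    // Toy stand-ins for syntax_pos types; only the method shapes match the diff.
    #[derive(Copy, Clone, Debug, PartialEq, Eq)]
    pub struct BytePos(pub u32);

    #[derive(Copy, Clone, Debug, PartialEq, Eq, Default)]
    pub struct SyntaxContext(u32);

    #[derive(Copy, Clone, Debug, PartialEq, Eq)]
    pub struct Span {
        lo: BytePos,         // private: read through lo()
        hi: BytePos,         // private: read through hi()
        ctxt: SyntaxContext, // private: read through ctxt()
    }

    impl Span {
        pub fn new(lo: BytePos, hi: BytePos, ctxt: SyntaxContext) -> Span {
            Span { lo, hi, ctxt }
        }
        pub fn lo(&self) -> BytePos { self.lo }
        pub fn hi(&self) -> BytePos { self.hi }
        pub fn ctxt(&self) -> SyntaxContext { self.ctxt }
        // with_* replace the old `Span { lo: .., ..span }` functional updates.
        pub fn with_lo(self, lo: BytePos) -> Span { Span { lo, ..self } }
        pub fn with_hi(self, hi: BytePos) -> Span { Span { hi, ..self } }
        pub fn with_ctxt(self, ctxt: SyntaxContext) -> Span { Span { ctxt, ..self } }
    }

    fn main() {
        let sp = Span::new(BytePos(12), BytePos(23), SyntaxContext::default());
        // e.g. the span_to_snippet truncation above: keep lo, shorten hi.
        let short = sp.with_hi(BytePos(sp.lo().0 + 5));
        assert_eq!((short.lo(), short.hi()), (BytePos(12), BytePos(17)));
    }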
@@ -14,7 +14,6 @@ use {fold, attr};
 use ast;
 use codemap::Spanned;
 use parse::{token, ParseSess};
-use syntax_pos::Span;

 use ptr::P;
 use util::small_vector::SmallVector;
@@ -89,10 +88,10 @@ impl<'a> StripUnconfigured<'a> {
 parser.expect(&token::OpenDelim(token::Paren))?;
 let cfg = parser.parse_meta_item()?;
 parser.expect(&token::Comma)?;
-let lo = parser.span.lo;
+let lo = parser.span.lo();
 let (path, tokens) = parser.parse_path_and_tokens()?;
 parser.expect(&token::CloseDelim(token::Paren))?;
-Ok((cfg, path, tokens, Span { lo: lo, ..parser.prev_span }))
+Ok((cfg, path, tokens, parser.prev_span.with_lo(lo)))
 }) {
 Ok(result) => result,
 Err(mut e) => {

@@ -47,7 +47,7 @@ pub struct ErrorLocation {
 impl ErrorLocation {
 /// Create an error location from a span.
 pub fn from_span(ecx: &ExtCtxt, sp: Span) -> ErrorLocation {
-let loc = ecx.codemap().lookup_char_pos_adj(sp.lo);
+let loc = ecx.codemap().lookup_char_pos_adj(sp.lo());
 ErrorLocation {
 filename: loc.filename,
 line: loc.line

@@ -731,7 +731,7 @@ impl<'a> ExtCtxt<'a> {
 // Stop going up the backtrace once include! is encountered
 return None;
 }
-ctxt = info.call_site.ctxt;
+ctxt = info.call_site.ctxt();
 last_macro = Some(info.call_site);
 Some(())
 }).is_none() {
@@ -837,7 +837,7 @@ pub fn expr_to_spanned_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &st
 -> Option<Spanned<(Symbol, ast::StrStyle)>> {
 // Update `expr.span`'s ctxt now in case expr is an `include!` macro invocation.
 let expr = expr.map(|mut expr| {
-expr.span.ctxt = expr.span.ctxt.apply_mark(cx.current_expansion.mark);
+expr.span = expr.span.with_ctxt(expr.span.ctxt().apply_mark(cx.current_expansion.mark));
 expr
 });

@@ -755,7 +755,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
 }

 fn expr_fail(&self, span: Span, msg: Symbol) -> P<ast::Expr> {
-let loc = self.codemap().lookup_char_pos(span.lo);
+let loc = self.codemap().lookup_char_pos(span.lo());
 let expr_file = self.expr_str(span, Symbol::intern(&loc.file.name));
 let expr_line = self.expr_u32(span, loc.line as u32);
 let expr_col = self.expr_u32(span, loc.col.to_usize() as u32 + 1);

@@ -68,7 +68,7 @@ pub fn add_derived_markers<T>(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path]
 },
 });

-let span = Span { ctxt: cx.backtrace(), ..span };
+let span = span.with_ctxt(cx.backtrace());
 item.map_attrs(|mut attrs| {
 if names.contains(&Symbol::intern("Eq")) && names.contains(&Symbol::intern("PartialEq")) {
 let meta = cx.meta_word(span, Symbol::intern("structural_match"));
@@ -598,7 +598,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
 match *ext {
 ProcMacroDerive(ref ext, _) => {
 invoc.expansion_data.mark.set_expn_info(expn_info);
-let span = Span { ctxt: self.cx.backtrace(), ..span };
+let span = span.with_ctxt(self.cx.backtrace());
 let dummy = ast::MetaItem { // FIXME(jseyfried) avoid this
 name: keywords::Invalid.name(),
 span: DUMMY_SP,
@@ -609,7 +609,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
 BuiltinDerive(func) => {
 expn_info.callee.allow_internal_unstable = true;
 invoc.expansion_data.mark.set_expn_info(expn_info);
-let span = Span { ctxt: self.cx.backtrace(), ..span };
+let span = span.with_ctxt(self.cx.backtrace());
 let mut items = Vec::new();
 func(self.cx, span, &attr.meta().unwrap(), &item, &mut |a| items.push(a));
 kind.expect_from_annotatables(items)
@@ -684,8 +684,8 @@ impl<'a> Parser<'a> {
 if self.token != token::Eof {
 let msg = format!("macro expansion ignores token `{}` and any following",
 self.this_token_to_string());
-let mut def_site_span = self.span;
-def_site_span.ctxt = SyntaxContext::empty(); // Avoid emitting backtrace info twice.
+// Avoid emitting backtrace info twice.
+let def_site_span = self.span.with_ctxt(SyntaxContext::empty());
 let mut err = self.diagnostic().struct_span_err(def_site_span, &msg);
 let msg = format!("caused by the macro expansion here; the usage \
 of `{}!` is likely invalid in {} context",
@@ -1069,9 +1069,8 @@ impl Folder for Marker {
 ident
 }

-fn new_span(&mut self, mut span: Span) -> Span {
-span.ctxt = span.ctxt.apply_mark(self.0);
-span
+fn new_span(&mut self, span: Span) -> Span {
+span.with_ctxt(span.ctxt().apply_mark(self.0))
 }

 fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {

@@ -36,7 +36,7 @@ pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
 base::check_zero_tts(cx, sp, tts, "line!");

 let topmost = cx.expansion_cause().unwrap_or(sp);
-let loc = cx.codemap().lookup_char_pos(topmost.lo);
+let loc = cx.codemap().lookup_char_pos(topmost.lo());

 base::MacEager::expr(cx.expr_u32(topmost, loc.line as u32))
 }
@@ -47,7 +47,7 @@ pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
 base::check_zero_tts(cx, sp, tts, "column!");

 let topmost = cx.expansion_cause().unwrap_or(sp);
-let loc = cx.codemap().lookup_char_pos(topmost.lo);
+let loc = cx.codemap().lookup_char_pos(topmost.lo());

 base::MacEager::expr(cx.expr_u32(topmost, loc.col.to_usize() as u32))
 }
@@ -70,7 +70,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
 base::check_zero_tts(cx, sp, tts, "file!");

 let topmost = cx.expansion_cause().unwrap_or(sp);
-let loc = cx.codemap().lookup_char_pos(topmost.lo);
+let loc = cx.codemap().lookup_char_pos(topmost.lo());
 base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name)))
 }
@@ -329,7 +329,8 @@ fn inner_parse_loop(sess: &ParseSess,
 // Only touch the binders we have actually bound
 for idx in item.match_lo..item.match_hi {
 let sub = item.matches[idx].clone();
-new_pos.push_match(idx, MatchedSeq(sub, Span { lo: item.sp_lo, ..span }));
+let span = span.with_lo(item.sp_lo);
+new_pos.push_match(idx, MatchedSeq(sub, span));
 }

 new_pos.match_cur = item.match_hi;
@@ -379,7 +380,7 @@ fn inner_parse_loop(sess: &ParseSess,
 match_cur: item.match_cur,
 match_hi: item.match_cur + seq.num_captures,
 up: Some(item),
-sp_lo: sp.lo,
+sp_lo: sp.lo(),
 top_elts: Tt(TokenTree::Sequence(sp, seq)),
 }));
 }
@@ -424,7 +425,7 @@ pub fn parse(sess: &ParseSess,
 recurse_into_modules: bool)
 -> NamedParseResult {
 let mut parser = Parser::new(sess, tts, directory, recurse_into_modules, true);
-let mut cur_items = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo));
+let mut cur_items = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo()));
 let mut next_items = Vec::new(); // or proceed normally

 loop {

@@ -130,7 +130,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
 tts = tts.map_enumerated(|i, tt| {
 let mut tt = tt.clone();
 let mut sp = rhs_spans[i];
-sp.ctxt = tt.span().ctxt;
+sp = sp.with_ctxt(tt.span().ctxt());
 tt.set_span(sp);
 tt
 });
@@ -161,7 +161,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
 macro_ident: name
 })
 }
-Failure(sp, tok) => if sp.lo >= best_fail_spot.lo {
+Failure(sp, tok) => if sp.lo() >= best_fail_spot.lo() {
 best_fail_spot = sp;
 best_fail_tok = Some(tok);
 },
@@ -37,7 +37,7 @@ impl Delimited {
 let open_span = if span == DUMMY_SP {
 DUMMY_SP
 } else {
-Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span }
+span.with_hi(span.lo() + BytePos(self.delim.len() as u32))
 };
 TokenTree::Token(open_span, self.open_token())
 }
@@ -46,7 +46,7 @@ impl Delimited {
 let close_span = if span == DUMMY_SP {
 DUMMY_SP
 } else {
-Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span }
+span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
 };
 TokenTree::Token(close_span, self.close_token())
 }
@@ -152,7 +152,7 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars
 Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
 Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() {
 Some(kind) => {
-let span = Span { lo: start_sp.lo, ..end_sp };
+let span = end_sp.with_lo(start_sp.lo());
 result.push(TokenTree::MetaVarDecl(span, ident, kind));
 continue
 }
@@ -198,7 +198,7 @@ fn parse_tree<I>(tree: tokenstream::TokenTree,
 }
 Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
 let ident = token.ident().unwrap();
-let span = Span { lo: span.lo, ..ident_span };
+let span = ident_span.with_lo(span.lo());
 if ident.name == keywords::Crate.name() {
 let ident = ast::Ident { name: keywords::DollarCrate.name(), ..ident };
 TokenTree::Token(span, token::Ident(ident))

@@ -155,7 +155,7 @@ pub fn transcribe(cx: &ExtCtxt,
 if let NtTT(ref tt) = **nt {
 result.push(tt.clone().into());
 } else {
-sp.ctxt = sp.ctxt.apply_mark(cx.current_expansion.mark);
+sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark));
 let token = TokenTree::Token(sp, Token::interpolated((**nt).clone()));
 result.push(token.into());
 }
@@ -166,13 +166,13 @@ pub fn transcribe(cx: &ExtCtxt,
 } else {
 let ident =
 Ident { ctxt: ident.ctxt.apply_mark(cx.current_expansion.mark), ..ident };
-sp.ctxt = sp.ctxt.apply_mark(cx.current_expansion.mark);
+sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark));
 result.push(TokenTree::Token(sp, token::Dollar).into());
 result.push(TokenTree::Token(sp, token::Ident(ident)).into());
 }
 }
 quoted::TokenTree::Delimited(mut span, delimited) => {
-span.ctxt = span.ctxt.apply_mark(cx.current_expansion.mark);
+span = span.with_ctxt(span.ctxt().apply_mark(cx.current_expansion.mark));
 stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
 result_stack.push(mem::replace(&mut result, Vec::new()));
 }
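The transcribe and expansion hunks repeat one idiom: read the span's current syntax context, apply the current expansion mark, and store the result back with with_ctxt(). Below is a rough standalone sketch of that shape only; the Ctxt type, the u32 mark, and the mixing step inside apply_mark are placeholders, not rustc's real hygiene algebra.

    // Placeholder hygiene sketch: Ctxt and apply_mark are toys; only the call
    // shape `sp = sp.with_ctxt(sp.ctxt().apply_mark(mark))` mirrors the diff.
    #[derive(Copy, Clone, Debug, PartialEq, Eq)]
    struct Ctxt(u32);

    impl Ctxt {
        fn apply_mark(self, mark: u32) -> Ctxt {
            // toy mixing step standing in for pushing a mark onto the context chain
            Ctxt(self.0.wrapping_mul(31).wrapping_add(mark))
        }
    }

    #[allow(dead_code)]
    #[derive(Copy, Clone, Debug)]
    struct Span { lo: u32, hi: u32, ctxt: Ctxt }

    impl Span {
        fn ctxt(&self) -> Ctxt { self.ctxt }
        fn with_ctxt(self, ctxt: Ctxt) -> Span { Span { ctxt, ..self } }
    }

    fn main() {
        let mut sp = Span { lo: 0, hi: 4, ctxt: Ctxt(0) };
        let current_expansion_mark = 7; // hypothetical mark id
        sp = sp.with_ctxt(sp.ctxt().apply_mark(current_expansion_mark));
        assert_eq!(sp.ctxt(), Ctxt(7));
    }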
@@ -230,8 +230,8 @@ impl DiagnosticSpan {
 mut backtrace: vec::IntoIter<MacroBacktrace>,
 je: &JsonEmitter)
 -> DiagnosticSpan {
-let start = je.cm.lookup_char_pos(span.lo);
-let end = je.cm.lookup_char_pos(span.hi);
+let start = je.cm.lookup_char_pos(span.lo());
+let end = je.cm.lookup_char_pos(span.hi());
 let backtrace_step = backtrace.next().map(|bt| {
 let call_site =
 Self::from_span_full(bt.call_site,
@@ -256,8 +256,8 @@ impl DiagnosticSpan {
 });
 DiagnosticSpan {
 file_name: start.file.name.clone(),
-byte_start: span.lo.0 - start.file.start_pos.0,
-byte_end: span.hi.0 - start.file.start_pos.0,
+byte_start: span.lo().0 - start.file.start_pos.0,
+byte_end: span.hi().0 - start.file.start_pos.0,
 line_start: start.line,
 line_end: end.line,
 column_start: start.col.0 + 1,

@@ -386,7 +386,7 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: String, srdr: &mut R
 debug!("tok lit: {}", s);
 literals.push(Literal {
 lit: s.to_string(),
-pos: sp.lo,
+pos: sp.lo(),
 });
 })
 } else {

@@ -71,7 +71,7 @@ pub struct StringReader<'a> {

 impl<'a> StringReader<'a> {
 fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
-unwrap_or!(self.override_span, Span { lo: lo, hi: hi, ctxt: NO_EXPANSION})
+unwrap_or!(self.override_span, Span::new(lo, hi, NO_EXPANSION))
 }

 fn next_token(&mut self) -> TokenAndSpan where Self: Sized {
@@ -190,20 +190,20 @@ impl<'a> StringReader<'a> {
 }

 pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
-let begin = sess.codemap().lookup_byte_offset(span.lo);
-let end = sess.codemap().lookup_byte_offset(span.hi);
+let begin = sess.codemap().lookup_byte_offset(span.lo());
+let end = sess.codemap().lookup_byte_offset(span.hi());

 // Make the range zero-length if the span is invalid.
-if span.lo > span.hi || begin.fm.start_pos != end.fm.start_pos {
-span.hi = span.lo;
+if span.lo() > span.hi() || begin.fm.start_pos != end.fm.start_pos {
+span = span.with_hi(span.lo());
 }

 let mut sr = StringReader::new_raw_internal(sess, begin.fm);

 // Seek the lexer to the right byte range.
 sr.save_new_lines_and_multibyte = false;
-sr.next_pos = span.lo;
-sr.terminator = Some(span.hi);
+sr.next_pos = span.lo();
+sr.terminator = Some(span.hi());

 sr.bump();

@@ -1745,11 +1745,7 @@ mod tests {
 let tok1 = string_reader.next_token();
 let tok2 = TokenAndSpan {
 tok: token::Ident(id),
-sp: Span {
-lo: BytePos(21),
-hi: BytePos(23),
-ctxt: NO_EXPANSION,
-},
+sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
 };
 assert_eq!(tok1, tok2);
 assert_eq!(string_reader.next_token().tok, token::Whitespace);
@@ -1759,11 +1755,7 @@ mod tests {
 let tok3 = string_reader.next_token();
 let tok4 = TokenAndSpan {
 tok: token::Ident(Ident::from_str("main")),
-sp: Span {
-lo: BytePos(24),
-hi: BytePos(28),
-ctxt: NO_EXPANSION,
-},
+sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
 };
 assert_eq!(tok3, tok4);
 // the lparen is already read:
@@ -1921,7 +1913,7 @@ mod tests {
 let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
 let comment = lexer.next_token();
 assert_eq!(comment.tok, token::Comment);
-assert_eq!((comment.sp.lo, comment.sp.hi), (BytePos(0), BytePos(7)));
+assert_eq!((comment.sp.lo(), comment.sp.hi()), (BytePos(0), BytePos(7)));
 assert_eq!(lexer.next_token().tok, token::Whitespace);
 assert_eq!(lexer.next_token().tok,
 token::DocComment(Symbol::intern("/// test")));
@@ -11,7 +11,6 @@
 use print::pprust::token_to_string;
 use parse::lexer::StringReader;
 use parse::{token, PResult};
-use syntax_pos::Span;
 use tokenstream::{Delimited, TokenStream, TokenTree};

 impl<'a> StringReader<'a> {
@@ -20,7 +19,7 @@ impl<'a> StringReader<'a> {
 let mut tts = Vec::new();
 while self.token != token::Eof {
 let tree = self.parse_token_tree()?;
-let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token);
+let is_joint = tree.span().hi() == self.span.lo() && token::is_op(&self.token);
 tts.push(if is_joint { tree.joint() } else { tree.into() });
 }
 Ok(TokenStream::concat(tts))
@@ -40,7 +39,7 @@ impl<'a> StringReader<'a> {
 return TokenStream::concat(tts);
 }
 };
-let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token);
+let is_joint = tree.span().hi() == self.span.lo() && token::is_op(&self.token);
 tts.push(if is_joint { tree.joint() } else { tree.into() });
 }
 }
@@ -69,7 +68,7 @@ impl<'a> StringReader<'a> {
 let tts = self.parse_token_trees_until_close_delim();

 // Expand to cover the entire delimited token tree
-let span = Span { hi: self.span.hi, ..pre_span };
+let span = pre_span.with_hi(self.span.hi());

 match self.token {
 // Correct delimiter.

@@ -340,7 +340,7 @@ pub fn check_for_substitution<'a>(reader: &StringReader<'a>,
 .iter()
 .find(|&&(c, _, _)| c == ch)
 .map(|&(_, u_name, ascii_char)| {
-let span = Span { lo: reader.pos, hi: reader.next_pos, ctxt: NO_EXPANSION };
+let span = Span::new(reader.pos, reader.next_pos, NO_EXPANSION);
 match ASCII_ARRAY.iter().find(|&&(c, _)| c == ascii_char) {
 Some(&(ascii_char, ascii_name)) => {
 let msg =

@@ -181,7 +181,7 @@ pub fn filemap_to_parser(sess: & ParseSess, filemap: Rc<FileMap>, ) -> Parser {
 let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap, None));

 if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP {
-parser.span = Span { lo: end_pos, hi: end_pos, ctxt: NO_EXPANSION };
+parser.span = Span::new(end_pos, end_pos, NO_EXPANSION);
 }

 parser
@@ -661,7 +661,7 @@ mod tests {

 // produce a syntax_pos::span
 fn sp(a: u32, b: u32) -> Span {
-Span {lo: BytePos(a), hi: BytePos(b), ctxt: NO_EXPANSION}
+Span::new(BytePos(a), BytePos(b), NO_EXPANSION)
 }

 fn str2seg(s: &str, lo: u32, hi: u32) -> ast::PathSegment {
@@ -976,7 +976,7 @@ mod tests {

 for &src in &srcs {
 let spans = get_spans_of_pat_idents(src);
-let Span{ lo, hi, .. } = spans[0];
+let (lo, hi) = (spans[0].lo(), spans[0].hi());
 assert!("self" == &src[lo.to_usize()..hi.to_usize()],
 "\"{}\" != \"self\". src=\"{}\"",
 &src[lo.to_usize()..hi.to_usize()], src)
@@ -790,9 +790,8 @@ impl<'a> Parser<'a> {
 Ok(())
 }
 token::AndAnd => {
-let span = self.span;
-let lo = span.lo + BytePos(1);
-Ok(self.bump_with(token::BinOp(token::And), Span { lo: lo, ..span }))
+let span = self.span.with_lo(self.span.lo() + BytePos(1));
+Ok(self.bump_with(token::BinOp(token::And), span))
 }
 _ => self.unexpected()
 }
@@ -824,9 +823,8 @@ impl<'a> Parser<'a> {
 true
 }
 token::BinOp(token::Shl) => {
-let span = self.span;
-let lo = span.lo + BytePos(1);
-self.bump_with(token::Lt, Span { lo: lo, ..span });
+let span = self.span.with_lo(self.span.lo() + BytePos(1));
+self.bump_with(token::Lt, span);
 true
 }
 _ => false,
@@ -852,19 +850,16 @@ impl<'a> Parser<'a> {
 Ok(())
 }
 token::BinOp(token::Shr) => {
-let span = self.span;
-let lo = span.lo + BytePos(1);
-Ok(self.bump_with(token::Gt, Span { lo: lo, ..span }))
+let span = self.span.with_lo(self.span.lo() + BytePos(1));
+Ok(self.bump_with(token::Gt, span))
 }
 token::BinOpEq(token::Shr) => {
-let span = self.span;
-let lo = span.lo + BytePos(1);
-Ok(self.bump_with(token::Ge, Span { lo: lo, ..span }))
+let span = self.span.with_lo(self.span.lo() + BytePos(1));
+Ok(self.bump_with(token::Ge, span))
 }
 token::Ge => {
-let span = self.span;
-let lo = span.lo + BytePos(1);
-Ok(self.bump_with(token::Eq, Span { lo: lo, ..span }))
+let span = self.span.with_lo(self.span.lo() + BytePos(1));
+Ok(self.bump_with(token::Eq, span))
 }
 _ => self.unexpected()
 }
@@ -1094,7 +1089,7 @@ impl<'a> Parser<'a> {
 /// Advance the parser using provided token as a next one. Use this when
 /// consuming a part of a token. For example a single `<` from `<<`.
 pub fn bump_with(&mut self, next: token::Token, span: Span) {
-self.prev_span = Span { hi: span.lo, ..self.span };
+self.prev_span = self.span.with_hi(span.lo());
 // It would be incorrect to record the kind of the current token, but
 // fortunately for tokens currently using `bump_with`, the
 // prev_token_kind will be of no use anyway.
@@ -1356,7 +1351,7 @@ impl<'a> Parser<'a> {
 if self.eat(&token::RArrow) {
 Ok(FunctionRetTy::Ty(self.parse_ty_no_plus()?))
 } else {
-Ok(FunctionRetTy::Default(Span { hi: self.span.lo, ..self.span }))
+Ok(FunctionRetTy::Default(self.span.with_hi(self.span.lo())))
 }
 }

@@ -2532,7 +2527,7 @@ impl<'a> Parser<'a> {

 pub fn process_potential_macro_variable(&mut self) {
 let ident = match self.token {
-token::Dollar if self.span.ctxt != syntax_pos::hygiene::SyntaxContext::empty() &&
+token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() &&
 self.look_ahead(1, |t| t.is_ident()) => {
 self.bump();
 let name = match self.token { token::Ident(ident) => ident, _ => unreachable!() };
@@ -2734,8 +2729,8 @@ impl<'a> Parser<'a> {
 err.span_label(self.span,
 "expecting a type here because of type ascription");
 let cm = self.sess.codemap();
-let cur_pos = cm.lookup_char_pos(self.span.lo);
-let op_pos = cm.lookup_char_pos(cur_op_span.hi);
+let cur_pos = cm.lookup_char_pos(self.span.lo());
+let op_pos = cm.lookup_char_pos(cur_op_span.hi());
 if cur_pos.line != op_pos.line {
 err.span_suggestion_short(cur_op_span,
 "did you mean to use `;` here?",
@@ -4056,7 +4051,7 @@ impl<'a> Parser<'a> {
 let mut stmt_span = stmt.span;
 // expand the span to include the semicolon, if it exists
 if self.eat(&token::Semi) {
-stmt_span.hi = self.prev_span.hi;
+stmt_span = stmt_span.with_hi(self.prev_span.hi());
 }
 let sugg = pprust::to_string(|s| {
 use print::pprust::{PrintState, INDENT_UNIT};
@@ -4148,7 +4143,7 @@ impl<'a> Parser<'a> {
 stmt = stmt.add_trailing_semicolon();
 }

-stmt.span.hi = self.prev_span.hi;
+stmt.span = stmt.span.with_hi(self.prev_span.hi());
 Ok(Some(stmt))
 }
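The parser hunks above shrink a span instead of mutating its fields: when a compound token such as `&&` or `>>` is split, the half that remains keeps its end position and gets a start advanced by one byte (self.span.with_lo(self.span.lo() + BytePos(1))), while bump_with records the consumed half as self.span.with_hi(span.lo()). A small standalone sketch of that arithmetic, with plain u32 byte offsets standing in for BytePos:

    // Plain-u32 sketch of the span arithmetic at the bump_with call sites above;
    // each (lo, hi) pair is a half-open byte range like a rustc Span.
    fn split_compound_token(lo: u32, hi: u32) -> ((u32, u32), (u32, u32)) {
        assert!(hi - lo == 2, "expected a two-character token such as `&&`");
        let consumed = (lo, lo + 1);  // what prev_span becomes: with_hi(span.lo())
        let remaining = (lo + 1, hi); // what with_lo(lo() + BytePos(1)) produces
        (consumed, remaining)
    }

    fn main() {
        // `>>` at bytes 30..32 splits into `>` at 30..31 (consumed) and `>` at 31..32.
        assert_eq!(split_compound_token(30, 32), ((30, 31), (31, 32)));
    }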
@@ -603,8 +603,8 @@ pub trait PrintState<'a> {
 }

 fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> {
-self.maybe_print_comment(lit.span.lo)?;
-if let Some(ltrl) = self.next_lit(lit.span.lo) {
+self.maybe_print_comment(lit.span.lo())?;
+if let Some(ltrl) = self.next_lit(lit.span.lo()) {
 return self.writer().word(&ltrl.lit);
 }
 match lit.node {
@@ -723,7 +723,7 @@ pub trait PrintState<'a> {
 if !is_inline {
 self.hardbreak_if_not_bol()?;
 }
-self.maybe_print_comment(attr.span.lo)?;
+self.maybe_print_comment(attr.span.lo())?;
 if attr.is_sugared_doc {
 self.writer().word(&attr.value_str().unwrap().as_str())?;
 self.writer().hardbreak()
@@ -892,7 +892,7 @@ impl<'a> State<'a> {
 }
 pub fn bclose_maybe_open(&mut self, span: syntax_pos::Span,
 indented: usize, close_box: bool) -> io::Result<()> {
-self.maybe_print_comment(span.hi)?;
+self.maybe_print_comment(span.hi())?;
 self.break_offset_if_not_bol(1, -(indented as isize))?;
 self.s.word("}")?;
 if close_box {
@@ -950,13 +950,13 @@ impl<'a> State<'a> {
 let len = elts.len();
 let mut i = 0;
 for elt in elts {
-self.maybe_print_comment(get_span(elt).hi)?;
+self.maybe_print_comment(get_span(elt).hi())?;
 op(self, elt)?;
 i += 1;
 if i < len {
 self.s.word(",")?;
 self.maybe_print_trailing_comment(get_span(elt),
-Some(get_span(&elts[i]).hi))?;
+Some(get_span(&elts[i]).hi()))?;
 self.space_if_not_bol()?;
 }
 }
@@ -996,7 +996,7 @@ impl<'a> State<'a> {
 }

 pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> {
-self.maybe_print_comment(ty.span.lo)?;
+self.maybe_print_comment(ty.span.lo())?;
 self.ibox(0)?;
 match ty.node {
 ast::TyKind::Slice(ref ty) => {
@@ -1094,7 +1094,7 @@ impl<'a> State<'a> {
 pub fn print_foreign_item(&mut self,
 item: &ast::ForeignItem) -> io::Result<()> {
 self.hardbreak_if_not_bol()?;
-self.maybe_print_comment(item.span.lo)?;
+self.maybe_print_comment(item.span.lo())?;
 self.print_outer_attributes(&item.attrs)?;
 match item.node {
 ast::ForeignItemKind::Fn(ref decl, ref generics) => {
@@ -1163,7 +1163,7 @@ impl<'a> State<'a> {
 /// Pretty-print an item
 pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
 self.hardbreak_if_not_bol()?;
-self.maybe_print_comment(item.span.lo)?;
+self.maybe_print_comment(item.span.lo())?;
 self.print_outer_attributes(&item.attrs)?;
 self.ann.pre(self, NodeItem(item))?;
 match item.node {
@@ -1433,7 +1433,7 @@ impl<'a> State<'a> {
 self.bopen()?;
 for v in variants {
 self.space_if_not_bol()?;
-self.maybe_print_comment(v.span.lo)?;
+self.maybe_print_comment(v.span.lo())?;
 self.print_outer_attributes(&v.node.attrs)?;
 self.ibox(INDENT_UNIT)?;
 self.print_variant(v)?;
@@ -1481,7 +1481,7 @@ impl<'a> State<'a> {
 self.commasep(
 Inconsistent, struct_def.fields(),
 |s, field| {
-s.maybe_print_comment(field.span.lo)?;
+s.maybe_print_comment(field.span.lo())?;
 s.print_outer_attributes(&field.attrs)?;
 s.print_visibility(&field.vis)?;
 s.print_type(&field.ty)
@@ -1503,7 +1503,7 @@ impl<'a> State<'a> {

 for field in struct_def.fields() {
 self.hardbreak_if_not_bol()?;
-self.maybe_print_comment(field.span.lo)?;
+self.maybe_print_comment(field.span.lo())?;
 self.print_outer_attributes(&field.attrs)?;
 self.print_visibility(&field.vis)?;
 self.print_ident(field.ident.unwrap())?;
@@ -1548,7 +1548,7 @@ impl<'a> State<'a> {
 -> io::Result<()> {
 self.ann.pre(self, NodeSubItem(ti.id))?;
 self.hardbreak_if_not_bol()?;
-self.maybe_print_comment(ti.span.lo)?;
+self.maybe_print_comment(ti.span.lo())?;
 self.print_outer_attributes(&ti.attrs)?;
 match ti.node {
 ast::TraitItemKind::Const(ref ty, ref default) => {
@@ -1590,7 +1590,7 @@ impl<'a> State<'a> {
 pub fn print_impl_item(&mut self, ii: &ast::ImplItem) -> io::Result<()> {
 self.ann.pre(self, NodeSubItem(ii.id))?;
 self.hardbreak_if_not_bol()?;
-self.maybe_print_comment(ii.span.lo)?;
+self.maybe_print_comment(ii.span.lo())?;
 self.print_outer_attributes(&ii.attrs)?;
 self.print_defaultness(ii.defaultness)?;
 match ii.node {
@@ -1622,7 +1622,7 @@ impl<'a> State<'a> {
 }

 pub fn print_stmt(&mut self, st: &ast::Stmt) -> io::Result<()> {
-self.maybe_print_comment(st.span.lo)?;
+self.maybe_print_comment(st.span.lo())?;
 match st.node {
 ast::StmtKind::Local(ref loc) => {
 self.print_outer_attributes(&loc.attrs)?;
@@ -1705,7 +1705,7 @@ impl<'a> State<'a> {
 BlockCheckMode::Unsafe(..) => self.word_space("unsafe")?,
 BlockCheckMode::Default => ()
 }
-self.maybe_print_comment(blk.span.lo)?;
+self.maybe_print_comment(blk.span.lo())?;
 self.ann.pre(self, NodeBlock(blk))?;
 self.bopen()?;

@@ -1714,10 +1714,10 @@ impl<'a> State<'a> {
 for (i, st) in blk.stmts.iter().enumerate() {
 match st.node {
 ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => {
-self.maybe_print_comment(st.span.lo)?;
+self.maybe_print_comment(st.span.lo())?;
 self.space_if_not_bol()?;
 self.print_expr_outer_attr_style(expr, false)?;
-self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi))?;
+self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi()))?;
 }
 _ => self.print_stmt(st)?,
 }
@@ -1988,7 +1988,7 @@ impl<'a> State<'a> {
 fn print_expr_outer_attr_style(&mut self,
 expr: &ast::Expr,
 is_inline: bool) -> io::Result<()> {
-self.maybe_print_comment(expr.span.lo)?;
+self.maybe_print_comment(expr.span.lo())?;

 let attrs = &expr.attrs;
 if is_inline {
@@ -2343,7 +2343,7 @@ impl<'a> State<'a> {
 defaults_to_global: bool)
 -> io::Result<()>
 {
-self.maybe_print_comment(path.span.lo)?;
+self.maybe_print_comment(path.span.lo())?;

 let mut segments = path.segments[..path.segments.len()-depth].iter();
 if defaults_to_global && path.is_global() {
@@ -2465,7 +2465,7 @@ impl<'a> State<'a> {
 }

 pub fn print_pat(&mut self, pat: &ast::Pat) -> io::Result<()> {
-self.maybe_print_comment(pat.span.lo)?;
+self.maybe_print_comment(pat.span.lo())?;
 self.ann.pre(self, NodePat(pat))?;
 /* Pat isn't normalized, but the beauty of it
 is that it doesn't matter */
@@ -2607,7 +2607,7 @@ impl<'a> State<'a> {
 }
 self.cbox(INDENT_UNIT)?;
 self.ibox(0)?;
-self.maybe_print_comment(arm.pats[0].span.lo)?;
+self.maybe_print_comment(arm.pats[0].span.lo())?;
 self.print_outer_attributes(&arm.attrs)?;
 let mut first = true;
 for p in &arm.pats {
@@ -2715,7 +2715,7 @@ impl<'a> State<'a> {
 match decl.output {
 ast::FunctionRetTy::Ty(ref ty) => {
 self.print_type(ty)?;
-self.maybe_print_comment(ty.span.lo)
+self.maybe_print_comment(ty.span.lo())
 }
 ast::FunctionRetTy::Default(..) => unreachable!(),
 }
@@ -2971,7 +2971,7 @@ impl<'a> State<'a> {
 self.end()?;

 match decl.output {
-ast::FunctionRetTy::Ty(ref output) => self.maybe_print_comment(output.span.lo),
+ast::FunctionRetTy::Ty(ref output) => self.maybe_print_comment(output.span.lo()),
 _ => Ok(())
 }
 }
@@ -3017,10 +3017,10 @@ impl<'a> State<'a> {
 };
 if let Some(ref cmnt) = self.next_comment() {
 if cmnt.style != comments::Trailing { return Ok(()) }
-let span_line = cm.lookup_char_pos(span.hi);
+let span_line = cm.lookup_char_pos(span.hi());
 let comment_line = cm.lookup_char_pos(cmnt.pos);
 let next = next_pos.unwrap_or(cmnt.pos + BytePos(1));
-if span.hi < cmnt.pos && cmnt.pos < next && span_line.line == comment_line.line {
+if span.hi() < cmnt.pos && cmnt.pos < next && span_line.line == comment_line.line {
 self.print_comment(cmnt)?;
 }
 }
@@ -31,7 +31,7 @@ fn ignored_span(sp: Span) -> Span {
 allow_internal_unsafe: false,
 }
 });
-Span { ctxt: SyntaxContext::empty().apply_mark(mark), ..sp }
+sp.with_ctxt(SyntaxContext::empty().apply_mark(mark))
 }

 pub fn injected_crate_name(krate: &ast::Crate) -> Option<&'static str> {

@@ -306,7 +306,7 @@ fn generate_test_harness(sess: &ParseSess,
 /// call to codemap's `is_internal` check.
 /// The expanded code calls some unstable functions in the test crate.
 fn ignored_span(cx: &TestCtxt, sp: Span) -> Span {
-Span { ctxt: cx.ctxt, ..sp }
+sp.with_ctxt(cx.ctxt)
 }

 #[derive(PartialEq)]

@@ -80,11 +80,7 @@ fn make_span(file_text: &str, start: &Position, end: &Position) -> Span {
 let start = make_pos(file_text, start);
 let end = make_pos(file_text, end) + end.string.len(); // just after matching thing ends
 assert!(start <= end);
-Span {
-lo: BytePos(start as u32),
-hi: BytePos(end as u32),
-ctxt: NO_EXPANSION,
-}
+Span::new(BytePos(start as u32), BytePos(end as u32), NO_EXPANSION)
 }

 fn make_pos(file_text: &str, pos: &Position) -> usize {

@@ -59,7 +59,7 @@ impl Delimited {
 let open_span = if span == DUMMY_SP {
 DUMMY_SP
 } else {
-Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span }
+span.with_hi(span.lo() + BytePos(self.delim.len() as u32))
 };
 TokenTree::Token(open_span, self.open_token())
 }
@@ -69,7 +69,7 @@ impl Delimited {
 let close_span = if span == DUMMY_SP {
 DUMMY_SP
 } else {
-Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span }
+span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
 };
 TokenTree::Token(close_span, self.close_token())
 }
@@ -602,11 +602,7 @@ mod tests {
 }

 fn sp(a: u32, b: u32) -> Span {
-Span {
-lo: BytePos(a),
-hi: BytePos(b),
-ctxt: NO_EXPANSION,
-}
+Span::new(BytePos(a), BytePos(b), NO_EXPANSION)
 }

 #[test]
@@ -111,7 +111,7 @@ fn cs_clone_shallow(name: &str,
 ty: P<ast::Ty>, span: Span, helper_name: &str) {
 // Generate statement `let _: helper_name<ty>;`,
 // set the expn ID so we can use the unstable struct.
-let span = Span { ctxt: cx.backtrace(), ..span};
+let span = span.with_ctxt(cx.backtrace());
 let assert_path = cx.path_all(span, true,
 cx.std_path(&["clone", helper_name]),
 vec![], vec![ty], vec![]);

@@ -58,7 +58,7 @@ fn cs_total_eq_assert(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure)
 ty: P<ast::Ty>, span: Span, helper_name: &str) {
 // Generate statement `let _: helper_name<ty>;`,
 // set the expn ID so we can use the unstable struct.
-let span = Span { ctxt: cx.backtrace(), ..span };
+let span = span.with_ctxt(cx.backtrace());
 let assert_path = cx.path_all(span, true,
 cx.std_path(&["cmp", helper_name]),
 vec![], vec![ty], vec![]);

@@ -67,7 +67,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<E
 };

 // We want to make sure we have the ctxt set so that we can use unstable methods
-let span = Span { ctxt: cx.backtrace(), ..span };
+let span = span.with_ctxt(cx.backtrace());
 let name = cx.expr_lit(span, ast::LitKind::Str(ident.name, ast::StrStyle::Cooked));
 let builder = Ident::from_str("builder");
 let builder_expr = cx.expr_ident(span, builder.clone());

@@ -375,7 +375,7 @@ fn find_type_parameters(ty: &ast::Ty,
 }

 fn visit_mac(&mut self, mac: &ast::Mac) {
-let span = Span { ctxt: self.span.ctxt, ..mac.span };
+let span = mac.span.with_ctxt(self.span.ctxt());
 self.cx.span_err(span, "`derive` cannot be used on items with type macros");
 }
 }
@@ -1464,7 +1464,7 @@ impl<'a> MethodDef<'a> {
 .iter()
 .map(|v| {
 let ident = v.node.name;
-let sp = Span { ctxt: trait_.span.ctxt, ..v.span };
+let sp = v.span.with_ctxt(trait_.span.ctxt());
 let summary = trait_.summarise_struct(cx, &v.node.data);
 (ident, sp, summary)
 })
@@ -1484,7 +1484,7 @@ impl<'a> TraitDef<'a> {
 let mut named_idents = Vec::new();
 let mut just_spans = Vec::new();
 for field in struct_def.fields() {
-let sp = Span { ctxt: self.span.ctxt, ..field.span };
+let sp = field.span.with_ctxt(self.span.ctxt());
 match field.ident {
 Some(ident) => named_idents.push((ident, sp)),
 _ => just_spans.push(sp),
@@ -1529,7 +1529,7 @@ impl<'a> TraitDef<'a> {
 let mut paths = Vec::new();
 let mut ident_exprs = Vec::new();
 for (i, struct_field) in struct_def.fields().iter().enumerate() {
-let sp = Span { ctxt: self.span.ctxt, ..struct_field.span };
+let sp = struct_field.span.with_ctxt(self.span.ctxt());
 let ident = cx.ident_of(&format!("{}_{}", prefix, i));
 paths.push(codemap::Spanned {
 span: sp,
@@ -1550,7 +1550,7 @@ impl<'a> TraitDef<'a> {
 cx.span_bug(sp, "a braced struct with unnamed fields in `derive`");
 }
 codemap::Spanned {
-span: Span { ctxt: self.span.ctxt, ..pat.span },
+span: pat.span.with_ctxt(self.span.ctxt()),
 node: ast::FieldPat {
 ident: ident.unwrap(),
 pat,
@@ -1582,7 +1582,7 @@ impl<'a> TraitDef<'a> {
 mutbl: ast::Mutability)
 -> (P<ast::Pat>, Vec<(Span, Option<Ident>, P<Expr>, &'a [ast::Attribute])>) {
 let variant_ident = variant.node.name;
-let sp = Span { ctxt: self.span.ctxt, ..variant.span };
+let sp = variant.span.with_ctxt(self.span.ctxt());
 let variant_path = cx.path(sp, vec![enum_ident, variant_ident]);
 self.create_struct_pattern(cx, variant_path, &variant.node.data, prefix, mutbl)
 }

@@ -158,13 +158,13 @@ fn call_intrinsic(cx: &ExtCtxt,
                  args: Vec<P<ast::Expr>>)
                  -> P<ast::Expr> {
    if cx.current_expansion.mark.expn_info().unwrap().callee.allow_internal_unstable {
-        span.ctxt = cx.backtrace();
+        span = span.with_ctxt(cx.backtrace());
    } else { // Avoid instability errors with user defined curstom derives, cc #36316
        let mut info = cx.current_expansion.mark.expn_info().unwrap();
        info.callee.allow_internal_unstable = true;
        let mark = Mark::fresh(Mark::root());
        mark.set_expn_info(info);
-        span.ctxt = SyntaxContext::empty().apply_mark(mark);
+        span = span.with_ctxt(SyntaxContext::empty().apply_mark(mark));
    }
    let path = cx.std_path(&["intrinsics", intrinsic]);
    let call = cx.expr_call_global(span, path, args);
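The two replacements in the hunk above show the rewrite pattern repeated throughout this commit: with `ctxt` no longer a public field, an in-place write such as `span.ctxt = ...` becomes rebinding the variable to the copy returned by `with_ctxt`. A minimal, self-contained sketch of that pattern, using stand-in types rather than the real `syntax_pos` definitions:

// Sketch only: stand-in types mirroring the rebinding pattern used above.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct SyntaxContext(u32);

#[derive(Clone, Copy, Debug)]
struct Span { lo: u32, hi: u32, ctxt: SyntaxContext }

impl Span {
    // Returns a copy of the span carrying a different hygiene context.
    fn with_ctxt(self, ctxt: SyntaxContext) -> Span {
        Span { ctxt, ..self }
    }
}

fn main() {
    let mut span = Span { lo: 0, hi: 4, ctxt: SyntaxContext(0) };
    // Before this commit: `span.ctxt = SyntaxContext(1);` (direct field write).
    // After: rebind to the copy returned by the builder-style method.
    span = span.with_ctxt(SyntaxContext(1));
    assert_eq!(span.ctxt, SyntaxContext(1));
}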

@@ -558,10 +558,8 @@ impl<'a, 'b> Context<'a, 'b> {
        // passed to this function.
        for (i, e) in self.args.into_iter().enumerate() {
            let name = self.ecx.ident_of(&format!("__arg{}", i));
-            let span = Span {
-                ctxt: e.span.ctxt.apply_mark(self.ecx.current_expansion.mark),
-                ..DUMMY_SP
-            };
+            let span =
+                DUMMY_SP.with_ctxt(e.span.ctxt().apply_mark(self.ecx.current_expansion.mark));
            pats.push(self.ecx.pat_ident(span, name));
            for ref arg_ty in self.arg_unique_types[i].iter() {
                locals.push(Context::format_arg(self.ecx, self.macsp, e.span, arg_ty, name));

@@ -642,7 +640,7 @@ impl<'a, 'b> Context<'a, 'b> {
                  ty: &ArgumentType,
                  arg: ast::Ident)
                  -> P<ast::Expr> {
-        sp.ctxt = sp.ctxt.apply_mark(ecx.current_expansion.mark);
+        sp = sp.with_ctxt(sp.ctxt().apply_mark(ecx.current_expansion.mark));
        let arg = ecx.expr_ident(sp, arg);
        let trait_ = match *ty {
            Placeholder(ref tyname) => {

@@ -679,7 +677,7 @@ pub fn expand_format_args<'cx>(ecx: &'cx mut ExtCtxt,
                               mut sp: Span,
                               tts: &[tokenstream::TokenTree])
                               -> Box<base::MacResult + 'cx> {
-    sp.ctxt = sp.ctxt.apply_mark(ecx.current_expansion.mark);
+    sp = sp.with_ctxt(sp.ctxt().apply_mark(ecx.current_expansion.mark));
    match parse_args(ecx, sp, tts) {
        Some((efmt, args, names)) => {
            MacEager::expr(expand_preparsed_format_args(ecx, sp, efmt, args, names))

@@ -701,7 +699,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt,
    let arg_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect();
    let arg_unique_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect();
    let mut macsp = ecx.call_site();
-    macsp.ctxt = macsp.ctxt.apply_mark(ecx.current_expansion.mark);
+    macsp = macsp.with_ctxt(macsp.ctxt().apply_mark(ecx.current_expansion.mark));
    let msg = "format argument must be a string literal.";
    let fmt = match expr_to_spanned_string(ecx, efmt, msg) {
        Some(fmt) => fmt,

@@ -371,7 +371,7 @@ fn mk_registrar(cx: &mut ExtCtxt,
            allow_internal_unsafe: false,
        }
    });
-    let span = Span { ctxt: SyntaxContext::empty().apply_mark(mark), ..DUMMY_SP };
+    let span = DUMMY_SP.with_ctxt(SyntaxContext::empty().apply_mark(mark));

    let proc_macro = Ident::from_str("proc_macro");
    let krate = cx.item(span,

@@ -60,13 +60,15 @@ pub type FileName = String;
/// range between files.
#[derive(Clone, Copy, Hash, PartialEq, Eq, Ord, PartialOrd)]
pub struct Span {
-    pub lo: BytePos,
-    pub hi: BytePos,
+    lo: BytePos,
+    hi: BytePos,
    /// Information about where the macro came from, if this piece of
    /// code was created by a macro expansion.
-    pub ctxt: SyntaxContext,
+    ctxt: SyntaxContext,
}

+pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), ctxt: NO_EXPANSION };
+
/// A collection of spans. Spans have two orthogonal attributes:
///
/// - they can be *primary spans*. In this case they are the locus of

@@ -80,16 +82,46 @@ pub struct MultiSpan {
}

impl Span {
+    #[inline]
+    pub fn new(lo: BytePos, hi: BytePos, ctxt: SyntaxContext) -> Self {
+        Span { lo, hi, ctxt }
+    }
+
+    #[inline]
+    pub fn lo(self) -> BytePos {
+        self.lo
+    }
+    #[inline]
+    pub fn with_lo(self, lo: BytePos) -> Span {
+        Span::new(lo, self.hi(), self.ctxt())
+    }
+    #[inline]
+    pub fn hi(self) -> BytePos {
+        self.hi
+    }
+    #[inline]
+    pub fn with_hi(self, hi: BytePos) -> Span {
+        Span::new(self.lo(), hi, self.ctxt())
+    }
+    #[inline]
+    pub fn ctxt(self) -> SyntaxContext {
+        self.ctxt
+    }
+    #[inline]
+    pub fn with_ctxt(self, ctxt: SyntaxContext) -> Span {
+        Span::new(self.lo(), self.hi(), ctxt)
+    }
+
    /// Returns a new span representing just the end-point of this span
    pub fn end_point(self) -> Span {
-        let lo = cmp::max(self.hi.0 - 1, self.lo.0);
-        Span { lo: BytePos(lo), ..self }
+        let lo = cmp::max(self.hi().0 - 1, self.lo().0);
+        self.with_lo(BytePos(lo))
    }

    /// Returns a new span representing the next character after the end-point of this span
    pub fn next_point(self) -> Span {
-        let lo = cmp::max(self.hi.0, self.lo.0 + 1);
-        Span { lo: BytePos(lo), hi: BytePos(lo), ..self }
+        let lo = cmp::max(self.hi().0, self.lo().0 + 1);
+        Span::new(BytePos(lo), BytePos(lo), self.ctxt())
    }

    /// Returns `self` if `self` is not the dummy span, and `other` otherwise.
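With `lo`, `hi`, and `ctxt` private, every read goes through the getters added above and every "update" builds a fresh `Span` via `new`/`with_*`. An illustrative, self-contained sketch of that API surface, again with stand-in types rather than the real `syntax_pos` crate:

// Sketch only: mirrors the accessor/builder API introduced above.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct BytePos(u32);

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct SyntaxContext(u32);

#[derive(Clone, Copy, Debug)]
struct Span { lo: BytePos, hi: BytePos, ctxt: SyntaxContext }

impl Span {
    fn new(lo: BytePos, hi: BytePos, ctxt: SyntaxContext) -> Self { Span { lo, hi, ctxt } }
    fn lo(self) -> BytePos { self.lo }
    fn hi(self) -> BytePos { self.hi }
    fn ctxt(self) -> SyntaxContext { self.ctxt }
    fn with_lo(self, lo: BytePos) -> Span { Span::new(lo, self.hi(), self.ctxt()) }
    fn with_hi(self, hi: BytePos) -> Span { Span::new(self.lo(), hi, self.ctxt()) }
}

fn main() {
    let sp = Span::new(BytePos(10), BytePos(20), SyntaxContext(0));
    // Reads go through getters instead of `sp.lo` field access.
    assert_eq!(sp.lo(), BytePos(10));
    // "Updates" produce a new value, replacing struct-update syntax
    // such as `Span { lo: BytePos(12), ..sp }`.
    let narrowed = sp.with_lo(BytePos(12)).with_hi(BytePos(18));
    assert_eq!((narrowed.lo(), narrowed.hi()), (BytePos(12), BytePos(18)));
    assert_eq!(narrowed.ctxt(), sp.ctxt());
}

Because `Span` stays `Copy`, the by-value builder style is cheap and avoids borrow issues that in-place field writes would otherwise raise at call sites.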

@@ -99,7 +131,7 @@ impl Span {

    /// Return true if `self` fully encloses `other`.
    pub fn contains(self, other: Span) -> bool {
-        self.lo <= other.lo && other.hi <= self.hi
+        self.lo() <= other.lo() && other.hi() <= self.hi()
    }

    /// Return true if the spans are equal with regards to the source text.

@@ -107,13 +139,13 @@ impl Span {
    /// Use this instead of `==` when either span could be generated code,
    /// and you only care that they point to the same bytes of source text.
    pub fn source_equal(&self, other: &Span) -> bool {
-        self.lo == other.lo && self.hi == other.hi
+        self.lo() == other.lo() && self.hi() == other.hi()
    }

    /// Returns `Some(span)`, where the start is trimmed by the end of `other`
    pub fn trim_start(self, other: Span) -> Option<Span> {
-        if self.hi > other.hi {
-            Some(Span { lo: cmp::max(self.lo, other.hi), .. self })
+        if self.hi() > other.hi() {
+            Some(self.with_lo(cmp::max(self.lo(), other.hi())))
        } else {
            None
        }

@@ -122,7 +154,7 @@ impl Span {
    /// Return the source span - this is either the supplied span, or the span for
    /// the macro callsite that expanded to it.
    pub fn source_callsite(self) -> Span {
-        self.ctxt.outer().expn_info().map(|info| info.call_site.source_callsite()).unwrap_or(self)
+        self.ctxt().outer().expn_info().map(|info| info.call_site.source_callsite()).unwrap_or(self)
    }

    /// Return the source callee.

@@ -132,19 +164,19 @@ impl Span {
    /// corresponding to the source callsite.
    pub fn source_callee(self) -> Option<NameAndSpan> {
        fn source_callee(info: ExpnInfo) -> NameAndSpan {
-            match info.call_site.ctxt.outer().expn_info() {
+            match info.call_site.ctxt().outer().expn_info() {
                Some(info) => source_callee(info),
                None => info.callee,
            }
        }
-        self.ctxt.outer().expn_info().map(source_callee)
+        self.ctxt().outer().expn_info().map(source_callee)
    }

    /// Check if a span is "internal" to a macro in which #[unstable]
    /// items can be used (that is, a macro marked with
    /// `#[allow_internal_unstable]`).
    pub fn allows_unstable(&self) -> bool {
-        match self.ctxt.outer().expn_info() {
+        match self.ctxt().outer().expn_info() {
            Some(info) => info.callee.allow_internal_unstable,
            None => false,
        }

@@ -152,7 +184,7 @@ impl Span {

    /// Check if this span arises from a compiler desugaring of kind `kind`.
    pub fn is_compiler_desugaring(&self, kind: CompilerDesugaringKind) -> bool {
-        match self.ctxt.outer().expn_info() {
+        match self.ctxt().outer().expn_info() {
            Some(info) => match info.callee.format {
                ExpnFormat::CompilerDesugaring(k) => k == kind,
                _ => false,

@@ -165,7 +197,7 @@ impl Span {
    /// can be used without triggering the `unsafe_code` lint
    // (that is, a macro marked with `#[allow_internal_unsafe]`).
    pub fn allows_unsafe(&self) -> bool {
-        match self.ctxt.outer().expn_info() {
+        match self.ctxt().outer().expn_info() {
            Some(info) => info.callee.allow_internal_unsafe,
            None => false,
        }

@@ -175,7 +207,7 @@ impl Span {
        let mut prev_span = DUMMY_SP;
        let mut result = vec![];
        loop {
-            let info = match self.ctxt.outer().expn_info() {
+            let info = match self.ctxt().outer().expn_info() {
                Some(info) => info,
                None => break,
            };

@@ -205,42 +237,30 @@ impl Span {

    /// Return a `Span` that would enclose both `self` and `end`.
    pub fn to(self, end: Span) -> Span {
-        Span {
-            lo: cmp::min(self.lo, end.lo),
-            hi: cmp::max(self.hi, end.hi),
+        Span::new(
+            cmp::min(self.lo(), end.lo()),
+            cmp::max(self.hi(), end.hi()),
            // FIXME(jseyfried): self.ctxt should always equal end.ctxt here (c.f. issue #23480)
-            ctxt: if self.ctxt == SyntaxContext::empty() {
-                end.ctxt
-            } else {
-                self.ctxt
-            },
-        }
+            if self.ctxt() == SyntaxContext::empty() { end.ctxt() } else { self.ctxt() },
+        )
    }

    /// Return a `Span` between the end of `self` to the beginning of `end`.
    pub fn between(self, end: Span) -> Span {
-        Span {
-            lo: self.hi,
-            hi: end.lo,
-            ctxt: if end.ctxt == SyntaxContext::empty() {
-                end.ctxt
-            } else {
-                self.ctxt
-            }
-        }
+        Span::new(
+            self.hi(),
+            end.lo(),
+            if end.ctxt() == SyntaxContext::empty() { end.ctxt() } else { self.ctxt() },
+        )
    }

    /// Return a `Span` between the beginning of `self` to the beginning of `end`.
    pub fn until(self, end: Span) -> Span {
-        Span {
-            lo: self.lo,
-            hi: end.lo,
-            ctxt: if end.ctxt == SyntaxContext::empty() {
-                end.ctxt
-            } else {
-                self.ctxt
-            }
-        }
+        Span::new(
+            self.lo(),
+            end.lo(),
+            if end.ctxt() == SyntaxContext::empty() { end.ctxt() } else { self.ctxt() },
+        )
    }
}

@@ -267,11 +287,11 @@ impl serialize::UseSpecializedEncodable for Span {
    fn default_encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        s.emit_struct("Span", 2, |s| {
            s.emit_struct_field("lo", 0, |s| {
-                self.lo.encode(s)
+                self.lo().encode(s)
            })?;

            s.emit_struct_field("hi", 1, |s| {
-                self.hi.encode(s)
+                self.hi().encode(s)
            })
        })
    }

@@ -282,14 +302,14 @@ impl serialize::UseSpecializedDecodable for Span {
        d.read_struct("Span", 2, |d| {
            let lo = d.read_struct_field("lo", 0, Decodable::decode)?;
            let hi = d.read_struct_field("hi", 1, Decodable::decode)?;
-            Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION })
+            Ok(Span::new(lo, hi, NO_EXPANSION))
        })
    }
}

fn default_span_debug(span: Span, f: &mut fmt::Formatter) -> fmt::Result {
    write!(f, "Span {{ lo: {:?}, hi: {:?}, ctxt: {:?} }}",
-           span.lo, span.hi, span.ctxt)
+           span.lo(), span.hi(), span.ctxt())
}

impl fmt::Debug for Span {

@@ -298,8 +318,6 @@ impl fmt::Debug for Span {
    }
}

-pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), ctxt: NO_EXPANSION };
-
impl MultiSpan {
    pub fn new() -> MultiSpan {
        MultiSpan {

@@ -16,7 +16,7 @@ extern crate crate_with_invalid_spans;

fn main() {
    // The AST of `exported_generic` stored in crate_with_invalid_spans's
-    // metadata should contain an invalid span where span.lo > span.hi.
+    // metadata should contain an invalid span where span.lo() > span.hi().
    // Let's make sure the compiler doesn't crash when encountering this.
    let _ = crate_with_invalid_spans::exported_generic(32u32, 7u32);
}