Add types for character position and byte position in the codemap
parent 9ecf86343a
commit f67bfe9738
22 changed files with 285 additions and 161 deletions
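The diff below replaces the codemap's bare `uint` offsets with two distinct wrapper types, CharPos (character offsets) and BytePos (byte offsets), tied together by a small Pos trait so code such as Loc and the lookup helpers can be generic over either kind. The sketch that follows restates that pattern in today's Rust syntax rather than the 2012 dialect used in the diff; the Pos, CharPos, BytePos, and Loc names mirror the commit, everything else is illustrative only.

    // Minimal modern-Rust sketch of the newtype pattern introduced by this commit.
    trait Pos {
        fn from_usize(n: usize) -> Self;
        fn to_usize(&self) -> usize;
    }

    #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
    struct CharPos(usize); // offset counted in characters

    #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
    struct BytePos(usize); // offset counted in bytes

    impl Pos for CharPos {
        fn from_usize(n: usize) -> Self { CharPos(n) }
        fn to_usize(&self) -> usize { self.0 }
    }

    impl Pos for BytePos {
        fn from_usize(n: usize) -> Self { BytePos(n) }
        fn to_usize(&self) -> usize { self.0 }
    }

    // A location generic over the kind of column offset, like Loc<A: Pos> in the diff.
    struct Loc<A: Pos> {
        line: usize,
        col: A,
    }

    fn main() {
        let loc = Loc { line: 3, col: CharPos(7) };
        // Mixing up byte and char offsets is now a type error instead of a silent bug:
        // let bad: CharPos = BytePos(7); // does not compile
        println!("line {} col {}", loc.line, loc.col.to_usize());
    }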
@@ -8,7 +8,7 @@ use trans::build::B;
 use middle::ty;
 use syntax::{ast, codemap, ast_util, ast_map};
 use syntax::parse::token::ident_interner;
-use codemap::span;
+use codemap::{span, CharPos};
 use ast::Ty;
 use pat_util::*;
 use util::ppaux::ty_to_str;
@@ -112,7 +112,7 @@ type compile_unit_md = {name: ~str};
 type subprogram_md = {id: ast::node_id};
 type local_var_md = {id: ast::node_id};
 type tydesc_md = {hash: uint};
-type block_md = {start: codemap::Loc, end: codemap::Loc};
+type block_md = {start: codemap::Loc<CharPos>, end: codemap::Loc<CharPos>};
 type argument_md = {id: ast::node_id};
 type retval_md = {id: ast::node_id};
 
@@ -266,8 +266,8 @@ fn create_block(cx: block) -> @metadata<block_md> {
     };
     let lldata = ~[lltag(tg),
                    parent,
-                   lli32(start.line as int),
-                   lli32(start.col as int),
+                   lli32(start.line.to_int()),
+                   lli32(start.col.to_int()),
                    file_node.node,
                    lli32(unique_id)
     ];
@@ -713,8 +713,8 @@ fn update_source_pos(cx: block, s: span) {
     let cm = cx.sess().codemap;
     let blockmd = create_block(cx);
    let loc = cm.lookup_char_pos(s.lo);
-    let scopedata = ~[lli32(loc.line as int),
-                      lli32(loc.col as int),
+    let scopedata = ~[lli32(loc.line.to_int()),
+                      lli32(loc.col.to_int()),
                       blockmd.node,
                       llnull()];
     let dbgscope = llmdnode(scopedata);
@@ -106,7 +106,8 @@ fn explain_region_and_span(cx: ctxt, region: ty::Region)
     -> (~str, Option<span>)
 {
     let lo = cx.sess.codemap.lookup_char_pos_adj(span.lo);
-    (fmt!("the %s at %u:%u", heading, lo.line, lo.col), Some(span))
+    (fmt!("the %s at %u:%u", heading,
+          lo.line, lo.col.to_uint()), Some(span))
 }
 }
 
@@ -1,7 +1,7 @@
-use codemap::span;
+use codemap::{span, CharPos};
 use ast::*;
 
-pure fn spanned<T>(lo: uint, hi: uint, +t: T) -> spanned<T> {
+pure fn spanned<T>(+lo: CharPos, +hi: CharPos, +t: T) -> spanned<T> {
     respan(mk_sp(lo, hi), move t)
 }
 
@@ -14,12 +14,12 @@ pure fn dummy_spanned<T>(+t: T) -> spanned<T> {
 }
 
 /* assuming that we're not in macro expansion */
-pure fn mk_sp(lo: uint, hi: uint) -> span {
+pure fn mk_sp(+lo: CharPos, +hi: CharPos) -> span {
     span {lo: lo, hi: hi, expn_info: None}
 }
 
 // make this a const, once the compiler supports it
-pure fn dummy_sp() -> span { return mk_sp(0u, 0u); }
+pure fn dummy_sp() -> span { return mk_sp(CharPos(0), CharPos(0)); }
 
 
@@ -6,6 +6,7 @@ use either::Either;
 use diagnostic::span_handler;
 use ast_util::{spanned, dummy_spanned};
 use parse::comments::{doc_comment_style, strip_doc_comment_decoration};
+use codemap::CharPos;
 
 // Constructors
 export mk_name_value_item_str;
@@ -74,7 +75,8 @@ fn mk_attr(item: @ast::meta_item) -> ast::attribute {
                     is_sugared_doc: false});
 }
 
-fn mk_sugared_doc_attr(text: ~str, lo: uint, hi: uint) -> ast::attribute {
+fn mk_sugared_doc_attr(text: ~str,
+                       +lo: CharPos, +hi: CharPos) -> ast::attribute {
     let lit = spanned(lo, hi, ast::lit_str(@text));
     let attr = {
         style: doc_comment_style(text),
@@ -10,8 +10,109 @@ use std::serialization::{Serializable,
                          Serializer,
                          Deserializer};
 
-pub type BytePos = uint;
-pub type CharPos = uint;
+trait Pos {
+    static pure fn from_uint(n: uint) -> self;
+    pure fn to_uint(&self) -> uint;
+}
+
+pub enum BytePos = uint;
+pub enum CharPos = uint;
+
+impl BytePos: Pos {
+    static pure fn from_uint(n: uint) -> BytePos { BytePos(n) }
+    pure fn to_uint(&self) -> uint { **self }
+}
+
+impl BytePos: cmp::Eq {
+    pure fn eq(other: &BytePos) -> bool {
+        *self == **other
+    }
+    pure fn ne(other: &BytePos) -> bool { !self.eq(other) }
+}
+
+impl BytePos: cmp::Ord {
+    pure fn lt(other: &BytePos) -> bool { *self < **other }
+    pure fn le(other: &BytePos) -> bool { *self <= **other }
+    pure fn ge(other: &BytePos) -> bool { *self >= **other }
+    pure fn gt(other: &BytePos) -> bool { *self > **other }
+}
+
+impl BytePos: Num {
+    pure fn add(other: &BytePos) -> BytePos {
+        BytePos(*self + **other)
+    }
+    pure fn sub(other: &BytePos) -> BytePos {
+        BytePos(*self - **other)
+    }
+    pure fn mul(other: &BytePos) -> BytePos {
+        BytePos(*self * (**other))
+    }
+    pure fn div(other: &BytePos) -> BytePos {
+        BytePos(*self / **other)
+    }
+    pure fn modulo(other: &BytePos) -> BytePos {
+        BytePos(*self % **other)
+    }
+    pure fn neg() -> BytePos {
+        BytePos(-*self)
+    }
+    pure fn to_int() -> int { *self as int }
+    static pure fn from_int(+n: int) -> BytePos { BytePos(n as uint) }
+}
+
+impl BytePos: to_bytes::IterBytes {
+    pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) {
+        (*self).iter_bytes(lsb0, f)
+    }
+}
+
+impl CharPos: Pos {
+    static pure fn from_uint(n: uint) -> CharPos { CharPos(n) }
+    pure fn to_uint(&self) -> uint { **self }
+}
+
+impl CharPos: cmp::Eq {
+    pure fn eq(other: &CharPos) -> bool {
+        *self == **other
+    }
+    pure fn ne(other: &CharPos) -> bool { !self.eq(other) }
+}
+
+impl CharPos: cmp::Ord {
+    pure fn lt(other: &CharPos) -> bool { *self < **other }
+    pure fn le(other: &CharPos) -> bool { *self <= **other }
+    pure fn ge(other: &CharPos) -> bool { *self >= **other }
+    pure fn gt(other: &CharPos) -> bool { *self > **other }
+}
+
+impl CharPos: Num {
+    pure fn add(other: &CharPos) -> CharPos {
+        CharPos(*self + **other)
+    }
+    pure fn sub(other: &CharPos) -> CharPos {
+        CharPos(*self - **other)
+    }
+    pure fn mul(other: &CharPos) -> CharPos {
+        CharPos(*self * (**other))
+    }
+    pure fn div(other: &CharPos) -> CharPos {
+        CharPos(*self / **other)
+    }
+    pure fn modulo(other: &CharPos) -> CharPos {
+        CharPos(*self % **other)
+    }
+    pure fn neg() -> CharPos {
+        CharPos(-*self)
+    }
+    pure fn to_int() -> int { *self as int }
+    static pure fn from_int(+n: int) -> CharPos { CharPos(n as uint) }
+}
+
+impl CharPos: to_bytes::IterBytes {
+    pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) {
+        (*self).iter_bytes(lsb0, f)
+    }
+}
 
 pub struct span {
     lo: CharPos,
@@ -37,8 +138,8 @@ impl<D: Deserializer> span: Deserializable<D> {
     }
 }
 
-pub struct Loc {
-    file: @FileMap, line: uint, col: uint
+pub struct Loc<A: Pos> {
+    file: @FileMap, line: uint, col: A
 }
 
 pub struct FilePos {
@@ -69,7 +170,7 @@ pub struct FileLines {
 pub enum FileSubstr {
     pub FssNone,
     pub FssInternal(span),
-    pub FssExternal({filename: ~str, line: uint, col: uint})
+    pub FssExternal({filename: ~str, line: uint, col: CharPos})
 }
 
 pub struct FileMap {
@@ -83,7 +184,7 @@ pub struct FileMap {
 pub impl FileMap {
     static fn new_w_substr(+filename: FileName, +substr: FileSubstr,
                            src: @~str,
-                           start_pos_ch: uint, start_pos_byte: uint)
+                           +start_pos_ch: CharPos, +start_pos_byte: BytePos)
         -> FileMap {
         return FileMap {
             name: filename, substr: substr, src: src,
@@ -93,18 +194,22 @@ pub impl FileMap {
     }
 
     static fn new(+filename: FileName, src: @~str,
-                  start_pos_ch: CharPos, start_pos_byte: BytePos)
+                  +start_pos_ch: CharPos, +start_pos_byte: BytePos)
         -> FileMap {
         return FileMap::new_w_substr(filename, FssNone, src,
                                      start_pos_ch, start_pos_byte);
     }
 
-    fn next_line(@self, chpos: CharPos, byte_pos: BytePos) {
-        self.lines.push(FilePos {ch: chpos, byte: byte_pos + self.start_pos.byte});
+    fn next_line(@self, +chpos: CharPos, +byte_pos: BytePos) {
+        self.lines.push(FilePos {
+            ch: chpos,
+            byte: byte_pos + self.start_pos.byte
+        });
     }
 
     pub fn get_line(@self, line: int) -> ~str unsafe {
-        let begin: uint = self.lines[line].byte - self.start_pos.byte;
+        let begin: BytePos = self.lines[line].byte - self.start_pos.byte;
+        let begin = begin.to_uint();
         let end = match str::find_char_from(*self.src, '\n', begin) {
             Some(e) => e,
             None => str::len(*self.src)
@@ -127,21 +232,22 @@ pub impl CodeMap {
 
     pub fn mk_substr_filename(@self, sp: span) -> ~str {
         let pos = self.lookup_char_pos(sp.lo);
-        return fmt!("<%s:%u:%u>", pos.file.name, pos.line, pos.col);
+        return fmt!("<%s:%u:%u>", pos.file.name,
+                    pos.line, pos.col.to_uint());
     }
 
-    pub fn lookup_char_pos(@self, pos: CharPos) -> Loc {
-        pure fn lookup(pos: FilePos) -> uint { return pos.ch; }
+    pub fn lookup_char_pos(@self, +pos: CharPos) -> Loc<CharPos> {
+        pure fn lookup(pos: FilePos) -> uint { return pos.ch.to_uint(); }
         return self.lookup_pos(pos, lookup);
     }
 
-    pub fn lookup_byte_pos(@self, pos: BytePos) -> Loc {
-        pure fn lookup(pos: FilePos) -> uint { return pos.byte; }
+    pub fn lookup_byte_pos(@self, +pos: BytePos) -> Loc<BytePos> {
+        pure fn lookup(pos: FilePos) -> uint { return pos.byte.to_uint(); }
         return self.lookup_pos(pos, lookup);
     }
 
-    pub fn lookup_char_pos_adj(@self, pos: CharPos)
-        -> {filename: ~str, line: uint, col: uint, file: Option<@FileMap>}
+    pub fn lookup_char_pos_adj(@self, +pos: CharPos)
+        -> {filename: ~str, line: uint, col: CharPos, file: Option<@FileMap>}
     {
         let loc = self.lookup_char_pos(pos);
         match (loc.file.substr) {
@@ -152,7 +258,8 @@ pub impl CodeMap {
                  file: Some(loc.file)}
             }
             FssInternal(sp) => {
-                self.lookup_char_pos_adj(sp.lo + (pos - loc.file.start_pos.ch))
+                self.lookup_char_pos_adj(
+                    sp.lo + (pos - loc.file.start_pos.ch))
             }
             FssExternal(eloc) => {
                 {filename: /* FIXME (#2543) */ copy eloc.filename,
@@ -164,14 +271,17 @@ pub impl CodeMap {
     }
 
     pub fn adjust_span(@self, sp: span) -> span {
-        pure fn lookup(pos: FilePos) -> uint { return pos.ch; }
+        pure fn lookup(pos: FilePos) -> uint { return pos.ch.to_uint(); }
         let line = self.lookup_line(sp.lo, lookup);
         match (line.fm.substr) {
             FssNone => sp,
             FssInternal(s) => {
-                self.adjust_span(span {lo: s.lo + (sp.lo - line.fm.start_pos.ch),
-                                       hi: s.lo + (sp.hi - line.fm.start_pos.ch),
-                                       expn_info: sp.expn_info})}
+                self.adjust_span(span {
+                    lo: s.lo + (sp.lo - line.fm.start_pos.ch),
+                    hi: s.lo + (sp.hi - line.fm.start_pos.ch),
+                    expn_info: sp.expn_info
+                })
+            }
             FssExternal(_) => sp
         }
     }
@@ -180,7 +290,7 @@ pub impl CodeMap {
         let lo = self.lookup_char_pos_adj(sp.lo);
         let hi = self.lookup_char_pos_adj(sp.hi);
         return fmt!("%s:%u:%u: %u:%u", lo.filename,
-                    lo.line, lo.col, hi.line, hi.col)
+                    lo.line, lo.col.to_uint(), hi.line, hi.col.to_uint())
     }
 
     pub fn span_to_filename(@self, sp: span) -> FileName {
@@ -198,21 +308,24 @@ pub impl CodeMap {
         return @FileLines {file: lo.file, lines: lines};
     }
 
-    fn lookup_byte_offset(@self, chpos: CharPos)
+    fn lookup_byte_offset(@self, +chpos: CharPos)
         -> {fm: @FileMap, pos: BytePos} {
-        pure fn lookup(pos: FilePos) -> uint { return pos.ch; }
+        pure fn lookup(pos: FilePos) -> uint { return pos.ch.to_uint(); }
         let {fm, line} = self.lookup_line(chpos, lookup);
         let line_offset = fm.lines[line].byte - fm.start_pos.byte;
         let col = chpos - fm.lines[line].ch;
-        let col_offset = str::count_bytes(*fm.src, line_offset, col);
-        {fm: fm, pos: line_offset + col_offset}
+        let col_offset = str::count_bytes(*fm.src,
+                                          line_offset.to_uint(),
+                                          col.to_uint());
+        {fm: fm, pos: line_offset + BytePos(col_offset)}
     }
 
     pub fn span_to_snippet(@self, sp: span) -> ~str {
         let begin = self.lookup_byte_offset(sp.lo);
         let end = self.lookup_byte_offset(sp.hi);
         assert begin.fm.start_pos == end.fm.start_pos;
-        return str::slice(*begin.fm.src, begin.pos, end.pos);
+        return str::slice(*begin.fm.src,
+                          begin.pos.to_uint(), end.pos.to_uint());
     }
 
     pub fn get_filemap(@self, filename: ~str) -> @FileMap {
@@ -225,7 +338,7 @@ pub impl CodeMap {
 }
 
 priv impl CodeMap {
-    fn lookup_line(@self, pos: uint, lookup: LookupFn)
+    fn lookup_line<A: Pos>(@self, pos: A, lookup: LookupFn)
         -> {fm: @FileMap, line: uint}
     {
         let len = self.files.len();
@@ -233,31 +346,40 @@ priv impl CodeMap {
         let mut b = len;
         while b - a > 1u {
             let m = (a + b) / 2u;
-            if lookup(self.files[m].start_pos) > pos { b = m; } else { a = m; }
+            if lookup(self.files[m].start_pos) > pos.to_uint() {
+                b = m;
+            } else {
+                a = m;
+            }
         }
         if (a >= len) {
-            fail fmt!("position %u does not resolve to a source location", pos)
+            fail fmt!("position %u does not resolve to a source location",
+                      pos.to_uint())
        }
        let f = self.files[a];
        a = 0u;
        b = vec::len(f.lines);
        while b - a > 1u {
            let m = (a + b) / 2u;
-            if lookup(f.lines[m]) > pos { b = m; } else { a = m; }
+            if lookup(f.lines[m]) > pos.to_uint() { b = m; } else { a = m; }
        }
        return {fm: f, line: a};
     }
 
-    fn lookup_pos(@self, pos: uint, lookup: LookupFn) -> Loc {
+    fn lookup_pos<A: Pos Num>(@self, pos: A, lookup: LookupFn) -> Loc<A> {
         let {fm: f, line: a} = self.lookup_line(pos, lookup);
-        return Loc {file: f, line: a + 1u, col: pos - lookup(f.lines[a])};
+        return Loc {
+            file: f,
+            line: a + 1u,
+            col: pos - from_uint(lookup(f.lines[a]))
+        };
     }
 
     fn span_to_str_no_adj(@self, sp: span) -> ~str {
         let lo = self.lookup_char_pos(sp.lo);
         let hi = self.lookup_char_pos(sp.hi);
         return fmt!("%s:%u:%u: %u:%u", lo.file.name,
-                    lo.line, lo.col, hi.line, hi.col)
+                    lo.line, lo.col.to_uint(), hi.line, hi.col.to_uint())
     }
 }
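A note on the conversion in the codemap hunks above: lookup_byte_offset turns a character position into a byte position by finding the line that contains it and counting how many bytes the first `col` characters of that line occupy (str::count_bytes in the diff). A minimal sketch of that calculation in today's Rust syntax, with purely illustrative names:

    // Hedged sketch of the char-to-byte conversion lookup_byte_offset performs.
    fn char_col_to_byte_offset(line_src: &str, line_start_byte: usize, col_chars: usize) -> usize {
        // Sum the UTF-8 widths of the first `col_chars` characters of the line.
        let col_bytes: usize = line_src
            .chars()
            .take(col_chars)
            .map(|c| c.len_utf8())
            .sum();
        line_start_byte + col_bytes
    }

    fn main() {
        // "é" is 2 bytes in UTF-8, so 3 characters in cover 4 bytes.
        let line = "aébc";
        assert_eq!(char_col_to_byte_offset(line, 100, 3), 104);
        println!("ok");
    }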
@@ -245,7 +245,7 @@ fn highlight_lines(cm: @codemap::CodeMap, sp: span,
     while num > 0u { num /= 10u; digits += 1u; }
 
     // indent past |name:## | and the 0-offset column location
-    let mut left = str::len(fm.name) + digits + lo.col + 3u;
+    let mut left = str::len(fm.name) + digits + lo.col.to_uint() + 3u;
     let mut s = ~"";
     while left > 0u { str::push_char(&mut s, ' '); left -= 1u; }
 
@@ -253,7 +253,7 @@ fn highlight_lines(cm: @codemap::CodeMap, sp: span,
         let hi = cm.lookup_char_pos(sp.hi);
         if hi.col != lo.col {
             // the ^ already takes up one space
-            let mut width = hi.col - lo.col - 1u;
+            let mut width = hi.col.to_uint() - lo.col.to_uint() - 1u;
             while width > 0u { str::push_char(&mut s, '~'); width -= 1u; }
         }
         io::stderr().write_str(s + ~"\n");
@@ -178,7 +178,9 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
         }
         fn bt_pop() {
             match self.backtrace {
-                Some(@ExpandedFrom({call_site: span {expn_info: prev, _}, _})) => {
+                Some(@ExpandedFrom({
+                    call_site: span {expn_info: prev, _}, _
+                })) => {
                     self.backtrace = prev
                 }
                 _ => self.bug(~"tried to pop without a push")
@@ -4,7 +4,7 @@
 // something smarter.
 
 use ast::{ident, node_id};
-use ast_util::{ident_to_path, respan};
+use ast_util::{ident_to_path, respan, dummy_sp};
 use codemap::span;
 use ext::base::mk_ctxt;
 
@@ -23,10 +23,6 @@ fn path(ids: ~[ident], span: span) -> @ast::path {
            types: ~[]}
 }
 
-fn empty_span() -> span {
-    span {lo: 0, hi: 0, expn_info: None}
-}
-
 trait append_types {
     fn add_ty(ty: @ast::Ty) -> @ast::path;
     fn add_tys(+tys: ~[@ast::Ty]) -> @ast::path;
@@ -83,26 +79,21 @@ trait ext_ctxt_ast_builder {
     fn stmt_let(ident: ident, e: @ast::expr) -> @ast::stmt;
     fn stmt_expr(e: @ast::expr) -> @ast::stmt;
     fn block_expr(b: ast::blk) -> @ast::expr;
-    fn empty_span() -> span;
     fn ty_option(ty: @ast::Ty) -> @ast::Ty;
 }
 
 impl ext_ctxt: ext_ctxt_ast_builder {
     fn ty_option(ty: @ast::Ty) -> @ast::Ty {
         self.ty_path_ast_builder(path(~[self.ident_of(~"Option")],
-                                      self.empty_span())
+                                      dummy_sp())
                                  .add_ty(ty))
     }
 
-    fn empty_span() -> span {
-        span {lo: 0, hi: 0, expn_info: self.backtrace()}
-    }
-
     fn block_expr(b: ast::blk) -> @ast::expr {
         @{id: self.next_id(),
           callee_id: self.next_id(),
           node: ast::expr_block(b),
-          span: self.empty_span()}
+          span: dummy_sp()}
     }
 
     fn move_expr(e: @ast::expr) -> @ast::expr {
@@ -114,7 +105,7 @@ impl ext_ctxt: ext_ctxt_ast_builder {
 
     fn stmt_expr(e: @ast::expr) -> @ast::stmt {
         @{node: ast::stmt_expr(e, self.next_id()),
-          span: self.empty_span()}
+          span: dummy_sp()}
     }
 
     fn stmt_let(ident: ident, e: @ast::expr) -> @ast::stmt {
@@ -130,43 +121,43 @@ impl ext_ctxt: ext_ctxt_ast_builder {
                      pat: @{id: self.next_id(),
                             node: ast::pat_ident(ast::bind_by_implicit_ref,
                                                  path(~[ident],
-                                                      self.empty_span()),
+                                                      dummy_sp()),
                                                  None),
-                            span: self.empty_span()},
+                            span: dummy_sp()},
                      init: Some(self.move_expr(e)),
                      id: self.next_id()},
-                 span: self.empty_span()}]),
-            span: self.empty_span()}, self.next_id()),
-         span: self.empty_span()}
+                 span: dummy_sp()}]),
+            span: dummy_sp()}, self.next_id()),
+         span: dummy_sp()}
     }
 
     fn field_imm(name: ident, e: @ast::expr) -> ast::field {
         {node: {mutbl: ast::m_imm, ident: name, expr: e},
-         span: self.empty_span()}
+         span: dummy_sp()}
     }
 
     fn rec(+fields: ~[ast::field]) -> @ast::expr {
         @{id: self.next_id(),
          callee_id: self.next_id(),
          node: ast::expr_rec(fields, None),
-         span: self.empty_span()}
+         span: dummy_sp()}
     }
 
     fn ty_field_imm(name: ident, ty: @ast::Ty) -> ast::ty_field {
         {node: {ident: name, mt: { ty: ty, mutbl: ast::m_imm } },
-         span: self.empty_span()}
+         span: dummy_sp()}
     }
 
     fn ty_rec(+fields: ~[ast::ty_field]) -> @ast::Ty {
         @{id: self.next_id(),
          node: ast::ty_rec(fields),
-         span: self.empty_span()}
+         span: dummy_sp()}
     }
 
     fn ty_infer() -> @ast::Ty {
         @{id: self.next_id(),
         node: ast::ty_infer,
-         span: self.empty_span()}
+         span: dummy_sp()}
     }
 
     fn ty_param(id: ast::ident, +bounds: ~[ast::ty_param_bound])
@@ -181,9 +172,9 @@ impl ext_ctxt: ext_ctxt_ast_builder {
                pat: @{id: self.next_id(),
                       node: ast::pat_ident(
                           ast::bind_by_value,
-                          ast_util::ident_to_path(self.empty_span(), name),
+                          ast_util::ident_to_path(dummy_sp(), name),
                           None),
-                      span: self.empty_span()},
+                      span: dummy_sp()},
                id: self.next_id()}
     }
 
@@ -195,7 +186,7 @@ impl ext_ctxt: ext_ctxt_ast_builder {
                    rules: ast::default_blk};
 
         {node: blk,
-         span: self.empty_span()}
+         span: dummy_sp()}
     }
 
     fn expr_block(e: @ast::expr) -> ast::blk {
@@ -215,11 +206,11 @@ impl ext_ctxt: ext_ctxt_ast_builder {
 
         // XXX: Would be nice if our generated code didn't violate
         // Rust coding conventions
-        let non_camel_case_attribute = respan(self.empty_span(), {
+        let non_camel_case_attribute = respan(dummy_sp(), {
            style: ast::attr_outer,
-           value: respan(self.empty_span(),
+           value: respan(dummy_sp(),
                          ast::meta_list(~"allow", ~[
-                             @respan(self.empty_span(),
+                             @respan(dummy_sp(),
                                      ast::meta_word(~"non_camel_case_types"))
                          ])),
            is_sugared_doc: false
@@ -239,7 +230,7 @@ impl ext_ctxt: ext_ctxt_ast_builder {
              +ty_params: ~[ast::ty_param],
             +body: ast::blk) -> @ast::item {
         self.item(name,
-                  self.empty_span(),
+                  dummy_sp(),
                   ast::item_fn(self.fn_decl(inputs, output),
                                ast::impure_fn,
                                ty_params,
@@ -298,7 +289,7 @@ impl ext_ctxt: ext_ctxt_ast_builder {
     fn ty_nil_ast_builder() -> @ast::Ty {
         @{id: self.next_id(),
         node: ast::ty_nil,
-         span: self.empty_span()}
+         span: dummy_sp()}
     }
 
     fn item_ty_poly(name: ident,
@@ -314,6 +305,6 @@ impl ext_ctxt: ext_ctxt_ast_builder {
 
     fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::Ty] {
         ty_params.map(|p| self.ty_path_ast_builder(
-            path(~[p.ident], self.empty_span())))
+            path(~[p.ident], dummy_sp())))
     }
 }
@@ -22,7 +22,6 @@ that.
 use ext::base::ext_ctxt;
 
 use proto::{state, protocol, next_state};
-use ast_builder::empty_span;
 
 impl ext_ctxt: proto::visitor<(), (), ()> {
     fn visit_proto(_proto: protocol,
@@ -29,8 +29,6 @@ updating the states using rule (2) until there are no changes.
 
 use std::bitv::{Bitv};
 
-use ast_builder::empty_span;
-
 fn analyze(proto: protocol, _cx: ext_ctxt) {
     debug!("initializing colive analysis");
     let num_states = proto.num_states();
@@ -5,6 +5,7 @@ use to_str::ToStr;
 use dvec::DVec;
 
 use ast::ident;
+use ast_util::dummy_sp;
 use util::interner;
 use print::pprust;
 use pprust::{item_to_str, ty_to_str};
@@ -12,7 +13,7 @@ use ext::base::{mk_ctxt, ext_ctxt};
 use parse::*;
 use proto::*;
 
-use ast_builder::{append_types, path, empty_span};
+use ast_builder::{append_types, path};
 
 // Transitional reexports so qquote can find the paths it is looking for
 mod syntax {
@@ -256,11 +257,11 @@ impl state: to_type_decls {
               cx.ty_path_ast_builder(
                   path(~[cx.ident_of(~"pipes"),
                          cx.ident_of(dir.to_str() + ~"Packet")],
-                       empty_span())
+                       dummy_sp())
                   .add_ty(cx.ty_path_ast_builder(
                       path(~[cx.ident_of(self.proto.name),
                              self.data_name()],
-                           empty_span())
+                           dummy_sp())
                       .add_tys(cx.ty_vars(self.ty_params))))),
           self.ty_params));
     }
@@ -273,11 +274,11 @@ impl state: to_type_decls {
                   path(~[cx.ident_of(~"pipes"),
                          cx.ident_of(dir.to_str()
                                      + ~"PacketBuffered")],
-                       empty_span())
+                       dummy_sp())
                   .add_tys(~[cx.ty_path_ast_builder(
                       path(~[cx.ident_of(self.proto.name),
                              self.data_name()],
-                           empty_span())
+                           dummy_sp())
                       .add_tys(cx.ty_vars(self.ty_params))),
                      self.proto.buffer_ty_path(cx)])),
           self.ty_params));
@@ -394,7 +395,7 @@ impl protocol: gen_init {
 
         cx.item_ty_poly(
             cx.ident_of(~"__Buffer"),
-            cx.empty_span(),
+            dummy_sp(),
            cx.ty_rec(fields),
            params)
     }
@@ -4,6 +4,7 @@ use parse::parser;
 use parse::parser::{Parser, parse_from_source_str};
 use dvec::DVec;
 use parse::token::ident_interner;
+use codemap::CharPos;
 
 use fold::*;
 use visit::*;
@@ -15,13 +16,13 @@ use io::*;
 use codemap::span;
 
 struct gather_item {
-    lo: uint,
-    hi: uint,
+    lo: CharPos,
+    hi: CharPos,
     e: @ast::expr,
     constr: ~str
 }
 
-type aq_ctxt = @{lo: uint, gather: DVec<gather_item>};
+type aq_ctxt = @{lo: CharPos, gather: DVec<gather_item>};
 enum fragment {
     from_expr(@ast::expr),
     from_ty(@ast::Ty)
@@ -114,7 +115,7 @@ impl @ast::pat: qq_helper {
     fn get_fold_fn() -> ~str {~"fold_pat"}
 }
 
-fn gather_anti_quotes<N: qq_helper>(lo: uint, node: N) -> aq_ctxt
+fn gather_anti_quotes<N: qq_helper>(lo: CharPos, node: N) -> aq_ctxt
 {
     let v = @{visit_expr: |node, &&cx, v| visit_aq(node, ~"from_expr", cx, v),
               visit_ty: |node, &&cx, v| visit_aq(node, ~"from_ty", cx, v),
@@ -226,7 +227,8 @@ fn finish<T: qq_helper>
     let mut str2 = ~"";
     enum state {active, skip(uint), blank};
     let mut state = active;
-    let mut i = 0u, j = 0u;
+    let mut i = CharPos(0u);
+    let mut j = 0u;
     let g_len = cx.gather.len();
     for str::chars_each(*str) |ch| {
         if (j < g_len && i == cx.gather[j].lo) {
@@ -242,7 +244,7 @@ fn finish<T: qq_helper>
             blank if is_space(ch) => str::push_char(&mut str2, ch),
             blank => str::push_char(&mut str2, ' ')
         }
-        i += 1u;
+        i += CharPos(1u);
         if (j < g_len && i == cx.gather[j].hi) {
             assert ch == ')';
             state = active;
@@ -270,7 +272,7 @@ fn finish<T: qq_helper>
                            ~"qquote", ~"mk_file_substr"]),
                  ~[mk_uniq_str(cx,sp, loc.file.name),
                    mk_uint(cx,sp, loc.line),
-                   mk_uint(cx,sp, loc.col)]),
+                   mk_uint(cx,sp, loc.col.to_uint())]),
                mk_unary(cx,sp, ast::box(ast::m_imm),
                         mk_uniq_str(cx,sp, str2)),
                cfg_call(),
@@ -346,7 +348,7 @@ fn replace_ty(repls: ~[fragment],
 
 fn mk_file_substr(fname: ~str, line: uint, col: uint) ->
     codemap::FileSubstr {
-    codemap::FssExternal({filename: fname, line: line, col: col})
+    codemap::FssExternal({filename: fname, line: line, col: CharPos(col)})
 }
 
 // Local Variables:
@@ -25,7 +25,7 @@ fn expand_col(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
               _body: ast::mac_body) -> @ast::expr {
     get_mac_args(cx, sp, arg, 0u, option::Some(0u), ~"col");
     let loc = cx.codemap().lookup_char_pos(sp.lo);
-    return mk_uint(cx, sp, loc.col);
+    return mk_uint(cx, sp, loc.col.to_uint());
 }
 
 /* file!(): expands to the current filename */
@@ -11,6 +11,7 @@ use dvec::DVec;
 use ast::{matcher, match_tok, match_seq, match_nonterminal, ident};
 use ast_util::mk_sp;
 use std::map::HashMap;
+use codemap::CharPos;
 
 /* This is an Earley-like parser, without support for in-grammar nonterminals,
 only by calling out to the main rust parser for named nonterminals (which it
@@ -102,7 +103,7 @@ type matcher_pos = ~{
     mut up: matcher_pos_up, // mutable for swapping only
     matches: ~[DVec<@named_match>],
     match_lo: uint, match_hi: uint,
-    sp_lo: uint,
+    sp_lo: CharPos,
 };
 
 fn copy_up(&& mpu: matcher_pos_up) -> matcher_pos {
@@ -122,7 +123,7 @@ fn count_names(ms: &[matcher]) -> uint {
 }
 
 #[allow(non_implicitly_copyable_typarams)]
-fn initial_matcher_pos(ms: ~[matcher], sep: Option<Token>, lo: uint)
+fn initial_matcher_pos(ms: ~[matcher], sep: Option<Token>, lo: CharPos)
     -> matcher_pos {
     let mut match_idx_hi = 0u;
     for ms.each() |elt| {
@@ -53,7 +53,7 @@ fn new_tt_reader(sp_diag: span_handler, itr: @ident_interner,
         mut repeat_len: ~[],
         /* dummy values, never read: */
         mut cur_tok: EOF,
-        mut cur_span: ast_util::mk_sp(0u,0u)
+        mut cur_span: ast_util::dummy_sp()
     };
     tt_next_token(r); /* get cur_tok and cur_span set up */
     return r;
@@ -20,7 +20,7 @@ use util::interner;
 use diagnostic::{span_handler, mk_span_handler, mk_handler, emitter};
 use lexer::{reader, string_reader};
 use parse::token::{ident_interner, mk_ident_interner};
-use codemap::{CodeMap, FileMap};
+use codemap::{CodeMap, FileMap, CharPos, BytePos};
 
 type parse_sess = @{
     cm: @codemap::CodeMap,
@@ -28,8 +28,8 @@ type parse_sess = @{
     span_diagnostic: span_handler,
     interner: @ident_interner,
     // these two must be kept up to date
-    mut chpos: uint,
-    mut byte_pos: uint
+    mut chpos: CharPos,
+    mut byte_pos: BytePos
 };
 
 fn new_parse_sess(demitter: Option<emitter>) -> parse_sess {
@@ -38,7 +38,7 @@ fn new_parse_sess(demitter: Option<emitter>) -> parse_sess {
             mut next_id: 1,
             span_diagnostic: mk_span_handler(mk_handler(demitter), cm),
             interner: mk_ident_interner(),
-            mut chpos: 0u, mut byte_pos: 0u};
+            mut chpos: CharPos(0u), mut byte_pos: BytePos(0u)};
 }
 
 fn new_parse_sess_special_handler(sh: span_handler, cm: @codemap::CodeMap)
@@ -47,7 +47,7 @@ fn new_parse_sess_special_handler(sh: span_handler, cm: @codemap::CodeMap)
             mut next_id: 1,
             span_diagnostic: sh,
             interner: mk_ident_interner(),
-            mut chpos: 0u, mut byte_pos: 0u};
+            mut chpos: CharPos(0u), mut byte_pos: BytePos(0u)};
 }
 
 fn parse_crate_from_file(input: &Path, cfg: ast::crate_cfg,
@@ -14,7 +14,7 @@ trait parser_attr {
         -> attr_or_ext;
     fn parse_outer_attributes() -> ~[ast::attribute];
     fn parse_attribute(style: ast::attr_style) -> ast::attribute;
-    fn parse_attribute_naked(style: ast::attr_style, lo: uint) ->
+    fn parse_attribute_naked(style: ast::attr_style, lo: CharPos) ->
         ast::attribute;
     fn parse_inner_attrs_and_next() ->
         {inner: ~[ast::attribute], next: ~[ast::attribute]};
@@ -85,7 +85,7 @@ impl Parser: parser_attr {
         return self.parse_attribute_naked(style, lo);
     }
 
-    fn parse_attribute_naked(style: ast::attr_style, lo: uint) ->
+    fn parse_attribute_naked(style: ast::attr_style, lo: CharPos) ->
         ast::attribute {
         self.expect(token::LBRACKET);
         let meta_item = self.parse_meta_item();
@@ -3,7 +3,7 @@ use io::ReaderUtil;
 use util::interner;
 use lexer::{string_reader, bump, is_eof, nextch,
             is_whitespace, get_str_from, reader};
-use codemap::FileMap;
+use codemap::{FileMap, CharPos};
 
 export cmnt;
 export lit;
@@ -28,7 +28,7 @@ impl cmnt_style : cmp::Eq {
     }
 }
 
-type cmnt = {style: cmnt_style, lines: ~[~str], pos: uint};
+type cmnt = {style: cmnt_style, lines: ~[~str], pos: CharPos};
 
 fn is_doc_comment(s: ~str) -> bool {
     s.starts_with(~"///") ||
@@ -137,7 +137,7 @@ fn push_blank_line_comment(rdr: string_reader, comments: &mut ~[cmnt]) {
 fn consume_whitespace_counting_blank_lines(rdr: string_reader,
                                            comments: &mut ~[cmnt]) {
     while is_whitespace(rdr.curr) && !is_eof(rdr) {
-        if rdr.col == 0u && rdr.curr == '\n' {
+        if rdr.col == CharPos(0u) && rdr.curr == '\n' {
            push_blank_line_comment(rdr, comments);
         }
         bump(rdr);
@@ -181,6 +181,8 @@ fn read_line_comments(rdr: string_reader, code_to_the_left: bool,
     }
 }
 
+// FIXME #3961: This is not the right way to convert string byte
+// offsets to characters.
 fn all_whitespace(s: ~str, begin: uint, end: uint) -> bool {
     let mut i: uint = begin;
     while i != end {
@@ -190,9 +192,11 @@ fn all_whitespace(s: ~str, begin: uint, end: uint) -> bool {
 }
 
 fn trim_whitespace_prefix_and_push_line(lines: &mut ~[~str],
-                                        s: ~str, col: uint) {
+                                        s: ~str, col: CharPos) {
     let mut s1;
     let len = str::len(s);
+    // FIXME #3961: Doing bytewise comparison and slicing with CharPos
+    let col = col.to_uint();
     if all_whitespace(s, 0u, uint::min(len, col)) {
         if col < len {
             s1 = str::slice(s, col, len);
@@ -207,7 +211,7 @@ fn read_block_comment(rdr: string_reader, code_to_the_left: bool,
     debug!(">>> block comment");
     let p = rdr.chpos;
     let mut lines: ~[~str] = ~[];
-    let mut col: uint = rdr.col;
+    let mut col: CharPos = rdr.col;
     bump(rdr);
     bump(rdr);
 
@@ -280,7 +284,7 @@ fn consume_comment(rdr: string_reader, code_to_the_left: bool,
     debug!("<<< consume comment");
 }
 
-type lit = {lit: ~str, pos: uint};
+type lit = {lit: ~str, pos: CharPos};
 
 fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
                                 path: ~str,
@@ -289,7 +293,8 @@ fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
     let src = @str::from_bytes(srdr.read_whole_stream());
     let itr = parse::token::mk_fake_ident_interner();
     let rdr = lexer::new_low_level_string_reader
-        (span_diagnostic, @FileMap::new(path, src, 0u, 0u), itr);
+        (span_diagnostic, @FileMap::new(path, src,
+                                        CharPos(0u), BytePos(0u)), itr);
 
     let mut comments: ~[cmnt] = ~[];
     let mut literals: ~[lit] = ~[];
@@ -205,7 +205,7 @@ impl Parser: parser_common {
         if self.token == token::GT {
             self.bump();
         } else if self.token == token::BINOP(token::SHR) {
-            self.swap(token::GT, self.span.lo + 1u, self.span.hi);
+            self.swap(token::GT, self.span.lo + CharPos(1u), self.span.hi);
         } else {
             let mut s: ~str = ~"expected `";
             s += token_to_str(self.reader, token::GT);
@@ -1,5 +1,5 @@
 use diagnostic::span_handler;
-use codemap::{span, CodeMap};
+use codemap::{span, CodeMap, CharPos, BytePos};
 use ext::tt::transcribe::{tt_reader, new_tt_reader, dup_tt_reader,
                           tt_next_token};
 
@@ -21,10 +21,10 @@ trait reader {
 type string_reader = @{
     span_diagnostic: span_handler,
     src: @~str,
-    mut col: uint,
-    mut pos: uint,
+    mut col: CharPos,
+    mut pos: BytePos,
     mut curr: char,
-    mut chpos: uint,
+    mut chpos: CharPos,
     filemap: @codemap::FileMap,
     interner: @token::ident_interner,
     /* cached: */
@@ -46,15 +46,15 @@ fn new_low_level_string_reader(span_diagnostic: span_handler,
                                itr: @token::ident_interner)
     -> string_reader {
     let r = @{span_diagnostic: span_diagnostic, src: filemap.src,
-              mut col: 0u, mut pos: 0u, mut curr: -1 as char,
+              mut col: CharPos(0), mut pos: BytePos(0), mut curr: -1 as char,
              mut chpos: filemap.start_pos.ch,
             filemap: filemap, interner: itr,
             /* dummy values; not read */
            mut peek_tok: token::EOF,
-              mut peek_span: ast_util::mk_sp(0u,0u)};
-    if r.pos < (*filemap.src).len() {
-        let next = str::char_range_at(*r.src, r.pos);
-        r.pos = next.next;
+              mut peek_span: ast_util::dummy_sp()};
+    if r.pos.to_uint() < (*filemap.src).len() {
+        let next = str::char_range_at(*r.src, r.pos.to_uint());
+        r.pos = BytePos(next.next);
         r.curr = next.ch;
     }
     return r;
@@ -124,27 +124,27 @@ fn string_advance_token(&&r: string_reader) {
 
 }
 
-fn get_str_from(rdr: string_reader, start: uint) -> ~str unsafe {
+fn get_str_from(rdr: string_reader, start: BytePos) -> ~str unsafe {
     // I'm pretty skeptical about this subtraction. What if there's a
     // multi-byte character before the mark?
-    return str::slice(*rdr.src, start - 1u, rdr.pos - 1u);
+    return str::slice(*rdr.src, start.to_uint() - 1u, rdr.pos.to_uint() - 1u);
 }
 
 fn bump(rdr: string_reader) {
-    if rdr.pos < (*rdr.src).len() {
-        rdr.col += 1u;
-        rdr.chpos += 1u;
+    if rdr.pos.to_uint() < (*rdr.src).len() {
+        rdr.col += CharPos(1u);
+        rdr.chpos += CharPos(1u);
         if rdr.curr == '\n' {
             rdr.filemap.next_line(rdr.chpos, rdr.pos);
-            rdr.col = 0u;
+            rdr.col = CharPos(0u);
         }
-        let next = str::char_range_at(*rdr.src, rdr.pos);
-        rdr.pos = next.next;
+        let next = str::char_range_at(*rdr.src, rdr.pos.to_uint());
+        rdr.pos = BytePos(next.next);
         rdr.curr = next.ch;
     } else {
         if (rdr.curr != -1 as char) {
-            rdr.col += 1u;
-            rdr.chpos += 1u;
+            rdr.col += CharPos(1u);
+            rdr.chpos += CharPos(1u);
             rdr.curr = -1 as char;
         }
     }
@@ -153,8 +153,8 @@ fn is_eof(rdr: string_reader) -> bool {
     rdr.curr == -1 as char
 }
 fn nextch(rdr: string_reader) -> char {
-    if rdr.pos < (*rdr.src).len() {
-        return str::char_at(*rdr.src, rdr.pos);
+    if rdr.pos.to_uint() < (*rdr.src).len() {
+        return str::char_at(*rdr.src, rdr.pos.to_uint());
     } else { return -1 as char; }
 }
 
@@ -211,7 +211,7 @@ fn consume_any_line_comment(rdr: string_reader)
         bump(rdr);
         // line comments starting with "///" or "//!" are doc-comments
         if rdr.curr == '/' || rdr.curr == '!' {
-            let start_chpos = rdr.chpos - 2u;
+            let start_chpos = rdr.chpos - CharPos(2u);
             let mut acc = ~"//";
             while rdr.curr != '\n' && !is_eof(rdr) {
                 str::push_char(&mut acc, rdr.curr);
@@ -235,7 +235,7 @@ fn consume_any_line_comment(rdr: string_reader)
             let cmap = @CodeMap::new();
             (*cmap).files.push(rdr.filemap);
             let loc = cmap.lookup_char_pos_adj(rdr.chpos);
-            if loc.line == 1u && loc.col == 0u {
+            if loc.line == 1u && loc.col == CharPos(0u) {
                 while rdr.curr != '\n' && !is_eof(rdr) { bump(rdr); }
                 return consume_whitespace_and_comments(rdr);
             }
@@ -250,7 +250,7 @@ fn consume_block_comment(rdr: string_reader)
 
     // block comments starting with "/**" or "/*!" are doc-comments
     if rdr.curr == '*' || rdr.curr == '!' {
-        let start_chpos = rdr.chpos - 2u;
+        let start_chpos = rdr.chpos - CharPos(2u);
         let mut acc = ~"/*";
         while !(rdr.curr == '*' && nextch(rdr) == '/') && !is_eof(rdr) {
             str::push_char(&mut acc, rdr.curr);
@@ -584,7 +584,7 @@ fn next_token_inner(rdr: string_reader) -> token::Token {
         return token::LIT_INT(c2 as i64, ast::ty_char);
       }
       '"' => {
-        let n = rdr.chpos;
+        let n = rdr.pos;
        bump(rdr);
        while rdr.curr != '"' {
            if is_eof(rdr) {
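The reader above now advances two counters of different types on every character: chpos (a CharPos) by one, and pos (a BytePos) by the character's UTF-8 width. A small illustrative sketch of that invariant in today's Rust syntax, with a hypothetical Reader type that is not the compiler's own:

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct CharPos(usize);
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct BytePos(usize);

    struct Reader { chpos: CharPos, pos: BytePos }

    impl Reader {
        // Consume one character: char position grows by 1, byte position by its width.
        fn bump(&mut self, c: char) {
            self.chpos = CharPos(self.chpos.0 + 1);
            self.pos = BytePos(self.pos.0 + c.len_utf8());
        }
    }

    fn main() {
        let mut r = Reader { chpos: CharPos(0), pos: BytePos(0) };
        for c in "aé".chars() { r.bump(c); }
        assert_eq!((r.chpos, r.pos), (CharPos(2), BytePos(3)));
    }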
@@ -5,7 +5,7 @@ use either::{Either, Left, Right};
 use std::map::HashMap;
 use token::{can_begin_expr, is_ident, is_ident_or_path, is_plain_ident,
             INTERPOLATED, special_idents};
-use codemap::{span,FssNone};
+use codemap::{span,FssNone, CharPos};
 use util::interner::Interner;
 use ast_util::{spanned, respan, mk_sp, ident_to_path, operator_prec};
 use lexer::reader;
@@ -244,7 +244,7 @@ impl Parser {
         self.token = next.tok;
         self.span = next.sp;
     }
-    fn swap(next: token::Token, lo: uint, hi: uint) {
+    fn swap(next: token::Token, +lo: CharPos, +hi: CharPos) {
         self.token = next;
         self.span = mk_sp(lo, hi);
     }
@@ -904,12 +904,12 @@ impl Parser {
         return spanned(lo, e.span.hi, {mutbl: m, ident: i, expr: e});
     }
 
-    fn mk_expr(lo: uint, hi: uint, +node: expr_) -> @expr {
+    fn mk_expr(+lo: CharPos, +hi: CharPos, +node: expr_) -> @expr {
         return @{id: self.get_id(), callee_id: self.get_id(),
                  node: node, span: mk_sp(lo, hi)};
     }
 
-    fn mk_mac_expr(lo: uint, hi: uint, m: mac_) -> @expr {
+    fn mk_mac_expr(+lo: CharPos, +hi: CharPos, m: mac_) -> @expr {
         return @{id: self.get_id(),
                  callee_id: self.get_id(),
                  node: expr_mac({node: m, span: mk_sp(lo, hi)}),
@@ -1134,7 +1134,7 @@ impl Parser {
         return self.mk_expr(lo, hi, ex);
     }
 
-    fn parse_block_expr(lo: uint, blk_mode: blk_check_mode) -> @expr {
+    fn parse_block_expr(lo: CharPos, blk_mode: blk_check_mode) -> @expr {
         self.expect(token::LBRACE);
         let blk = self.parse_block_tail(lo, blk_mode);
         return self.mk_expr(blk.span.lo, blk.span.hi, expr_block(blk));
@@ -1146,7 +1146,7 @@ impl Parser {
         return self.parse_syntax_ext_naked(lo);
     }
 
-    fn parse_syntax_ext_naked(lo: uint) -> @expr {
+    fn parse_syntax_ext_naked(lo: CharPos) -> @expr {
         match self.token {
             token::IDENT(_, _) => (),
             _ => self.fatal(~"expected a syntax expander name")
@@ -2279,11 +2279,11 @@ impl Parser {
     // I guess that also means "already parsed the 'impure'" if
     // necessary, and this should take a qualifier.
     // some blocks start with "#{"...
-    fn parse_block_tail(lo: uint, s: blk_check_mode) -> blk {
+    fn parse_block_tail(lo: CharPos, s: blk_check_mode) -> blk {
         self.parse_block_tail_(lo, s, ~[])
     }
 
-    fn parse_block_tail_(lo: uint, s: blk_check_mode,
+    fn parse_block_tail_(lo: CharPos, s: blk_check_mode,
                          +first_item_attrs: ~[attribute]) -> blk {
         let mut stmts = ~[];
         let mut expr = None;
@@ -2581,7 +2581,7 @@ impl Parser {
         return {ident: id, tps: ty_params};
     }
 
-    fn mk_item(lo: uint, hi: uint, +ident: ident,
+    fn mk_item(+lo: CharPos, +hi: CharPos, +ident: ident,
               +node: item_, vis: visibility,
              +attrs: ~[attribute]) -> @item {
         return @{ident: ident,
@@ -3037,7 +3037,7 @@ impl Parser {
                 items: items};
     }
 
-    fn parse_item_foreign_mod(lo: uint,
+    fn parse_item_foreign_mod(lo: CharPos,
                               visibility: visibility,
                               attrs: ~[attribute],
                               items_allowed: bool)
@@ -3092,7 +3092,7 @@ impl Parser {
         });
     }
 
-    fn parse_type_decl() -> {lo: uint, ident: ident} {
+    fn parse_type_decl() -> {lo: CharPos, ident: ident} {
         let lo = self.last_span.lo;
         let id = self.parse_ident();
         return {lo: lo, ident: id};
@@ -1,5 +1,5 @@
 use parse::{comments, lexer, token};
-use codemap::CodeMap;
+use codemap::{CodeMap, CharPos};
 use pp::{break_offset, word, printer, space, zerobreak, hardbreak, breaks};
 use pp::{consistent, inconsistent, eof};
 use ast::{required, provided};
@@ -631,7 +631,7 @@ fn print_variants(s: ps, variants: ~[ast::variant], span: ast::span) {
         print_variant(s, *v);
         word(s.s, ~",");
         end(s);
-        maybe_print_trailing_comment(s, v.span, None::<uint>);
+        maybe_print_trailing_comment(s, v.span, None);
     }
     bclose(s, span);
 }
@@ -886,7 +886,7 @@ fn print_stmt(s: ps, st: ast::stmt) {
       }
     }
     if parse::classify::stmt_ends_with_semi(st) { word(s.s, ~";"); }
-    maybe_print_trailing_comment(s, st.span, None::<uint>);
+    maybe_print_trailing_comment(s, st.span, None);
 }
 
 fn print_block(s: ps, blk: ast::blk) {
@@ -1898,7 +1898,7 @@ fn print_ty_fn(s: ps,
 }
 
 fn maybe_print_trailing_comment(s: ps, span: codemap::span,
-                                next_pos: Option<uint>) {
+                                next_pos: Option<CharPos>) {
     let mut cm;
     match s.cm { Some(ccm) => cm = ccm, _ => return }
     match next_comment(s) {
@@ -1906,7 +1906,7 @@ fn maybe_print_trailing_comment(s: ps, span: codemap::span,
         if cmnt.style != comments::trailing { return; }
         let span_line = cm.lookup_char_pos(span.hi);
         let comment_line = cm.lookup_char_pos(cmnt.pos);
-        let mut next = cmnt.pos + 1u;
+        let mut next = cmnt.pos + CharPos(1u);
         match next_pos { None => (), Some(p) => next = p }
        if span.hi < cmnt.pos && cmnt.pos < next &&
            span_line.line == comment_line.line {
@@ -1981,7 +1981,7 @@ fn lit_to_str(l: @ast::lit) -> ~str {
     return to_str(l, print_literal, parse::token::mk_fake_ident_interner());
 }
 
-fn next_lit(s: ps, pos: uint) -> Option<comments::lit> {
+fn next_lit(s: ps, pos: CharPos) -> Option<comments::lit> {
     match s.literals {
       Some(lits) => {
         while s.cur_lit < vec::len(lits) {
@@ -1996,7 +1996,7 @@ fn next_lit(s: ps, pos: uint) -> Option<comments::lit> {
       }
     }
 
-fn maybe_print_comment(s: ps, pos: uint) {
+fn maybe_print_comment(s: ps, pos: CharPos) {
     loop {
         match next_comment(s) {
           Some(cmnt) => {