librustc: Lots of de-muting. rs=demuting

commit 472797b04a (parent 2bc9655bc1)
63 changed files with 2434 additions and 2086 deletions
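
The pattern repeated throughout this diff, sketched here in the pre-1.0 Rust
dialect the code uses (taken from the Session_ hunks later in the diff, not a
complete excerpt): interior `mut` fields reached through an immutable @-box
become immutable fields whose mutability lives in the box type, and methods
take `@mut self` or `&mut self` rather than relying on interior mutability.

    // before de-muting: mutability sits on individual fields
    pub struct Session_ {
        mut building_library: bool,
        // ...
    }
    pub enum Session { Session_(@Session_) }

    // after de-muting: fields are immutable; the boxes are @mut
    pub struct Session_ {
        building_library: @mut bool,
        // ...
    }
    pub type Session = @Session_;

Call sites then dereference the box (for example `*sess.building_library`)
instead of mutating through an @Session_.
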
@@ -81,9 +81,9 @@ pub fn init(root: &Path) {
let p = root.push("gpg");
if !os::path_is_dir(&p) {
os::make_dir(&p, 0x1c0i32);
let p = run::start_program(~"gpg", ~[~"--homedir",
p.to_str(),
~"--import"]);
let mut p = run::start_program(~"gpg", ~[~"--homedir",
p.to_str(),
~"--import"]);
p.input().write_str(signing_key());
let s = p.finish();
if s != 0 {

@@ -24,26 +24,26 @@ use option::{None, Option, Some};
use option;
use vec;

pub type DListLink<T> = Option<@DListNode<T>>;
pub type DListLink<T> = Option<@mut DListNode<T>>;

pub struct DListNode<T> {
data: T,
mut linked: bool, // for assertions
mut prev: DListLink<T>,
mut next: DListLink<T>,
linked: bool, // for assertions
prev: DListLink<T>,
next: DListLink<T>,
}

pub struct DList<T> {
mut size: uint,
mut hd: DListLink<T>,
mut tl: DListLink<T>,
size: uint,
hd: DListLink<T>,
tl: DListLink<T>,
}

priv impl<T> DListNode<T> {
pure fn assert_links(@self) {
pure fn assert_links(@mut self) {
match self.next {
Some(neighbour) => match neighbour.prev {
Some(me) => if !managed::ptr_eq(self, me) {
Some(me) => if !managed::mut_ptr_eq(self, me) {
die!(~"Asymmetric next-link in dlist node.")
},
None => die!(~"One-way next-link in dlist node.")

@ -52,7 +52,7 @@ priv impl<T> DListNode<T> {
|
|||
}
|
||||
match self.prev {
|
||||
Some(neighbour) => match neighbour.next {
|
||||
Some(me) => if !managed::ptr_eq(me, self) {
|
||||
Some(me) => if !managed::mut_ptr_eq(me, self) {
|
||||
die!(~"Asymmetric prev-link in dlist node.")
|
||||
},
|
||||
None => die!(~"One-way prev-link in dlist node.")
|
||||
|
@ -64,24 +64,24 @@ priv impl<T> DListNode<T> {
|
|||
|
||||
impl<T> DListNode<T> {
|
||||
/// Get the next node in the list, if there is one.
|
||||
pure fn next_link(@self) -> DListLink<T> {
|
||||
pure fn next_link(@mut self) -> DListLink<T> {
|
||||
self.assert_links();
|
||||
self.next
|
||||
}
|
||||
/// Get the next node in the list, failing if there isn't one.
|
||||
pure fn next_node(@self) -> @DListNode<T> {
|
||||
pure fn next_node(@mut self) -> @mut DListNode<T> {
|
||||
match self.next_link() {
|
||||
Some(nobe) => nobe,
|
||||
None => die!(~"This dlist node has no next neighbour.")
|
||||
}
|
||||
}
|
||||
/// Get the previous node in the list, if there is one.
|
||||
pure fn prev_link(@self) -> DListLink<T> {
|
||||
pure fn prev_link(@mut self) -> DListLink<T> {
|
||||
self.assert_links();
|
||||
self.prev
|
||||
}
|
||||
/// Get the previous node in the list, failing if there isn't one.
|
||||
pure fn prev_node(@self) -> @DListNode<T> {
|
||||
pure fn prev_node(@mut self) -> @mut DListNode<T> {
|
||||
match self.prev_link() {
|
||||
Some(nobe) => nobe,
|
||||
None => die!(~"This dlist node has no previous neighbour.")
|
||||
|
@ -90,23 +90,23 @@ impl<T> DListNode<T> {
|
|||
}
|
||||
|
||||
/// Creates a new dlist node with the given data.
|
||||
pub pure fn new_dlist_node<T>(data: T) -> @DListNode<T> {
|
||||
@DListNode { data: data, linked: false, prev: None, next: None }
|
||||
pub pure fn new_dlist_node<T>(data: T) -> @mut DListNode<T> {
|
||||
@mut DListNode { data: data, linked: false, prev: None, next: None }
|
||||
}
|
||||
|
||||
/// Creates a new, empty dlist.
|
||||
pub pure fn DList<T>() -> @DList<T> {
|
||||
@DList { size: 0, hd: None, tl: None }
|
||||
pub pure fn DList<T>() -> @mut DList<T> {
|
||||
@mut DList { size: 0, hd: None, tl: None }
|
||||
}
|
||||
|
||||
/// Creates a new dlist with a single element
|
||||
pub pure fn from_elem<T>(data: T) -> @DList<T> {
|
||||
pub pure fn from_elem<T>(data: T) -> @mut DList<T> {
|
||||
let list = DList();
|
||||
unsafe { list.push(data); }
|
||||
list
|
||||
}
|
||||
|
||||
pub fn from_vec<T: Copy>(vec: &[T]) -> @DList<T> {
|
||||
pub fn from_vec<T: Copy>(vec: &[T]) -> @mut DList<T> {
|
||||
do vec::foldl(DList(), vec) |list,data| {
|
||||
list.push(*data); // Iterating left-to-right -- add newly to the tail.
|
||||
list
|
||||
|
@ -115,7 +115,7 @@ pub fn from_vec<T: Copy>(vec: &[T]) -> @DList<T> {
|
|||
|
||||
/// Produce a list from a list of lists, leaving no elements behind in the
|
||||
/// input. O(number of sub-lists).
|
||||
pub fn concat<T>(lists: @DList<@DList<T>>) -> @DList<T> {
|
||||
pub fn concat<T>(lists: @mut DList<@mut DList<T>>) -> @mut DList<T> {
|
||||
let result = DList();
|
||||
while !lists.is_empty() {
|
||||
result.append(lists.pop().get());
|
||||
|
@ -125,9 +125,14 @@ pub fn concat<T>(lists: @DList<@DList<T>>) -> @DList<T> {
|
|||
|
||||
priv impl<T> DList<T> {
|
||||
static pure fn new_link(data: T) -> DListLink<T> {
|
||||
Some(@DListNode { data: data, linked: true, prev: None, next: None })
|
||||
Some(@mut DListNode {
|
||||
data: data,
|
||||
linked: true,
|
||||
prev: None,
|
||||
next: None
|
||||
})
|
||||
}
|
||||
pure fn assert_mine(@self, nobe: @DListNode<T>) {
|
||||
pure fn assert_mine(@mut self, nobe: @mut DListNode<T>) {
|
||||
// These asserts could be stronger if we had node-root back-pointers,
|
||||
// but those wouldn't allow for O(1) append.
|
||||
if self.size == 0 {
|
||||
|
@ -135,15 +140,15 @@ priv impl<T> DList<T> {
|
|||
}
|
||||
if !nobe.linked { die!(~"That node isn't linked to any dlist.") }
|
||||
if !((nobe.prev.is_some()
|
||||
|| managed::ptr_eq(self.hd.expect(~"headless dlist?"),
|
||||
|| managed::mut_ptr_eq(self.hd.expect(~"headless dlist?"),
|
||||
nobe)) &&
|
||||
(nobe.next.is_some()
|
||||
|| managed::ptr_eq(self.tl.expect(~"tailless dlist?"),
|
||||
|| managed::mut_ptr_eq(self.tl.expect(~"tailless dlist?"),
|
||||
nobe))) {
|
||||
die!(~"That node isn't on this dlist.")
|
||||
}
|
||||
}
|
||||
fn make_mine(nobe: @DListNode<T>) {
|
||||
fn make_mine(nobe: @mut DListNode<T>) {
|
||||
if nobe.prev.is_some() || nobe.next.is_some() || nobe.linked {
|
||||
die!(~"Cannot insert node that's already on a dlist!")
|
||||
}
|
||||
|
@ -152,7 +157,7 @@ priv impl<T> DList<T> {
|
|||
// Link two nodes together. If either of them are 'none', also sets
|
||||
// the head and/or tail pointers appropriately.
|
||||
#[inline(always)]
|
||||
fn link(before: DListLink<T>, after: DListLink<T>) {
|
||||
fn link(&mut self, before: DListLink<T>, after: DListLink<T>) {
|
||||
match before {
|
||||
Some(neighbour) => neighbour.next = after,
|
||||
None => self.hd = after
|
||||
|
@ -163,7 +168,7 @@ priv impl<T> DList<T> {
|
|||
}
|
||||
}
|
||||
// Remove a node from the list.
|
||||
fn unlink(@self, nobe: @DListNode<T>) {
|
||||
fn unlink(@mut self, nobe: @mut DListNode<T>) {
|
||||
self.assert_mine(nobe);
|
||||
assert self.size > 0;
|
||||
self.link(nobe.prev, nobe.next);
|
||||
|
@ -173,24 +178,28 @@ priv impl<T> DList<T> {
|
|||
self.size -= 1;
|
||||
}
|
||||
|
||||
fn add_head(@self, nobe: DListLink<T>) {
|
||||
fn add_head(@mut self, nobe: DListLink<T>) {
|
||||
self.link(nobe, self.hd); // Might set tail too.
|
||||
self.hd = nobe;
|
||||
self.size += 1;
|
||||
}
|
||||
fn add_tail(@self, nobe: DListLink<T>) {
|
||||
fn add_tail(@mut self, nobe: DListLink<T>) {
|
||||
self.link(self.tl, nobe); // Might set head too.
|
||||
self.tl = nobe;
|
||||
self.size += 1;
|
||||
}
|
||||
fn insert_left(@self, nobe: DListLink<T>, neighbour: @DListNode<T>) {
|
||||
fn insert_left(@mut self,
|
||||
nobe: DListLink<T>,
|
||||
neighbour: @mut DListNode<T>) {
|
||||
self.assert_mine(neighbour);
|
||||
assert self.size > 0;
|
||||
self.link(neighbour.prev, nobe);
|
||||
self.link(nobe, Some(neighbour));
|
||||
self.size += 1;
|
||||
}
|
||||
fn insert_right(@self, neighbour: @DListNode<T>, nobe: DListLink<T>) {
|
||||
fn insert_right(@mut self,
|
||||
neighbour: @mut DListNode<T>,
|
||||
nobe: DListLink<T>) {
|
||||
self.assert_mine(neighbour);
|
||||
assert self.size > 0;
|
||||
self.link(nobe, neighbour.next);
|
||||
|
@ -201,32 +210,32 @@ priv impl<T> DList<T> {
|
|||
|
||||
impl<T> DList<T> {
|
||||
/// Get the size of the list. O(1).
|
||||
pure fn len(@self) -> uint { self.size }
|
||||
pure fn len(@mut self) -> uint { self.size }
|
||||
/// Returns true if the list is empty. O(1).
|
||||
pure fn is_empty(@self) -> bool { self.len() == 0 }
|
||||
pure fn is_empty(@mut self) -> bool { self.len() == 0 }
|
||||
|
||||
/// Add data to the head of the list. O(1).
|
||||
fn push_head(@self, data: T) {
|
||||
fn push_head(@mut self, data: T) {
|
||||
self.add_head(DList::new_link(data));
|
||||
}
|
||||
/**
|
||||
* Add data to the head of the list, and get the new containing
|
||||
* node. O(1).
|
||||
*/
|
||||
fn push_head_n(@self, data: T) -> @DListNode<T> {
|
||||
fn push_head_n(@mut self, data: T) -> @mut DListNode<T> {
|
||||
let mut nobe = DList::new_link(data);
|
||||
self.add_head(nobe);
|
||||
nobe.get()
|
||||
}
|
||||
/// Add data to the tail of the list. O(1).
|
||||
fn push(@self, data: T) {
|
||||
fn push(@mut self, data: T) {
|
||||
self.add_tail(DList::new_link(data));
|
||||
}
|
||||
/**
|
||||
* Add data to the tail of the list, and get the new containing
|
||||
* node. O(1).
|
||||
*/
|
||||
fn push_n(@self, data: T) -> @DListNode<T> {
|
||||
fn push_n(@mut self, data: T) -> @mut DListNode<T> {
|
||||
let mut nobe = DList::new_link(data);
|
||||
self.add_tail(nobe);
|
||||
nobe.get()
|
||||
|
@ -235,14 +244,16 @@ impl<T> DList<T> {
|
|||
* Insert data into the middle of the list, left of the given node.
|
||||
* O(1).
|
||||
*/
|
||||
fn insert_before(@self, data: T, neighbour: @DListNode<T>) {
|
||||
fn insert_before(@mut self, data: T, neighbour: @mut DListNode<T>) {
|
||||
self.insert_left(DList::new_link(data), neighbour);
|
||||
}
|
||||
/**
|
||||
* Insert an existing node in the middle of the list, left of the
|
||||
* given node. O(1).
|
||||
*/
|
||||
fn insert_n_before(@self, nobe: @DListNode<T>, neighbour: @DListNode<T>) {
|
||||
fn insert_n_before(@mut self,
|
||||
nobe: @mut DListNode<T>,
|
||||
neighbour: @mut DListNode<T>) {
|
||||
self.make_mine(nobe);
|
||||
self.insert_left(Some(nobe), neighbour);
|
||||
}
|
||||
|
@ -251,10 +262,10 @@ impl<T> DList<T> {
|
|||
* and get its containing node. O(1).
|
||||
*/
|
||||
fn insert_before_n(
|
||||
@self,
|
||||
@mut self,
|
||||
data: T,
|
||||
neighbour: @DListNode<T>
|
||||
) -> @DListNode<T> {
|
||||
neighbour: @mut DListNode<T>
|
||||
) -> @mut DListNode<T> {
|
||||
let mut nobe = DList::new_link(data);
|
||||
self.insert_left(nobe, neighbour);
|
||||
nobe.get()
|
||||
|
@ -263,14 +274,16 @@ impl<T> DList<T> {
|
|||
* Insert data into the middle of the list, right of the given node.
|
||||
* O(1).
|
||||
*/
|
||||
fn insert_after(@self, data: T, neighbour: @DListNode<T>) {
|
||||
fn insert_after(@mut self, data: T, neighbour: @mut DListNode<T>) {
|
||||
self.insert_right(neighbour, DList::new_link(data));
|
||||
}
|
||||
/**
|
||||
* Insert an existing node in the middle of the list, right of the
|
||||
* given node. O(1).
|
||||
*/
|
||||
fn insert_n_after(@self, nobe: @DListNode<T>, neighbour: @DListNode<T>) {
|
||||
fn insert_n_after(@mut self,
|
||||
nobe: @mut DListNode<T>,
|
||||
neighbour: @mut DListNode<T>) {
|
||||
self.make_mine(nobe);
|
||||
self.insert_right(neighbour, Some(nobe));
|
||||
}
|
||||
|
@ -279,34 +292,34 @@ impl<T> DList<T> {
|
|||
* and get its containing node. O(1).
|
||||
*/
|
||||
fn insert_after_n(
|
||||
@self,
|
||||
@mut self,
|
||||
data: T,
|
||||
neighbour: @DListNode<T>
|
||||
) -> @DListNode<T> {
|
||||
neighbour: @mut DListNode<T>
|
||||
) -> @mut DListNode<T> {
|
||||
let mut nobe = DList::new_link(data);
|
||||
self.insert_right(neighbour, nobe);
|
||||
nobe.get()
|
||||
}
|
||||
|
||||
/// Remove a node from the head of the list. O(1).
|
||||
fn pop_n(@self) -> DListLink<T> {
|
||||
fn pop_n(@mut self) -> DListLink<T> {
|
||||
let hd = self.peek_n();
|
||||
hd.map(|nobe| self.unlink(*nobe));
|
||||
hd
|
||||
}
|
||||
/// Remove a node from the tail of the list. O(1).
|
||||
fn pop_tail_n(@self) -> DListLink<T> {
|
||||
fn pop_tail_n(@mut self) -> DListLink<T> {
|
||||
let tl = self.peek_tail_n();
|
||||
tl.map(|nobe| self.unlink(*nobe));
|
||||
tl
|
||||
}
|
||||
/// Get the node at the list's head. O(1).
|
||||
pure fn peek_n(@self) -> DListLink<T> { self.hd }
|
||||
pure fn peek_n(@mut self) -> DListLink<T> { self.hd }
|
||||
/// Get the node at the list's tail. O(1).
|
||||
pure fn peek_tail_n(@self) -> DListLink<T> { self.tl }
|
||||
pure fn peek_tail_n(@mut self) -> DListLink<T> { self.tl }
|
||||
|
||||
/// Get the node at the list's head, failing if empty. O(1).
|
||||
pure fn head_n(@self) -> @DListNode<T> {
|
||||
pure fn head_n(@mut self) -> @mut DListNode<T> {
|
||||
match self.hd {
|
||||
Some(nobe) => nobe,
|
||||
None => die!(
|
||||
|
@ -314,7 +327,7 @@ impl<T> DList<T> {
|
|||
}
|
||||
}
|
||||
/// Get the node at the list's tail, failing if empty. O(1).
|
||||
pure fn tail_n(@self) -> @DListNode<T> {
|
||||
pure fn tail_n(@mut self) -> @mut DListNode<T> {
|
||||
match self.tl {
|
||||
Some(nobe) => nobe,
|
||||
None => die!(
|
||||
|
@ -323,14 +336,14 @@ impl<T> DList<T> {
|
|||
}
|
||||
|
||||
/// Remove a node from anywhere in the list. O(1).
|
||||
fn remove(@self, nobe: @DListNode<T>) { self.unlink(nobe); }
|
||||
fn remove(@mut self, nobe: @mut DListNode<T>) { self.unlink(nobe); }
|
||||
|
||||
/**
|
||||
* Empty another list onto the end of this list, joining this list's tail
|
||||
* to the other list's head. O(1).
|
||||
*/
|
||||
fn append(@self, them: @DList<T>) {
|
||||
if managed::ptr_eq(self, them) {
|
||||
fn append(@mut self, them: @mut DList<T>) {
|
||||
if managed::mut_ptr_eq(self, them) {
|
||||
die!(~"Cannot append a dlist to itself!")
|
||||
}
|
||||
if them.len() > 0 {
|
||||
|
@ -346,8 +359,8 @@ impl<T> DList<T> {
|
|||
* Empty another list onto the start of this list, joining the other
|
||||
* list's tail to this list's head. O(1).
|
||||
*/
|
||||
fn prepend(@self, them: @DList<T>) {
|
||||
if managed::ptr_eq(self, them) {
|
||||
fn prepend(@mut self, them: @mut DList<T>) {
|
||||
if managed::mut_ptr_eq(self, them) {
|
||||
die!(~"Cannot prepend a dlist to itself!")
|
||||
}
|
||||
if them.len() > 0 {
|
||||
|
@ -361,7 +374,7 @@ impl<T> DList<T> {
|
|||
}
|
||||
|
||||
/// Reverse the list's elements in place. O(n).
|
||||
fn reverse(@self) {
|
||||
fn reverse(@mut self) {
|
||||
do option::while_some(self.hd) |nobe| {
|
||||
let next_nobe = nobe.next;
|
||||
self.remove(nobe);
|
||||
|
@ -375,7 +388,7 @@ impl<T> DList<T> {
|
|||
* Remove everything from the list. This is important because the cyclic
|
||||
* links won't otherwise be automatically refcounted-collected. O(n).
|
||||
*/
|
||||
fn clear(@self) {
|
||||
fn clear(@mut self) {
|
||||
// Cute as it would be to simply detach the list and proclaim "O(1)!",
|
||||
// the GC would still be a hidden O(n). Better to be honest about it.
|
||||
while !self.is_empty() {
|
||||
|
@ -384,7 +397,7 @@ impl<T> DList<T> {
|
|||
}
|
||||
|
||||
/// Iterate over nodes.
|
||||
pure fn each_node(@self, f: fn(@DListNode<T>) -> bool) {
|
||||
pure fn each_node(@mut self, f: fn(@mut DListNode<T>) -> bool) {
|
||||
let mut link = self.peek_n();
|
||||
while link.is_some() {
|
||||
let nobe = link.get();
|
||||
|
@ -394,7 +407,7 @@ impl<T> DList<T> {
|
|||
}
|
||||
|
||||
/// Check data structure integrity. O(n).
|
||||
fn assert_consistent(@self) {
|
||||
fn assert_consistent(@mut self) {
|
||||
if self.hd.is_none() || self.tl.is_none() {
|
||||
assert self.hd.is_none() && self.tl.is_none();
|
||||
}
|
||||
|
@ -413,7 +426,7 @@ impl<T> DList<T> {
|
|||
rabbit = rabbit.get().next;
|
||||
}
|
||||
if rabbit.is_some() {
|
||||
assert !managed::ptr_eq(rabbit.get(), nobe);
|
||||
assert !managed::mut_ptr_eq(rabbit.get(), nobe);
|
||||
}
|
||||
// advance
|
||||
link = nobe.next_link();
|
||||
|
@ -434,7 +447,7 @@ impl<T> DList<T> {
|
|||
rabbit = rabbit.get().prev;
|
||||
}
|
||||
if rabbit.is_some() {
|
||||
assert !managed::ptr_eq(rabbit.get(), nobe);
|
||||
assert !managed::mut_ptr_eq(rabbit.get(), nobe);
|
||||
}
|
||||
// advance
|
||||
link = nobe.prev_link();
|
||||
|
@ -446,33 +459,33 @@ impl<T> DList<T> {
|
|||
|
||||
impl<T: Copy> DList<T> {
|
||||
/// Remove data from the head of the list. O(1).
|
||||
fn pop(@self) -> Option<T> {
|
||||
fn pop(@mut self) -> Option<T> {
|
||||
self.pop_n().map(|nobe| nobe.data)
|
||||
}
|
||||
|
||||
/// Remove data from the tail of the list. O(1).
|
||||
fn pop_tail(@self) -> Option<T> {
|
||||
fn pop_tail(@mut self) -> Option<T> {
|
||||
self.pop_tail_n().map(|nobe| nobe.data)
|
||||
}
|
||||
|
||||
/// Get data at the list's head. O(1).
|
||||
pure fn peek(@self) -> Option<T> {
|
||||
pure fn peek(@mut self) -> Option<T> {
|
||||
self.peek_n().map(|nobe| nobe.data)
|
||||
}
|
||||
|
||||
/// Get data at the list's tail. O(1).
|
||||
pure fn peek_tail(@self) -> Option<T> {
|
||||
pure fn peek_tail(@mut self) -> Option<T> {
|
||||
self.peek_tail_n().map (|nobe| nobe.data)
|
||||
}
|
||||
|
||||
/// Get data at the list's head, failing if empty. O(1).
|
||||
pure fn head(@self) -> T { self.head_n().data }
|
||||
pure fn head(@mut self) -> T { self.head_n().data }
|
||||
|
||||
/// Get data at the list's tail, failing if empty. O(1).
|
||||
pure fn tail(@self) -> T { self.tail_n().data }
|
||||
pure fn tail(@mut self) -> T { self.tail_n().data }
|
||||
|
||||
/// Get the elements of the list as a vector. O(n).
|
||||
pure fn to_vec(@self) -> ~[T] {
|
||||
pure fn to_vec(@mut self) -> ~[T] {
|
||||
let mut v = vec::with_capacity(self.size);
|
||||
unsafe {
|
||||
// Take this out of the unchecked when iter's functions are pure
|
||||
|
|
|
@ -9,6 +9,7 @@
|
|||
// except according to those terms.
|
||||
|
||||
mod inst {
|
||||
use cast;
|
||||
use dlist;
|
||||
use dlist::DList;
|
||||
use managed;
|
||||
|
@ -16,7 +17,7 @@ mod inst {
|
|||
use option;
|
||||
|
||||
#[allow(non_camel_case_types)]
|
||||
pub type IMPL_T<A> = @DList<A>;
|
||||
pub type IMPL_T<A> = @mut DList<A>;
|
||||
|
||||
/**
|
||||
* Iterates through the current contents.
|
||||
|
@ -30,18 +31,23 @@ mod inst {
|
|||
while option::is_some(&link) {
|
||||
let nobe = option::get(link);
|
||||
assert nobe.linked;
|
||||
if !f(&nobe.data) { break; }
|
||||
|
||||
{
|
||||
let frozen_nobe = &*nobe;
|
||||
if !f(&frozen_nobe.data) { break; }
|
||||
}
|
||||
|
||||
// Check (weakly) that the user didn't do a remove.
|
||||
if self.size == 0 {
|
||||
die!(~"The dlist became empty during iteration??")
|
||||
}
|
||||
if !nobe.linked ||
|
||||
(!((nobe.prev.is_some()
|
||||
|| managed::ptr_eq(self.hd.expect(~"headless dlist?"),
|
||||
nobe))
|
||||
|| managed::mut_ptr_eq(self.hd.expect(~"headless dlist?"),
|
||||
nobe))
|
||||
&& (nobe.next.is_some()
|
||||
|| managed::ptr_eq(self.tl.expect(~"tailless dlist?"),
|
||||
nobe)))) {
|
||||
|| managed::mut_ptr_eq(self.tl.expect(~"tailless dlist?"),
|
||||
nobe)))) {
|
||||
die!(~"Removing a dlist node during iteration is forbidden!")
|
||||
}
|
||||
link = nobe.next_link();
|
||||
|
|
|
@@ -39,6 +39,12 @@ pub pure fn ptr_eq<T>(a: @T, b: @T) -> bool {
unsafe { ptr::addr_of(&(*a)) == ptr::addr_of(&(*b)) }
}

#[inline(always)]
pub pure fn mut_ptr_eq<T>(a: @mut T, b: @mut T) -> bool {
//! Determine if two mutable shared boxes point to the same object
unsafe { ptr::addr_of(&(*a)) == ptr::addr_of(&(*b)) }
}

#[cfg(notest)]
impl<T:Eq> @const T : Eq {
#[inline(always)]

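
The hunk above adds managed::mut_ptr_eq as the @mut counterpart of ptr_eq;
call sites in this commit use it for identity checks, for example (as in the
dlist changes earlier in the diff):

    if managed::mut_ptr_eq(self, them) {
        die!(~"Cannot append a dlist to itself!")
    }
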
@@ -321,8 +321,7 @@ pub fn waitpid(pid: pid_t) -> c_int {
#[cfg(unix)]
pub fn pipe() -> {in: c_int, out: c_int} {
unsafe {
let fds = {mut in: 0 as c_int,
mut out: 0 as c_int };
let mut fds = {in: 0 as c_int, out: 0 as c_int};
assert (libc::pipe(ptr::mut_addr_of(&(fds.in))) == (0 as c_int));
return {in: fds.in, out: fds.out};
}

@@ -338,8 +337,7 @@ pub fn pipe() -> {in: c_int, out: c_int} {
// fully understand. Here we explicitly make the pipe non-inheritable,
// which means to pass it to a subprocess they need to be duplicated
// first, as in rust_run_program.
let fds = { mut in: 0 as c_int,
mut out: 0 as c_int };
let mut fds = { in: 0 as c_int, out: 0 as c_int };
let res = libc::pipe(ptr::mut_addr_of(&(fds.in)),
1024 as c_uint,
(libc::O_BINARY | libc::O_NOINHERIT) as c_int);

@@ -11,6 +11,7 @@
#[allow(structural_records)];

//! Process spawning
use cast;
use io;
use io::ReaderUtil;
use libc;

@@ -36,28 +37,28 @@ extern mod rustrt {
/// A value representing a child process
pub trait Program {
/// Returns the process id of the program
fn get_id() -> pid_t;
fn get_id(&mut self) -> pid_t;

/// Returns an io::writer that can be used to write to stdin
fn input() -> io::Writer;
fn input(&mut self) -> io::Writer;

/// Returns an io::reader that can be used to read from stdout
fn output() -> io::Reader;
fn output(&mut self) -> io::Reader;

/// Returns an io::reader that can be used to read from stderr
fn err() -> io::Reader;
fn err(&mut self) -> io::Reader;

/// Closes the handle to the child processes standard input
fn close_input();
fn close_input(&mut self);

/**
* Waits for the child process to terminate. Closes the handle
* to stdin if necessary.
*/
fn finish() -> int;
fn finish(&mut self) -> int;

/// Closes open handles
fn destroy();
fn destroy(&mut self);
}

@@ -219,13 +220,13 @@ pub fn start_program(prog: &str, args: &[~str]) -> Program {

struct ProgRepr {
pid: pid_t,
mut in_fd: c_int,
in_fd: c_int,
out_file: *libc::FILE,
err_file: *libc::FILE,
mut finished: bool,
finished: bool,
}

fn close_repr_input(r: &ProgRepr) {
fn close_repr_input(r: &mut ProgRepr) {
let invalid_fd = -1i32;
if r.in_fd != invalid_fd {
unsafe {

@@ -234,22 +235,27 @@ pub fn start_program(prog: &str, args: &[~str]) -> Program {
r.in_fd = invalid_fd;
}
}
fn finish_repr(r: &ProgRepr) -> int {
fn finish_repr(r: &mut ProgRepr) -> int {
if r.finished { return 0; }
r.finished = true;
close_repr_input(r);
close_repr_input(&mut *r);
return waitpid(r.pid);
}
fn destroy_repr(r: &ProgRepr) {
fn destroy_repr(r: &mut ProgRepr) {
unsafe {
finish_repr(r);
finish_repr(&mut *r);
libc::fclose(r.out_file);
libc::fclose(r.err_file);
}
}
struct ProgRes {
r: ProgRepr,
drop { destroy_repr(&self.r); }
drop {
unsafe {
// XXX: This is bad.
destroy_repr(cast::transmute(&self.r));
}
}
}

fn ProgRes(r: ProgRepr) -> ProgRes {

@@ -259,21 +265,21 @@ pub fn start_program(prog: &str, args: &[~str]) -> Program {
}

impl ProgRes: Program {
fn get_id() -> pid_t { return self.r.pid; }
fn input() -> io::Writer {
fn get_id(&mut self) -> pid_t { return self.r.pid; }
fn input(&mut self) -> io::Writer {
io::fd_writer(self.r.in_fd, false)
}
fn output() -> io::Reader {
fn output(&mut self) -> io::Reader {
io::FILE_reader(self.r.out_file, false)
}
fn err() -> io::Reader {
fn err(&mut self) -> io::Reader {
io::FILE_reader(self.r.err_file, false)
}
fn close_input() { close_repr_input(&self.r); }
fn finish() -> int { finish_repr(&self.r) }
fn destroy() { destroy_repr(&self.r); }
fn close_input(&mut self) { close_repr_input(&mut self.r); }
fn finish(&mut self) -> int { finish_repr(&mut self.r) }
fn destroy(&mut self) { destroy_repr(&mut self.r); }
}
let repr = ProgRepr {
let mut repr = ProgRepr {
pid: pid,
in_fd: pipe_input.out,
out_file: os::fdopen(pipe_output.in),

@@ -523,13 +523,13 @@ pub fn build_link_meta(sess: Session, c: &ast::crate, output: &Path,
}

fn warn_missing(sess: Session, name: &str, default: &str) {
if !sess.building_library { return; }
if !*sess.building_library { return; }
sess.warn(fmt!("missing crate link meta `%s`, using `%s` as default",
name, default));
}

fn crate_meta_name(sess: Session, output: &Path, -opt_name: Option<@str>)
-> @str {
-> @str {
return match opt_name {
Some(v) => v,
None => {

@@ -730,7 +730,7 @@ pub fn link_binary(sess: Session,
}
}

let output = if sess.building_library {
let output = if *sess.building_library {
let long_libname = output_dll_filename(sess.targ_cfg.os, lm);
debug!("link_meta.name: %s", lm.name);
debug!("long_libname: %s", long_libname);

@@ -806,7 +806,7 @@ pub fn link_binary(sess: Session,
let used_libs = cstore::get_used_libraries(cstore);
for used_libs.each |l| { cc_args.push(~"-l" + *l); }

if sess.building_library {
if *sess.building_library {
cc_args.push(lib_cmd);

// On mac we need to tell the linker to let this library

@@ -199,7 +199,7 @@ pub fn compile_upto(sess: Session, cfg: ast::crate_cfg,
|| parse_input(sess, copy cfg, input) );
if upto == cu_parse { return {crate: crate, tcx: None}; }

sess.building_library = session::building_library(
*sess.building_library = session::building_library(
sess.opts.crate_type, crate, sess.opts.test);

crate = time(time_passes, ~"configuration", ||

@@ -335,7 +335,7 @@ pub fn compile_upto(sess: Session, cfg: ast::crate_cfg,

let stop_after_codegen =
sess.opts.output_type != link::output_type_exe ||
(sess.opts.static && sess.building_library) ||
(sess.opts.static && *sess.building_library) ||
sess.opts.jit;

if stop_after_codegen { return {crate: crate, tcx: None}; }

@@ -466,7 +466,7 @@ pub fn get_arch(triple: ~str) -> Option<session::arch> {
}

pub fn build_target_config(sopts: @session::options,
demitter: diagnostic::emitter)
demitter: diagnostic::Emitter)
-> @session::config {
let os = match get_os(sopts.target_triple) {
Some(os) => os,

@@ -512,7 +512,7 @@ pub fn host_triple() -> ~str {

pub fn build_session_options(+binary: ~str,
matches: &getopts::Matches,
demitter: diagnostic::emitter)
demitter: diagnostic::Emitter)
-> @session::options {
let crate_type = if opt_present(matches, ~"lib") {
session::lib_crate

@@ -651,7 +651,7 @@ pub fn build_session_options(+binary: ~str,
}

pub fn build_session(sopts: @session::options,
demitter: diagnostic::emitter) -> Session {
demitter: diagnostic::Emitter) -> Session {
let codemap = @codemap::CodeMap::new();
let diagnostic_handler =
diagnostic::mk_handler(Some(demitter));

@@ -662,30 +662,32 @@ pub fn build_session(sopts: @session::options,

pub fn build_session_(sopts: @session::options,
cm: @codemap::CodeMap,
demitter: diagnostic::emitter,
demitter: diagnostic::Emitter,
span_diagnostic_handler: diagnostic::span_handler)
-> Session {
let target_cfg = build_target_config(sopts, demitter);
let p_s = parse::new_parse_sess_special_handler(span_diagnostic_handler,
cm);
let cstore = cstore::mk_cstore(p_s.interner);
let cstore = @mut cstore::mk_cstore(p_s.interner);
let filesearch = filesearch::mk_filesearch(
sopts.maybe_sysroot,
sopts.target_triple,
/*bad*/copy sopts.addl_lib_search_paths);
let lint_settings = lint::mk_lint_settings();
Session_(@{targ_cfg: target_cfg,
opts: sopts,
cstore: cstore,
parse_sess: p_s,
codemap: cm,
// For a library crate, this is always none
mut main_fn: None,
span_diagnostic: span_diagnostic_handler,
filesearch: filesearch,
mut building_library: false,
working_dir: os::getcwd(),
lint_settings: lint_settings})
@Session_ {
targ_cfg: target_cfg,
opts: sopts,
cstore: cstore,
parse_sess: p_s,
codemap: cm,
// For a library crate, this is always none
main_fn: @mut None,
span_diagnostic: span_diagnostic_handler,
filesearch: filesearch,
building_library: @mut false,
working_dir: os::getcwd(),
lint_settings: lint_settings
}
}

pub fn parse_pretty(sess: Session, &&name: ~str) -> pp_mode {

@@ -780,7 +782,7 @@ pub fn build_output_filenames(input: input,
let sopts = sess.opts;
let stop_after_codegen =
sopts.output_type != link::output_type_exe ||
sopts.static && sess.building_library;
sopts.static && *sess.building_library;

let obj_suffix =

@@ -811,7 +813,7 @@ pub fn build_output_filenames(input: input,
str_input(_) => ~"rust_out"
};

if sess.building_library {
if *sess.building_library {
out_path = dirpath.push(os::dll_filename(stem));
obj_path = dirpath.push(stem).with_filetype(obj_suffix);
} else {

@@ -828,7 +830,7 @@ pub fn build_output_filenames(input: input,
(*out_file).with_filetype(obj_suffix)
};

if sess.building_library {
if *sess.building_library {
// FIXME (#2401): We might want to warn here; we're actually not
// going to respect the user's choice of library name when it
// comes time to link, we'll be linking to

@@ -844,7 +846,7 @@ pub fn build_output_filenames(input: input,
obj_filename: obj_path};
}

pub fn early_error(emitter: diagnostic::emitter, msg: ~str) -> ! {
pub fn early_error(emitter: diagnostic::Emitter, msg: ~str) -> ! {
emitter(None, msg, diagnostic::fatal);
die!();
}

@@ -149,23 +149,23 @@ pub type options =

pub type crate_metadata = {name: ~str, data: ~[u8]};

pub type Session_ = {targ_cfg: @config,
opts: @options,
cstore: metadata::cstore::CStore,
parse_sess: parse_sess,
codemap: @codemap::CodeMap,
// For a library crate, this is always none
mut main_fn: Option<(node_id, codemap::span)>,
span_diagnostic: diagnostic::span_handler,
filesearch: filesearch::FileSearch,
mut building_library: bool,
working_dir: Path,
lint_settings: lint::lint_settings};

pub enum Session {
Session_(@Session_)
pub struct Session_ {
targ_cfg: @config,
opts: @options,
cstore: @mut metadata::cstore::CStore,
parse_sess: parse_sess,
codemap: @codemap::CodeMap,
// For a library crate, this is always none
main_fn: @mut Option<(node_id, codemap::span)>,
span_diagnostic: diagnostic::span_handler,
filesearch: filesearch::FileSearch,
building_library: @mut bool,
working_dir: Path,
lint_settings: lint::lint_settings
}

pub type Session = @Session_;

pub impl Session {
fn span_fatal(sp: span, msg: ~str) -> ! {
self.span_diagnostic.span_fatal(sp, msg)

@@ -303,7 +303,8 @@ pub fn expect<T: Copy>(sess: Session,
diagnostic::expect(sess.diagnostic(), opt, msg)
}

pub fn building_library(req_crate_type: crate_type, crate: @ast::crate,
pub fn building_library(req_crate_type: crate_type,
crate: @ast::crate,
testing: bool) -> bool {
match req_crate_type {
bin_crate => false,

@ -30,20 +30,25 @@ use syntax::attr::attrs_contains_name;
|
|||
|
||||
type node_id_gen = fn@() -> ast::node_id;
|
||||
|
||||
type test = {span: span, path: ~[ast::ident],
|
||||
ignore: bool, should_fail: bool};
|
||||
type test = {
|
||||
span: span,
|
||||
path: ~[ast::ident],
|
||||
ignore: bool,
|
||||
should_fail: bool
|
||||
};
|
||||
|
||||
type test_ctxt =
|
||||
@{sess: session::Session,
|
||||
crate: @ast::crate,
|
||||
mut path: ~[ast::ident],
|
||||
testfns: DVec<test>};
|
||||
struct TestCtxt {
|
||||
sess: session::Session,
|
||||
crate: @ast::crate,
|
||||
path: ~[ast::ident],
|
||||
testfns: ~[test]
|
||||
}
|
||||
|
||||
// Traverse the crate, collecting all the test functions, eliding any
|
||||
// existing main functions, and synthesizing a main test harness
|
||||
pub fn modify_for_testing(sess: session::Session,
|
||||
crate: @ast::crate) -> @ast::crate {
|
||||
|
||||
crate: @ast::crate)
|
||||
-> @ast::crate {
|
||||
// We generate the test harness when building in the 'test'
|
||||
// configuration, either with the '--test' or '--cfg test'
|
||||
// command line options.
|
||||
|
@ -58,12 +63,14 @@ pub fn modify_for_testing(sess: session::Session,
|
|||
}
|
||||
|
||||
fn generate_test_harness(sess: session::Session,
|
||||
crate: @ast::crate) -> @ast::crate {
|
||||
let cx: test_ctxt =
|
||||
@{sess: sess,
|
||||
crate: crate,
|
||||
mut path: ~[],
|
||||
testfns: DVec()};
|
||||
crate: @ast::crate)
|
||||
-> @ast::crate {
|
||||
let cx: @mut TestCtxt = @mut TestCtxt {
|
||||
sess: sess,
|
||||
crate: crate,
|
||||
path: ~[],
|
||||
testfns: ~[]
|
||||
};
|
||||
|
||||
let precursor = @fold::AstFoldFns {
|
||||
fold_crate: fold::wrap(|a,b| fold_crate(cx, a, b) ),
|
||||
|
@ -83,13 +90,15 @@ fn strip_test_functions(crate: @ast::crate) -> @ast::crate {
|
|||
}
|
||||
}
|
||||
|
||||
fn fold_mod(cx: test_ctxt, m: ast::_mod, fld: fold::ast_fold) -> ast::_mod {
|
||||
|
||||
fn fold_mod(cx: @mut TestCtxt,
|
||||
m: ast::_mod,
|
||||
fld: fold::ast_fold)
|
||||
-> ast::_mod {
|
||||
// Remove any #[main] from the AST so it doesn't clash with
|
||||
// the one we're going to add. Only if compiling an executable.
|
||||
|
||||
fn nomain(cx: test_ctxt, item: @ast::item) -> @ast::item {
|
||||
if !cx.sess.building_library {
|
||||
fn nomain(cx: @mut TestCtxt, item: @ast::item) -> @ast::item {
|
||||
if !*cx.sess.building_library {
|
||||
@ast::item{attrs: item.attrs.filtered(|attr| {
|
||||
attr::get_attr_name(*attr) != ~"main"
|
||||
}),.. copy *item}
|
||||
|
@ -104,8 +113,10 @@ fn fold_mod(cx: test_ctxt, m: ast::_mod, fld: fold::ast_fold) -> ast::_mod {
|
|||
fold::noop_fold_mod(mod_nomain, fld)
|
||||
}
|
||||
|
||||
fn fold_crate(cx: test_ctxt, c: ast::crate_, fld: fold::ast_fold) ->
|
||||
ast::crate_ {
|
||||
fn fold_crate(cx: @mut TestCtxt,
|
||||
c: ast::crate_,
|
||||
fld: fold::ast_fold)
|
||||
-> ast::crate_ {
|
||||
let folded = fold::noop_fold_crate(c, fld);
|
||||
|
||||
// Add a special __test module to the crate that will contain code
|
||||
|
@ -115,9 +126,8 @@ fn fold_crate(cx: test_ctxt, c: ast::crate_, fld: fold::ast_fold) ->
|
|||
}
|
||||
|
||||
|
||||
fn fold_item(cx: test_ctxt, &&i: @ast::item, fld: fold::ast_fold) ->
|
||||
Option<@ast::item> {
|
||||
|
||||
fn fold_item(cx: @mut TestCtxt, &&i: @ast::item, fld: fold::ast_fold)
|
||||
-> Option<@ast::item> {
|
||||
cx.path.push(i.ident);
|
||||
debug!("current path: %s",
|
||||
ast_util::path_name_i(cx.path, cx.sess.parse_sess.interner));
|
||||
|
@ -125,7 +135,8 @@ fn fold_item(cx: test_ctxt, &&i: @ast::item, fld: fold::ast_fold) ->
|
|||
if is_test_fn(i) {
|
||||
match i.node {
|
||||
ast::item_fn(_, purity, _, _) if purity == ast::unsafe_fn => {
|
||||
cx.sess.span_fatal(
|
||||
let sess = cx.sess;
|
||||
sess.span_fatal(
|
||||
i.span,
|
||||
~"unsafe functions cannot be used for tests");
|
||||
}
|
||||
|
@ -165,7 +176,7 @@ fn is_test_fn(i: @ast::item) -> bool {
|
|||
return has_test_attr && has_test_signature(i);
|
||||
}
|
||||
|
||||
fn is_ignored(cx: test_ctxt, i: @ast::item) -> bool {
|
||||
fn is_ignored(cx: @mut TestCtxt, i: @ast::item) -> bool {
|
||||
let ignoreattrs = attr::find_attrs_by_name(i.attrs, "ignore");
|
||||
let ignoreitems = attr::attr_metas(ignoreattrs);
|
||||
return if !ignoreitems.is_empty() {
|
||||
|
@ -183,7 +194,7 @@ fn should_fail(i: @ast::item) -> bool {
|
|||
vec::len(attr::find_attrs_by_name(i.attrs, ~"should_fail")) > 0u
|
||||
}
|
||||
|
||||
fn add_test_module(cx: test_ctxt, +m: ast::_mod) -> ast::_mod {
|
||||
fn add_test_module(cx: @mut TestCtxt, +m: ast::_mod) -> ast::_mod {
|
||||
let testmod = mk_test_module(cx);
|
||||
ast::_mod {
|
||||
items: vec::append_one(/*bad*/copy m.items, testmod),
|
||||
|
@ -207,7 +218,7 @@ mod __test {
|
|||
|
||||
*/
|
||||
|
||||
fn mk_test_module(cx: test_ctxt) -> @ast::item {
|
||||
fn mk_test_module(cx: @mut TestCtxt) -> @ast::item {
|
||||
// Link to std
|
||||
let std = mk_std(cx);
|
||||
let view_items = if is_std(cx) { ~[] } else { ~[std] };
|
||||
|
@ -225,17 +236,18 @@ fn mk_test_module(cx: test_ctxt) -> @ast::item {
|
|||
// This attribute tells resolve to let us call unexported functions
|
||||
let resolve_unexported_attr =
|
||||
attr::mk_attr(attr::mk_word_item(~"!resolve_unexported"));
|
||||
let sess = cx.sess;
|
||||
let item = ast::item {
|
||||
ident: cx.sess.ident_of(~"__test"),
|
||||
ident: sess.ident_of(~"__test"),
|
||||
attrs: ~[resolve_unexported_attr],
|
||||
id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
node: item_,
|
||||
vis: ast::public,
|
||||
span: dummy_sp(),
|
||||
};
|
||||
|
||||
debug!("Synthetic test module:\n%s\n",
|
||||
pprust::item_to_str(@copy item, cx.sess.intr()));
|
||||
pprust::item_to_str(@copy item, sess.intr()));
|
||||
|
||||
return @item;
|
||||
}
|
||||
|
@ -260,14 +272,15 @@ fn path_node_global(+ids: ~[ast::ident]) -> @ast::path {
|
|||
types: ~[] }
|
||||
}
|
||||
|
||||
fn mk_std(cx: test_ctxt) -> @ast::view_item {
|
||||
fn mk_std(cx: @mut TestCtxt) -> @ast::view_item {
|
||||
let vers = ast::lit_str(@~"0.6");
|
||||
let vers = nospan(vers);
|
||||
let mi = ast::meta_name_value(~"vers", vers);
|
||||
let mi = nospan(mi);
|
||||
let vi = ast::view_item_use(cx.sess.ident_of(~"std"),
|
||||
let sess = cx.sess;
|
||||
let vi = ast::view_item_use(sess.ident_of(~"std"),
|
||||
~[@mi],
|
||||
cx.sess.next_node_id());
|
||||
sess.next_node_id());
|
||||
let vi = ast::view_item {
|
||||
node: vi,
|
||||
attrs: ~[],
|
||||
|
@ -278,7 +291,7 @@ fn mk_std(cx: test_ctxt) -> @ast::view_item {
|
|||
return @vi;
|
||||
}
|
||||
|
||||
fn mk_tests(cx: test_ctxt) -> @ast::item {
|
||||
fn mk_tests(cx: @mut TestCtxt) -> @ast::item {
|
||||
let ret_ty = mk_test_desc_and_fn_vec_ty(cx);
|
||||
|
||||
let decl = ast::fn_decl {
|
||||
|
@ -290,15 +303,17 @@ fn mk_tests(cx: test_ctxt) -> @ast::item {
|
|||
// The vector of test_descs for this crate
|
||||
let test_descs = mk_test_desc_and_fn_vec(cx);
|
||||
|
||||
let body_: ast::blk_ =
|
||||
default_block(~[], option::Some(test_descs), cx.sess.next_node_id());
|
||||
let sess = cx.sess;
|
||||
let body_: ast::blk_ = default_block(~[],
|
||||
option::Some(test_descs),
|
||||
sess.next_node_id());
|
||||
let body = nospan(body_);
|
||||
|
||||
let item_ = ast::item_fn(decl, ast::impure_fn, ~[], body);
|
||||
let item = ast::item {
|
||||
ident: cx.sess.ident_of(~"tests"),
|
||||
ident: sess.ident_of(~"tests"),
|
||||
attrs: ~[],
|
||||
id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
node: item_,
|
||||
vis: ast::public,
|
||||
span: dummy_sp(),
|
||||
|
@ -306,7 +321,7 @@ fn mk_tests(cx: test_ctxt) -> @ast::item {
|
|||
return @item;
|
||||
}
|
||||
|
||||
fn is_std(cx: test_ctxt) -> bool {
|
||||
fn is_std(cx: @mut TestCtxt) -> bool {
|
||||
let is_std = {
|
||||
let items = attr::find_linkage_metas(cx.crate.node.attrs);
|
||||
match attr::last_meta_item_value_str_by_name(items, ~"name") {
|
||||
|
@ -317,28 +332,28 @@ fn is_std(cx: test_ctxt) -> bool {
|
|||
return is_std;
|
||||
}
|
||||
|
||||
fn mk_path(cx: test_ctxt, +path: ~[ast::ident]) -> @ast::path {
|
||||
fn mk_path(cx: @mut TestCtxt, +path: ~[ast::ident]) -> @ast::path {
|
||||
// For tests that are inside of std we don't want to prefix
|
||||
// the paths with std::
|
||||
if is_std(cx) { path_node_global(path) }
|
||||
else {
|
||||
path_node(
|
||||
~[cx.sess.ident_of(~"self"),
|
||||
cx.sess.ident_of(~"std")]
|
||||
+ path)
|
||||
let sess = cx.sess;
|
||||
if is_std(cx) {
|
||||
path_node_global(path)
|
||||
} else {
|
||||
path_node(~[ sess.ident_of(~"self"), sess.ident_of(~"std") ] + path)
|
||||
}
|
||||
}
|
||||
|
||||
// The ast::Ty of ~[std::test::TestDescAndFn]
|
||||
fn mk_test_desc_and_fn_vec_ty(cx: test_ctxt) -> @ast::Ty {
|
||||
let test_desc_and_fn_ty_path =
|
||||
mk_path(cx, ~[cx.sess.ident_of(~"test"),
|
||||
cx.sess.ident_of(~"TestDescAndFn")]);
|
||||
// The ast::Ty of ~[std::test::test_desc]
|
||||
fn mk_test_desc_and_fn_vec_ty(cx: @mut TestCtxt) -> @ast::Ty {
|
||||
let sess = cx.sess;
|
||||
let test_desc_and_fn_ty_path = mk_path(cx, ~[
|
||||
sess.ident_of(~"test"),
|
||||
sess.ident_of(~"TestDescAndFn")
|
||||
]);
|
||||
|
||||
let test_desc_and_fn_ty = ast::Ty {
|
||||
id: cx.sess.next_node_id(),
|
||||
node: ast::ty_path(test_desc_and_fn_ty_path,
|
||||
cx.sess.next_node_id()),
|
||||
id: sess.next_node_id(),
|
||||
node: ast::ty_path(test_desc_and_fn_ty_path, sess.next_node_id()),
|
||||
span: dummy_sp(),
|
||||
};
|
||||
|
||||
|
@ -346,104 +361,109 @@ fn mk_test_desc_and_fn_vec_ty(cx: test_ctxt) -> @ast::Ty {
|
|||
mutbl: ast::m_imm};
|
||||
|
||||
let inner_ty = @ast::Ty {
|
||||
id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
node: ast::ty_vec(vec_mt),
|
||||
span: dummy_sp(),
|
||||
};
|
||||
|
||||
@ast::Ty {
|
||||
id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
node: ast::ty_uniq(ast::mt { ty: inner_ty, mutbl: ast::m_imm }),
|
||||
span: dummy_sp(),
|
||||
}
|
||||
}
|
||||
|
||||
fn mk_test_desc_and_fn_vec(cx: test_ctxt) -> @ast::expr {
|
||||
fn mk_test_desc_and_fn_vec(cx: @mut TestCtxt) -> @ast::expr {
|
||||
debug!("building test vector from %u tests", cx.testfns.len());
|
||||
let mut descs = ~[];
|
||||
for cx.testfns.each |test| {
|
||||
descs.push(mk_test_desc_and_fn_rec(cx, *test));
|
||||
{
|
||||
let testfns = &mut cx.testfns;
|
||||
for testfns.each |test| {
|
||||
descs.push(mk_test_desc_and_fn_rec(cx, *test));
|
||||
}
|
||||
}
|
||||
|
||||
let sess = cx.sess;
|
||||
let inner_expr = @ast::expr {
|
||||
id: cx.sess.next_node_id(),
|
||||
callee_id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
callee_id: sess.next_node_id(),
|
||||
node: ast::expr_vec(descs, ast::m_imm),
|
||||
span: dummy_sp(),
|
||||
};
|
||||
|
||||
@ast::expr {
|
||||
id: cx.sess.next_node_id(),
|
||||
callee_id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
callee_id: sess.next_node_id(),
|
||||
node: ast::expr_vstore(inner_expr, ast::expr_vstore_uniq),
|
||||
span: dummy_sp(),
|
||||
}
|
||||
}
|
||||
|
||||
fn mk_test_desc_and_fn_rec(cx: test_ctxt, test: test) -> @ast::expr {
|
||||
fn mk_test_desc_and_fn_rec(cx: @mut TestCtxt, test: test) -> @ast::expr {
|
||||
let span = test.span;
|
||||
let path = /*bad*/copy test.path;
|
||||
|
||||
debug!("encoding %s", ast_util::path_name_i(path,
|
||||
cx.sess.parse_sess.interner));
|
||||
let sess = cx.sess;
|
||||
debug!("encoding %s",
|
||||
ast_util::path_name_i(path, sess.parse_sess.interner));
|
||||
|
||||
let name_lit: ast::lit =
|
||||
nospan(ast::lit_str(@ast_util::path_name_i(
|
||||
path, cx.sess.parse_sess.interner)));
|
||||
path,
|
||||
sess.parse_sess.interner)));
|
||||
|
||||
let name_expr_inner = @ast::expr {
|
||||
id: cx.sess.next_node_id(),
|
||||
callee_id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
callee_id: sess.next_node_id(),
|
||||
node: ast::expr_lit(@name_lit),
|
||||
span: span,
|
||||
};
|
||||
|
||||
let name_expr = ast::expr {
|
||||
id: cx.sess.next_node_id(),
|
||||
callee_id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
callee_id: sess.next_node_id(),
|
||||
node: ast::expr_vstore(name_expr_inner, ast::expr_vstore_uniq),
|
||||
span: dummy_sp(),
|
||||
};
|
||||
|
||||
let name_field = nospan(ast::field_ {
|
||||
mutbl: ast::m_imm,
|
||||
ident: cx.sess.ident_of(~"name"),
|
||||
ident: sess.ident_of(~"name"),
|
||||
expr: @name_expr,
|
||||
});
|
||||
|
||||
let ignore_lit: ast::lit = nospan(ast::lit_bool(test.ignore));
|
||||
|
||||
let ignore_expr = ast::expr {
|
||||
id: cx.sess.next_node_id(),
|
||||
callee_id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
callee_id: sess.next_node_id(),
|
||||
node: ast::expr_lit(@ignore_lit),
|
||||
span: span,
|
||||
};
|
||||
|
||||
let ignore_field = nospan(ast::field_ {
|
||||
mutbl: ast::m_imm,
|
||||
ident: cx.sess.ident_of(~"ignore"),
|
||||
ident: sess.ident_of(~"ignore"),
|
||||
expr: @ignore_expr,
|
||||
});
|
||||
|
||||
let fail_lit: ast::lit = nospan(ast::lit_bool(test.should_fail));
|
||||
|
||||
let fail_expr = ast::expr {
|
||||
id: cx.sess.next_node_id(),
|
||||
callee_id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
callee_id: sess.next_node_id(),
|
||||
node: ast::expr_lit(@fail_lit),
|
||||
span: span,
|
||||
};
|
||||
|
||||
let fail_field = nospan(ast::field_ {
|
||||
mutbl: ast::m_imm,
|
||||
ident: cx.sess.ident_of(~"should_fail"),
|
||||
ident: sess.ident_of(~"should_fail"),
|
||||
expr: @fail_expr,
|
||||
});
|
||||
|
||||
let test_desc_path =
|
||||
mk_path(cx, ~[cx.sess.ident_of(~"test"),
|
||||
cx.sess.ident_of(~"TestDesc")]);
|
||||
mk_path(cx, ~[ sess.ident_of(~"test"), sess.ident_of(~"TestDesc") ]);
|
||||
|
||||
let desc_rec_ = ast::expr_struct(
|
||||
test_desc_path,
|
||||
|
@ -452,40 +472,40 @@ fn mk_test_desc_and_fn_rec(cx: test_ctxt, test: test) -> @ast::expr {
|
|||
);
|
||||
|
||||
let desc_rec = @ast::expr {
|
||||
id: cx.sess.next_node_id(),
|
||||
callee_id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
callee_id: sess.next_node_id(),
|
||||
node: desc_rec_,
|
||||
span: span,
|
||||
};
|
||||
|
||||
let desc_field = nospan(ast::field_ {
|
||||
mutbl: ast::m_imm,
|
||||
ident: cx.sess.ident_of(~"desc"),
|
||||
ident: sess.ident_of(~"desc"),
|
||||
expr: desc_rec
|
||||
});
|
||||
|
||||
let fn_path = path_node_global(path);
|
||||
|
||||
let fn_expr = @ast::expr {
|
||||
id: cx.sess.next_node_id(),
|
||||
callee_id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
callee_id: sess.next_node_id(),
|
||||
node: ast::expr_path(fn_path),
|
||||
span: span,
|
||||
};
|
||||
|
||||
let fn_field = nospan(ast::field_ {
|
||||
mutbl: ast::m_imm,
|
||||
ident: cx.sess.ident_of(~"testfn"),
|
||||
ident: sess.ident_of(~"testfn"),
|
||||
expr: fn_expr,
|
||||
});
|
||||
|
||||
let test_desc_and_fn_path =
|
||||
mk_path(cx, ~[cx.sess.ident_of(~"test"),
|
||||
cx.sess.ident_of(~"TestDescAndFn")]);
|
||||
mk_path(cx, ~[sess.ident_of(~"test"),
|
||||
sess.ident_of(~"TestDescAndFn")]);
|
||||
|
||||
let desc_and_fn_rec = @ast::expr {
|
||||
id: cx.sess.next_node_id(),
|
||||
callee_id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
callee_id: sess.next_node_id(),
|
||||
node: ast::expr_struct(test_desc_and_fn_path,
|
||||
~[fn_field, desc_field],
|
||||
option::None),
|
||||
|
@ -495,9 +515,10 @@ fn mk_test_desc_and_fn_rec(cx: test_ctxt, test: test) -> @ast::expr {
|
|||
return desc_and_fn_rec;
|
||||
}
|
||||
|
||||
fn mk_main(cx: test_ctxt) -> @ast::item {
|
||||
fn mk_main(cx: @mut TestCtxt) -> @ast::item {
|
||||
let sess = cx.sess;
|
||||
let ret_ty = ast::Ty {
|
||||
id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
node: ast::ty_nil,
|
||||
span: dummy_sp(),
|
||||
};
|
||||
|
@ -511,15 +532,16 @@ fn mk_main(cx: test_ctxt) -> @ast::item {
|
|||
let test_main_call_expr = mk_test_main_call(cx);
|
||||
|
||||
let body_: ast::blk_ =
|
||||
default_block(~[], option::Some(test_main_call_expr),
|
||||
cx.sess.next_node_id());
|
||||
default_block(~[],
|
||||
option::Some(test_main_call_expr),
|
||||
sess.next_node_id());
|
||||
let body = codemap::spanned { node: body_, span: dummy_sp() };
|
||||
|
||||
let item_ = ast::item_fn(decl, ast::impure_fn, ~[], body);
|
||||
let item = ast::item {
|
||||
ident: cx.sess.ident_of(~"main"),
|
||||
ident: sess.ident_of(~"main"),
|
||||
attrs: ~[attr::mk_attr(attr::mk_word_item(~"main"))],
|
||||
id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
node: item_,
|
||||
vis: ast::public,
|
||||
span: dummy_sp(),
|
||||
|
@ -527,64 +549,64 @@ fn mk_main(cx: test_ctxt) -> @ast::item {
|
|||
return @item;
|
||||
}
|
||||
|
||||
fn mk_test_main_call(cx: test_ctxt) -> @ast::expr {
|
||||
fn mk_test_main_call(cx: @mut TestCtxt) -> @ast::expr {
|
||||
// Call os::args to generate the vector of test_descs
|
||||
let sess = cx.sess;
|
||||
let args_path = path_node_global(~[
|
||||
cx.sess.ident_of(~"os"),
|
||||
cx.sess.ident_of(~"args")
|
||||
sess.ident_of(~"os"),
|
||||
sess.ident_of(~"args")
|
||||
]);
|
||||
|
||||
let args_path_expr = ast::expr {
|
||||
id: cx.sess.next_node_id(),
|
||||
callee_id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
callee_id: sess.next_node_id(),
|
||||
node: ast::expr_path(args_path),
|
||||
span: dummy_sp(),
|
||||
};
|
||||
|
||||
let args_call_expr = ast::expr {
|
||||
id: cx.sess.next_node_id(),
|
||||
callee_id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
callee_id: sess.next_node_id(),
|
||||
node: ast::expr_call(@args_path_expr, ~[], ast::NoSugar),
|
||||
span: dummy_sp(),
|
||||
};
|
||||
|
||||
// Call __test::test to generate the vector of test_descs
|
||||
let test_path = path_node(~[cx.sess.ident_of(~"tests")]);
|
||||
let test_path = path_node(~[ sess.ident_of(~"tests") ]);
|
||||
|
||||
let test_path_expr = ast::expr {
|
||||
id: cx.sess.next_node_id(),
|
||||
callee_id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
callee_id: sess.next_node_id(),
|
||||
node: ast::expr_path(test_path),
|
||||
span: dummy_sp(),
|
||||
};
|
||||
|
||||
let test_call_expr = ast::expr {
|
||||
id: cx.sess.next_node_id(),
|
||||
callee_id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
callee_id: sess.next_node_id(),
|
||||
node: ast::expr_call(@test_path_expr, ~[], ast::NoSugar),
|
||||
span: dummy_sp(),
|
||||
};
|
||||
|
||||
// Call std::test::test_main
|
||||
let test_main_path =
|
||||
mk_path(cx, ~[cx.sess.ident_of(~"test"),
|
||||
cx.sess.ident_of(~"test_main")]);
|
||||
let test_main_path = mk_path(cx, ~[
|
||||
sess.ident_of(~"test"),
|
||||
sess.ident_of(~"test_main")
|
||||
]);
|
||||
|
||||
let test_main_path_expr = ast::expr {
|
||||
id: cx.sess.next_node_id(),
|
||||
callee_id: cx.sess.next_node_id(),
|
||||
id: sess.next_node_id(),
|
||||
callee_id: sess.next_node_id(),
|
||||
node: ast::expr_path(test_main_path),
|
||||
span: dummy_sp(),
|
||||
};
|
||||
|
||||
let test_main_call_expr = ast::expr {
|
||||
id: cx.sess.next_node_id(),
|
||||
callee_id: cx.sess.next_node_id(),
|
||||
node: ast::expr_call(
|
||||
@test_main_path_expr,
|
||||
~[@args_call_expr, @test_call_expr],
|
||||
ast::NoSugar
|
||||
),
|
||||
id: sess.next_node_id(),
|
||||
callee_id: sess.next_node_id(),
|
||||
node: ast::expr_call(@test_main_path_expr,
|
||||
~[@args_call_expr, @test_call_expr],
|
||||
ast::NoSugar),
|
||||
span: dummy_sp(),
|
||||
};
|
||||
|
||||
|
|
|
@ -19,7 +19,6 @@ use metadata::decoder;
|
|||
use metadata::filesearch::FileSearch;
|
||||
use metadata::loader;
|
||||
|
||||
use core::dvec::DVec;
|
||||
use core::either;
|
||||
use core::option;
|
||||
use core::vec;
|
||||
|
@ -36,19 +35,21 @@ use std::oldmap::HashMap;
|
|||
// libraries necessary for later resolving, typechecking, linking, etc.
|
||||
pub fn read_crates(diag: span_handler,
|
||||
crate: ast::crate,
|
||||
cstore: cstore::CStore,
|
||||
cstore: @mut cstore::CStore,
|
||||
filesearch: FileSearch,
|
||||
os: loader::os,
|
||||
static: bool,
|
||||
statik: bool,
|
||||
intr: @ident_interner) {
|
||||
let e = @{diag: diag,
|
||||
filesearch: filesearch,
|
||||
cstore: cstore,
|
||||
os: os,
|
||||
static: static,
|
||||
crate_cache: DVec(),
|
||||
mut next_crate_num: 1,
|
||||
intr: intr};
|
||||
let e = @mut Env {
|
||||
diag: diag,
|
||||
filesearch: filesearch,
|
||||
cstore: cstore,
|
||||
os: os,
|
||||
statik: statik,
|
||||
crate_cache: @mut ~[],
|
||||
next_crate_num: 1,
|
||||
intr: intr
|
||||
};
|
||||
let v =
|
||||
visit::mk_simple_visitor(@visit::SimpleVisitor {
|
||||
visit_view_item: |a| visit_view_item(e, a),
|
||||
|
@ -56,7 +57,7 @@ pub fn read_crates(diag: span_handler,
|
|||
.. *visit::default_simple_visitor()});
|
||||
visit::visit_crate(crate, (), v);
|
||||
dump_crates(e.crate_cache);
|
||||
warn_if_multiple_versions(e, diag, e.crate_cache.get());
|
||||
warn_if_multiple_versions(e, diag, e.crate_cache);
|
||||
}
|
||||
|
||||
type cache_entry = {
|
||||
|
@ -66,7 +67,7 @@ type cache_entry = {
|
|||
metas: @~[@ast::meta_item]
|
||||
};
|
||||
|
||||
fn dump_crates(crate_cache: DVec<cache_entry>) {
|
||||
fn dump_crates(+crate_cache: @mut ~[cache_entry]) {
|
||||
debug!("resolved crates:");
|
||||
for crate_cache.each |entry| {
|
||||
debug!("cnum: %?", entry.cnum);
|
||||
|
@ -75,8 +76,9 @@ fn dump_crates(crate_cache: DVec<cache_entry>) {
|
|||
}
|
||||
}
|
||||
|
||||
fn warn_if_multiple_versions(e: env, diag: span_handler,
|
||||
crate_cache: ~[cache_entry]) {
|
||||
fn warn_if_multiple_versions(e: @mut Env,
|
||||
diag: span_handler,
|
||||
crate_cache: @mut ~[cache_entry]) {
|
||||
use either::*;
|
||||
|
||||
if crate_cache.len() != 0u {
|
||||
|
@ -108,20 +110,22 @@ fn warn_if_multiple_versions(e: env, diag: span_handler,
|
|||
}
|
||||
}
|
||||
|
||||
warn_if_multiple_versions(e, diag, non_matches);
|
||||
warn_if_multiple_versions(e, diag, @mut non_matches);
|
||||
}
|
||||
}
|
||||
|
||||
type env = @{diag: span_handler,
|
||||
filesearch: FileSearch,
|
||||
cstore: cstore::CStore,
|
||||
os: loader::os,
|
||||
static: bool,
|
||||
crate_cache: DVec<cache_entry>,
|
||||
mut next_crate_num: ast::crate_num,
|
||||
intr: @ident_interner};
|
||||
struct Env {
|
||||
diag: span_handler,
|
||||
filesearch: FileSearch,
|
||||
cstore: @mut cstore::CStore,
|
||||
os: loader::os,
|
||||
statik: bool,
|
||||
crate_cache: @mut ~[cache_entry],
|
||||
next_crate_num: ast::crate_num,
|
||||
intr: @ident_interner
|
||||
}
|
||||
|
||||
fn visit_view_item(e: env, i: @ast::view_item) {
|
||||
fn visit_view_item(e: @mut Env, i: @ast::view_item) {
|
||||
match /*bad*/copy i.node {
|
||||
ast::view_item_use(ident, meta_items, id) => {
|
||||
debug!("resolving use stmt. ident: %?, meta: %?", ident, meta_items);
|
||||
|
@ -132,7 +136,7 @@ fn visit_view_item(e: env, i: @ast::view_item) {
|
|||
}
|
||||
}
|
||||
|
||||
fn visit_item(e: env, i: @ast::item) {
|
||||
fn visit_item(e: @mut Env, i: @ast::item) {
|
||||
match /*bad*/copy i.node {
|
||||
ast::item_foreign_mod(fm) => {
|
||||
match attr::foreign_abi(i.attrs) {
|
||||
|
@ -202,9 +206,8 @@ fn metas_with_ident(+ident: ~str, +metas: ~[@ast::meta_item])
|
|||
metas_with(ident, ~"name", metas)
|
||||
}
|
||||
|
||||
fn existing_match(e: env, metas: ~[@ast::meta_item], hash: ~str) ->
|
||||
Option<int> {
|
||||
|
||||
fn existing_match(e: @mut Env, metas: ~[@ast::meta_item], hash: ~str)
|
||||
-> Option<int> {
|
||||
for e.crate_cache.each |c| {
|
||||
if loader::metadata_matches(*c.metas, metas)
|
||||
&& (hash.is_empty() || c.hash == hash) {
|
||||
|
@ -214,8 +217,12 @@ fn existing_match(e: env, metas: ~[@ast::meta_item], hash: ~str) ->
|
|||
return None;
|
||||
}
|
||||
|
||||
fn resolve_crate(e: env, ident: ast::ident, +metas: ~[@ast::meta_item],
|
||||
+hash: ~str, span: span) -> ast::crate_num {
|
||||
fn resolve_crate(e: @mut Env,
|
||||
ident: ast::ident,
|
||||
+metas: ~[@ast::meta_item],
|
||||
+hash: ~str,
|
||||
span: span)
|
||||
-> ast::crate_num {
|
||||
let metas = metas_with_ident(/*bad*/copy *e.intr.get(ident), metas);
|
||||
|
||||
match existing_match(e, metas, hash) {
|
||||
|
@ -228,7 +235,7 @@ fn resolve_crate(e: env, ident: ast::ident, +metas: ~[@ast::meta_item],
|
|||
metas: metas,
|
||||
hash: hash,
|
||||
os: e.os,
|
||||
static: e.static,
|
||||
static: e.statik,
|
||||
intr: e.intr
|
||||
};
|
||||
let cinfo = loader::load_library_crate(load_ctxt);
|
||||
|
@ -270,7 +277,7 @@ fn resolve_crate(e: env, ident: ast::ident, +metas: ~[@ast::meta_item],
|
|||
}
|
||||
|
||||
// Go through the crate metadata and load any crates that it references
|
||||
fn resolve_crate_deps(e: env, cdata: @~[u8]) -> cstore::cnum_map {
|
||||
fn resolve_crate_deps(e: @mut Env, cdata: @~[u8]) -> cstore::cnum_map {
|
||||
debug!("resolving deps of external crate");
|
||||
// The map from crate numbers in the crate we're resolving to local crate
|
||||
// numbers
|
||||
|
|
|
@ -42,19 +42,19 @@ pub struct StaticMethodInfo {
|
|||
purity: ast::purity
|
||||
}
|
||||
|
||||
pub fn get_symbol(cstore: cstore::CStore, def: ast::def_id) -> ~str {
|
||||
pub fn get_symbol(cstore: @mut cstore::CStore, def: ast::def_id) -> ~str {
|
||||
let cdata = cstore::get_crate_data(cstore, def.crate).data;
|
||||
return decoder::get_symbol(cdata, def.node);
|
||||
}
|
||||
|
||||
pub fn get_type_param_count(cstore: cstore::CStore, def: ast::def_id)
|
||||
pub fn get_type_param_count(cstore: @mut cstore::CStore, def: ast::def_id)
|
||||
-> uint {
|
||||
let cdata = cstore::get_crate_data(cstore, def.crate).data;
|
||||
return decoder::get_type_param_count(cdata, def.node);
|
||||
}
|
||||
|
||||
/// Iterates over all the language items in the given crate.
|
||||
pub fn each_lang_item(cstore: cstore::CStore,
|
||||
pub fn each_lang_item(cstore: @mut cstore::CStore,
|
||||
cnum: ast::crate_num,
|
||||
f: &fn(ast::node_id, uint) -> bool) {
|
||||
let crate_data = cstore::get_crate_data(cstore, cnum);
|
||||
|
@ -62,8 +62,9 @@ pub fn each_lang_item(cstore: cstore::CStore,
|
|||
}
|
||||
|
||||
/// Iterates over all the paths in the given crate.
|
||||
pub fn each_path(cstore: cstore::CStore, cnum: ast::crate_num,
|
||||
f: fn(&str, decoder::def_like) -> bool) {
|
||||
pub fn each_path(cstore: @mut cstore::CStore,
|
||||
cnum: ast::crate_num,
|
||||
f: &fn(&str, decoder::def_like) -> bool) {
|
||||
let crate_data = cstore::get_crate_data(cstore, cnum);
|
||||
let get_crate_data: decoder::GetCrateDataCb = |cnum| {
|
||||
cstore::get_crate_data(cstore, cnum)
|
||||
|
@ -107,7 +108,7 @@ pub fn get_enum_variants(tcx: ty::ctxt, def: ast::def_id)
|
|||
return decoder::get_enum_variants(cstore.intr, cdata, def.node, tcx)
|
||||
}
|
||||
|
||||
pub fn get_impls_for_mod(cstore: cstore::CStore, def: ast::def_id,
|
||||
pub fn get_impls_for_mod(cstore: @mut cstore::CStore, def: ast::def_id,
|
||||
name: Option<ast::ident>)
|
||||
-> @~[@decoder::_impl] {
|
||||
let cdata = cstore::get_crate_data(cstore, def.crate);
|
||||
|
@ -138,27 +139,27 @@ pub fn get_supertraits(tcx: ty::ctxt, def: ast::def_id) -> ~[ty::t] {
|
|||
decoder::get_supertraits(cdata, def.node, tcx)
|
||||
}
|
||||
|
||||
pub fn get_method_names_if_trait(cstore: cstore::CStore, def: ast::def_id)
|
||||
-> Option<@DVec<(ast::ident, ast::self_ty_)>> {
|
||||
|
||||
pub fn get_method_names_if_trait(cstore: @mut cstore::CStore,
|
||||
def: ast::def_id)
|
||||
-> Option<@DVec<(ast::ident, ast::self_ty_)>> {
|
||||
let cdata = cstore::get_crate_data(cstore, def.crate);
|
||||
return decoder::get_method_names_if_trait(cstore.intr, cdata, def.node);
|
||||
}
|
||||
|
||||
pub fn get_type_name_if_impl(cstore: cstore::CStore, def: ast::def_id) ->
|
||||
Option<ast::ident> {
|
||||
pub fn get_type_name_if_impl(cstore: @mut cstore::CStore, def: ast::def_id)
|
||||
-> Option<ast::ident> {
|
||||
let cdata = cstore::get_crate_data(cstore, def.crate);
|
||||
decoder::get_type_name_if_impl(cstore.intr, cdata, def.node)
|
||||
}
|
||||
|
||||
pub fn get_static_methods_if_impl(cstore: cstore::CStore,
|
||||
pub fn get_static_methods_if_impl(cstore: @mut cstore::CStore,
|
||||
def: ast::def_id)
|
||||
-> Option<~[StaticMethodInfo]> {
|
||||
let cdata = cstore::get_crate_data(cstore, def.crate);
|
||||
decoder::get_static_methods_if_impl(cstore.intr, cdata, def.node)
|
||||
}
|
||||
|
||||
pub fn get_item_attrs(cstore: cstore::CStore,
|
||||
pub fn get_item_attrs(cstore: @mut cstore::CStore,
|
||||
def_id: ast::def_id,
|
||||
f: fn(~[@ast::meta_item])) {
|
||||
let cdata = cstore::get_crate_data(cstore, def_id.crate);
|
||||
|
@ -179,7 +180,7 @@ pub fn get_type(tcx: ty::ctxt,
|
|||
decoder::get_type(cdata, def.node, tcx)
|
||||
}
|
||||
|
||||
pub fn get_region_param(cstore: metadata::cstore::CStore,
|
||||
pub fn get_region_param(cstore: @mut metadata::cstore::CStore,
|
||||
def: ast::def_id) -> Option<ty::region_variance> {
|
||||
let cdata = cstore::get_crate_data(cstore, def.crate);
|
||||
return decoder::get_region_param(cdata, def.node);
|
||||
|
@ -216,7 +217,7 @@ pub fn get_impl_traits(tcx: ty::ctxt, def: ast::def_id) -> ~[ty::t] {
|
|||
decoder::get_impl_traits(cdata, def.node, tcx)
|
||||
}
|
||||
|
||||
pub fn get_impl_method(cstore: cstore::CStore,
|
||||
pub fn get_impl_method(cstore: @mut cstore::CStore,
|
||||
def: ast::def_id,
|
||||
mname: ast::ident)
|
||||
-> ast::def_id {
|
||||
|
@ -225,7 +226,7 @@ pub fn get_impl_method(cstore: cstore::CStore,
|
|||
}
|
||||
|
||||
/* If def names a class with a dtor, return it. Otherwise, return none. */
|
||||
pub fn struct_dtor(cstore: cstore::CStore, def: ast::def_id)
|
||||
pub fn struct_dtor(cstore: @mut cstore::CStore, def: ast::def_id)
|
||||
-> Option<ast::def_id> {
|
||||
let cdata = cstore::get_crate_data(cstore, def.crate);
|
||||
decoder::struct_dtor(cdata, def.node)
|
||||
|
|
|
@ -38,129 +38,137 @@ pub type crate_metadata = @{name: ~str,
                            cnum_map: cnum_map,
                            cnum: ast::crate_num};

// This is a bit of an experiment at encapsulating the data in cstore. By
// keeping all the data in a non-exported enum variant, it's impossible for
// other modules to access the cstore's private data. This could also be
// achieved with an obj, but at the expense of a vtable. Not sure if this is a
// good pattern or not.
pub enum CStore { private(cstore_private), }

type cstore_private =
    @{metas: oldmap::HashMap<ast::crate_num, crate_metadata>,
      use_crate_map: use_crate_map,
      mut used_crate_files: ~[Path],
      mut used_libraries: ~[~str],
      mut used_link_args: ~[~str],
      intr: @ident_interner};
pub struct CStore {
    priv metas: oldmap::HashMap<ast::crate_num, crate_metadata>,
    priv use_crate_map: use_crate_map,
    priv used_crate_files: ~[Path],
    priv used_libraries: ~[~str],
    priv used_link_args: ~[~str],
    intr: @ident_interner
}

// Map from node_id's of local use statements to crate numbers
type use_crate_map = oldmap::HashMap<ast::node_id, ast::crate_num>;

// Internal method to retrieve the data from the cstore
pure fn p(cstore: CStore) -> cstore_private {
    match cstore { private(p) => p }
}

pub fn mk_cstore(intr: @ident_interner) -> CStore {
    let meta_cache = oldmap::HashMap();
    let crate_map = oldmap::HashMap();
    return private(@{metas: meta_cache,
                     use_crate_map: crate_map,
                     mut used_crate_files: ~[],
                     mut used_libraries: ~[],
                     mut used_link_args: ~[],
                     intr: intr});
    return CStore {
        metas: meta_cache,
        use_crate_map: crate_map,
        used_crate_files: ~[],
        used_libraries: ~[],
        used_link_args: ~[],
        intr: intr
    };
}

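The comment kept above spells out the old encapsulation trick: the public `CStore` was an enum whose single variant carried a non-exported record, so no other module could reach the data. The patch replaces that with `priv` fields on a plain struct plus accessor functions. A small sketch of the struct-with-private-fields version in current Rust, where module privacy plays the role of `priv`; the types here are illustrative stand-ins, not the real metadata maps:

    mod cstore {
        use std::collections::HashMap;

        // Fields are not `pub`, so only this module can touch them; callers
        // must go through the accessor functions, as in the patched code.
        pub struct CStore {
            metas: HashMap<u32, String>,
        }

        pub fn mk_cstore() -> CStore {
            CStore { metas: HashMap::new() }
        }

        pub fn set_crate_data(cstore: &mut CStore, cnum: u32, data: String) {
            cstore.metas.insert(cnum, data);
        }

        pub fn get_crate_data(cstore: &CStore, cnum: u32) -> Option<&String> {
            cstore.metas.get(&cnum)
        }
    }

    fn main() {
        let mut cs = cstore::mk_cstore();
        cstore::set_crate_data(&mut cs, 1, "crate 1 metadata".to_string());
        assert!(cstore::get_crate_data(&cs, 1).is_some());
    }

This keeps the encapsulation the comment asks for without the wrapper enum or the internal p() unwrapping helper.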
pub fn get_crate_data(cstore: CStore, cnum: ast::crate_num)
|
||||
pub fn get_crate_data(cstore: @mut CStore, cnum: ast::crate_num)
|
||||
-> crate_metadata {
|
||||
return p(cstore).metas.get(&cnum);
|
||||
return cstore.metas.get(&cnum);
|
||||
}
|
||||
|
||||
pub fn get_crate_hash(cstore: CStore, cnum: ast::crate_num) -> ~str {
|
||||
pub fn get_crate_hash(cstore: @mut CStore, cnum: ast::crate_num) -> ~str {
|
||||
let cdata = get_crate_data(cstore, cnum);
|
||||
return decoder::get_crate_hash(cdata.data);
|
||||
}
|
||||
|
||||
pub fn get_crate_vers(cstore: CStore, cnum: ast::crate_num) -> ~str {
|
||||
pub fn get_crate_vers(cstore: @mut CStore, cnum: ast::crate_num) -> ~str {
|
||||
let cdata = get_crate_data(cstore, cnum);
|
||||
return decoder::get_crate_vers(cdata.data);
|
||||
}
|
||||
|
||||
pub fn set_crate_data(cstore: CStore,
|
||||
pub fn set_crate_data(cstore: @mut CStore,
|
||||
cnum: ast::crate_num,
|
||||
data: crate_metadata) {
|
||||
p(cstore).metas.insert(cnum, data);
|
||||
let metas = cstore.metas;
|
||||
metas.insert(cnum, data);
|
||||
}
|
||||
|
||||
pub fn have_crate_data(cstore: CStore, cnum: ast::crate_num) -> bool {
|
||||
return p(cstore).metas.contains_key_ref(&cnum);
|
||||
pub fn have_crate_data(cstore: @mut CStore, cnum: ast::crate_num) -> bool {
|
||||
return cstore.metas.contains_key_ref(&cnum);
|
||||
}
|
||||
|
||||
pub fn iter_crate_data(cstore: CStore,
|
||||
pub fn iter_crate_data(cstore: @mut CStore,
|
||||
i: fn(ast::crate_num, crate_metadata)) {
|
||||
for p(cstore).metas.each_ref |&k, &v| { i(k, v);};
|
||||
}
|
||||
|
||||
pub fn add_used_crate_file(cstore: CStore, lib: &Path) {
|
||||
if !vec::contains(p(cstore).used_crate_files, lib) {
|
||||
p(cstore).used_crate_files.push(copy *lib);
|
||||
let metas = cstore.metas;
|
||||
for metas.each_ref |&k, &v| {
|
||||
i(k, v);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_used_crate_files(cstore: CStore) -> ~[Path] {
|
||||
return /*bad*/copy p(cstore).used_crate_files;
|
||||
pub fn add_used_crate_file(cstore: @mut CStore, lib: &Path) {
|
||||
if !vec::contains(cstore.used_crate_files, lib) {
|
||||
cstore.used_crate_files.push(copy *lib);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_used_library(cstore: CStore, +lib: ~str) -> bool {
|
||||
pub fn get_used_crate_files(cstore: @mut CStore) -> ~[Path] {
|
||||
return /*bad*/copy cstore.used_crate_files;
|
||||
}
|
||||
|
||||
pub fn add_used_library(cstore: @mut CStore, +lib: ~str) -> bool {
|
||||
assert lib != ~"";
|
||||
|
||||
if vec::contains(p(cstore).used_libraries, &lib) { return false; }
|
||||
p(cstore).used_libraries.push(lib);
|
||||
if vec::contains(cstore.used_libraries, &lib) { return false; }
|
||||
cstore.used_libraries.push(lib);
|
||||
return true;
|
||||
}
|
||||
|
||||
pub fn get_used_libraries(cstore: CStore) -> ~[~str] {
|
||||
return /*bad*/copy p(cstore).used_libraries;
|
||||
pub fn get_used_libraries(cstore: @mut CStore) -> ~[~str] {
|
||||
return /*bad*/copy cstore.used_libraries;
|
||||
}
|
||||
|
||||
pub fn add_used_link_args(cstore: CStore, args: ~str) {
|
||||
p(cstore).used_link_args.push_all(str::split_char(args, ' '));
|
||||
pub fn add_used_link_args(cstore: @mut CStore, args: ~str) {
|
||||
cstore.used_link_args.push_all(str::split_char(args, ' '));
|
||||
}
|
||||
|
||||
pub fn get_used_link_args(cstore: CStore) -> ~[~str] {
|
||||
return /*bad*/copy p(cstore).used_link_args;
|
||||
pub fn get_used_link_args(cstore: @mut CStore) -> ~[~str] {
|
||||
return /*bad*/copy cstore.used_link_args;
|
||||
}
|
||||
|
||||
pub fn add_use_stmt_cnum(cstore: CStore, use_id: ast::node_id,
|
||||
pub fn add_use_stmt_cnum(cstore: @mut CStore,
|
||||
use_id: ast::node_id,
|
||||
cnum: ast::crate_num) {
|
||||
p(cstore).use_crate_map.insert(use_id, cnum);
|
||||
let use_crate_map = cstore.use_crate_map;
|
||||
use_crate_map.insert(use_id, cnum);
|
||||
}
|
||||
|
||||
pub fn find_use_stmt_cnum(cstore: CStore,
|
||||
use_id: ast::node_id) -> Option<ast::crate_num> {
|
||||
p(cstore).use_crate_map.find(&use_id)
|
||||
pub fn find_use_stmt_cnum(cstore: @mut CStore,
|
||||
use_id: ast::node_id)
|
||||
-> Option<ast::crate_num> {
|
||||
let use_crate_map = cstore.use_crate_map;
|
||||
use_crate_map.find(&use_id)
|
||||
}
|
||||
|
||||
// returns hashes of crates directly used by this crate. Hashes are
// sorted by crate name.
pub fn get_dep_hashes(cstore: CStore) -> ~[~str] {
pub fn get_dep_hashes(cstore: @mut CStore) -> ~[~str] {
    type crate_hash = {name: ~str, hash: ~str};
    let mut result = ~[];

    for p(cstore).use_crate_map.each_value_ref |&cnum| {
    let use_crate_map = cstore.use_crate_map;
    for use_crate_map.each_value_ref |&cnum| {
        let cdata = cstore::get_crate_data(cstore, cnum);
        let hash = decoder::get_crate_hash(cdata.data);
        debug!("Add hash[%s]: %s", cdata.name, hash);
        result.push({name: /*bad*/copy cdata.name, hash: hash});
    };
    pure fn lteq(a: &crate_hash, b: &crate_hash) -> bool {a.name <= b.name}
    }

    pure fn lteq(a: &crate_hash, b: &crate_hash) -> bool {
        a.name <= b.name
    }

    let sorted = std::sort::merge_sort(result, lteq);
    debug!("sorted:");
    for sorted.each |x| {
        debug!(" hash[%s]: %s", x.name, x.hash);
    }
    fn mapper(ch: &crate_hash) -> ~str { return /*bad*/copy ch.hash; }

    fn mapper(ch: &crate_hash) -> ~str {
        return /*bad*/copy ch.hash;
    }

    return vec::map(sorted, mapper);
}
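get_dep_hashes above is a small collect-sort-project pipeline: build a (name, hash) record per directly used crate, merge-sort the records by crate name, then keep only the hashes. A compact sketch of that pipeline in current Rust; CrateHash and the sample data are illustrative, not the real metadata types:

    struct CrateHash {
        name: String,
        hash: String,
    }

    // Same shape as get_dep_hashes: sort by crate name, project out the hash.
    fn dep_hashes(mut deps: Vec<CrateHash>) -> Vec<String> {
        deps.sort_by(|a, b| a.name.cmp(&b.name));
        deps.into_iter().map(|d| d.hash).collect()
    }

    fn main() {
        let deps = vec![
            CrateHash { name: "std".to_string(), hash: "beef".to_string() },
            CrateHash { name: "core".to_string(), hash: "cafe".to_string() },
        ];
        // "core" sorts before "std", so its hash comes first.
        assert_eq!(dep_hashes(deps), vec!["cafe".to_string(), "beef".to_string()]);
    }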
|
||||
|
|
|
@ -67,33 +67,33 @@ pub type encode_parms = {
|
|||
item_symbols: HashMap<ast::node_id, ~str>,
|
||||
discrim_symbols: HashMap<ast::node_id, ~str>,
|
||||
link_meta: link_meta,
|
||||
cstore: cstore::CStore,
|
||||
cstore: @mut cstore::CStore,
|
||||
encode_inlined_item: encode_inlined_item
|
||||
};
|
||||
|
||||
type stats = {
|
||||
mut inline_bytes: uint,
|
||||
mut attr_bytes: uint,
|
||||
mut dep_bytes: uint,
|
||||
mut lang_item_bytes: uint,
|
||||
mut item_bytes: uint,
|
||||
mut index_bytes: uint,
|
||||
mut zero_bytes: uint,
|
||||
mut total_bytes: uint,
|
||||
struct Stats {
|
||||
inline_bytes: uint,
|
||||
attr_bytes: uint,
|
||||
dep_bytes: uint,
|
||||
lang_item_bytes: uint,
|
||||
item_bytes: uint,
|
||||
index_bytes: uint,
|
||||
zero_bytes: uint,
|
||||
total_bytes: uint,
|
||||
|
||||
mut n_inlines: uint
|
||||
};
|
||||
n_inlines: uint
|
||||
}
|
||||
|
||||
pub enum encode_ctxt = {
|
||||
diag: span_handler,
|
||||
tcx: ty::ctxt,
|
||||
stats: stats,
|
||||
stats: @mut Stats,
|
||||
reachable: HashMap<ast::node_id, ()>,
|
||||
reexports2: middle::resolve::ExportMap2,
|
||||
item_symbols: HashMap<ast::node_id, ~str>,
|
||||
discrim_symbols: HashMap<ast::node_id, ~str>,
|
||||
link_meta: link_meta,
|
||||
cstore: cstore::CStore,
|
||||
cstore: @mut cstore::CStore,
|
||||
encode_inlined_item: encode_inlined_item,
|
||||
type_abbrevs: abbrev_map
|
||||
};
|
||||
|
@ -1067,12 +1067,11 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: &crate) -> ~[attribute] {
|
|||
return attrs;
|
||||
}
|
||||
|
||||
fn encode_crate_deps(ecx: @encode_ctxt, ebml_w: writer::Encoder,
|
||||
cstore: cstore::CStore) {
|
||||
|
||||
fn get_ordered_deps(ecx: @encode_ctxt, cstore: cstore::CStore)
|
||||
-> ~[decoder::crate_dep] {
|
||||
|
||||
fn encode_crate_deps(ecx: @encode_ctxt,
|
||||
ebml_w: writer::Encoder,
|
||||
cstore: @mut cstore::CStore) {
|
||||
fn get_ordered_deps(ecx: @encode_ctxt, cstore: @mut cstore::CStore)
|
||||
-> ~[decoder::crate_dep] {
|
||||
type hashkv = @{key: crate_num, val: cstore::crate_metadata};
|
||||
type numdep = decoder::crate_dep;
|
||||
|
||||
|
@ -1168,20 +1167,21 @@ pub const metadata_encoding_version : &[u8] = &[0x72, //'r' as u8,
|
|||
|
||||
pub fn encode_metadata(parms: encode_parms, crate: &crate) -> ~[u8] {
|
||||
let wr = @io::BytesWriter();
|
||||
let stats =
|
||||
{mut inline_bytes: 0,
|
||||
mut attr_bytes: 0,
|
||||
mut dep_bytes: 0,
|
||||
mut lang_item_bytes: 0,
|
||||
mut item_bytes: 0,
|
||||
mut index_bytes: 0,
|
||||
mut zero_bytes: 0,
|
||||
mut total_bytes: 0,
|
||||
mut n_inlines: 0};
|
||||
let mut stats = Stats {
|
||||
inline_bytes: 0,
|
||||
attr_bytes: 0,
|
||||
dep_bytes: 0,
|
||||
lang_item_bytes: 0,
|
||||
item_bytes: 0,
|
||||
index_bytes: 0,
|
||||
zero_bytes: 0,
|
||||
total_bytes: 0,
|
||||
n_inlines: 0
|
||||
};
|
||||
let ecx: @encode_ctxt = @encode_ctxt({
|
||||
diag: parms.diag,
|
||||
tcx: parms.tcx,
|
||||
stats: move stats,
|
||||
stats: @mut move stats,
|
||||
reachable: parms.reachable,
|
||||
reexports2: parms.reexports2,
|
||||
item_symbols: parms.item_symbols,
|
||||
|
|
|
@ -53,30 +53,35 @@ pub enum DefIdSource {
|
|||
}
|
||||
type conv_did = fn(source: DefIdSource, ast::def_id) -> ast::def_id;
|
||||
|
||||
pub type pstate = {data: @~[u8], crate: int, mut pos: uint, tcx: ty::ctxt};
|
||||
pub struct PState {
|
||||
data: @~[u8],
|
||||
crate: int,
|
||||
pos: uint,
|
||||
tcx: ty::ctxt
|
||||
}
|
||||
|
||||
fn peek(st: @pstate) -> char {
|
||||
fn peek(st: @mut PState) -> char {
|
||||
st.data[st.pos] as char
|
||||
}
|
||||
|
||||
fn next(st: @pstate) -> char {
|
||||
fn next(st: @mut PState) -> char {
|
||||
let ch = st.data[st.pos] as char;
|
||||
st.pos = st.pos + 1u;
|
||||
return ch;
|
||||
}
|
||||
|
||||
fn next_byte(st: @pstate) -> u8 {
|
||||
fn next_byte(st: @mut PState) -> u8 {
|
||||
let b = st.data[st.pos];
|
||||
st.pos = st.pos + 1u;
|
||||
return b;
|
||||
}
|
||||
|
||||
pub fn parse_ident(st: @pstate, last: char) -> ast::ident {
|
||||
pub fn parse_ident(st: @mut PState, last: char) -> ast::ident {
|
||||
fn is_last(b: char, c: char) -> bool { return c == b; }
|
||||
return parse_ident_(st, |a| is_last(last, a) );
|
||||
}
|
||||
|
||||
fn parse_ident_(st: @pstate, is_last: fn@(char) -> bool) ->
|
||||
fn parse_ident_(st: @mut PState, is_last: fn@(char) -> bool) ->
|
||||
ast::ident {
|
||||
let mut rslt = ~"";
|
||||
while !is_last(peek(st)) {
|
||||
|
@ -86,8 +91,13 @@ fn parse_ident_(st: @pstate, is_last: fn@(char) -> bool) ->
|
|||
}
|
||||
|
||||
pub fn parse_state_from_data(data: @~[u8], crate_num: int,
|
||||
pos: uint, tcx: ty::ctxt) -> @pstate {
|
||||
@{data: data, crate: crate_num, mut pos: pos, tcx: tcx}
|
||||
pos: uint, tcx: ty::ctxt) -> @mut PState {
|
||||
@mut PState {
|
||||
data: data,
|
||||
crate: crate_num,
|
||||
pos: pos,
|
||||
tcx: tcx
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_ty_data(data: @~[u8], crate_num: int, pos: uint, tcx: ty::ctxt,
|
||||
|
@ -102,7 +112,7 @@ pub fn parse_arg_data(data: @~[u8], crate_num: int, pos: uint, tcx: ty::ctxt,
|
|||
parse_arg(st, conv)
|
||||
}
|
||||
|
||||
fn parse_path(st: @pstate) -> @ast::path {
|
||||
fn parse_path(st: @mut PState) -> @ast::path {
|
||||
let mut idents: ~[ast::ident] = ~[];
|
||||
fn is_last(c: char) -> bool { return c == '(' || c == ':'; }
|
||||
idents.push(parse_ident_(st, is_last));
|
||||
|
@ -122,7 +132,7 @@ fn parse_path(st: @pstate) -> @ast::path {
|
|||
};
|
||||
}
|
||||
|
||||
fn parse_sigil(st: @pstate) -> ast::Sigil {
|
||||
fn parse_sigil(st: @mut PState) -> ast::Sigil {
|
||||
match next(st) {
|
||||
'@' => ast::ManagedSigil,
|
||||
'~' => ast::OwnedSigil,
|
||||
|
@ -131,7 +141,7 @@ fn parse_sigil(st: @pstate) -> ast::Sigil {
|
|||
}
|
||||
}
|
||||
|
||||
fn parse_vstore(st: @pstate) -> ty::vstore {
|
||||
fn parse_vstore(st: @mut PState) -> ty::vstore {
|
||||
assert next(st) == '/';
|
||||
|
||||
let c = peek(st);
|
||||
|
@ -149,7 +159,7 @@ fn parse_vstore(st: @pstate) -> ty::vstore {
|
|||
}
|
||||
}
|
||||
|
||||
fn parse_substs(st: @pstate, conv: conv_did) -> ty::substs {
|
||||
fn parse_substs(st: @mut PState, conv: conv_did) -> ty::substs {
|
||||
let self_r = parse_opt(st, || parse_region(st) );
|
||||
|
||||
let self_ty = parse_opt(st, || parse_ty(st, conv) );
|
||||
|
@ -166,7 +176,7 @@ fn parse_substs(st: @pstate, conv: conv_did) -> ty::substs {
|
|||
};
|
||||
}
|
||||
|
||||
fn parse_bound_region(st: @pstate) -> ty::bound_region {
|
||||
fn parse_bound_region(st: @mut PState) -> ty::bound_region {
|
||||
match next(st) {
|
||||
's' => ty::br_self,
|
||||
'a' => {
|
||||
|
@ -184,7 +194,7 @@ fn parse_bound_region(st: @pstate) -> ty::bound_region {
|
|||
}
|
||||
}
|
||||
|
||||
fn parse_region(st: @pstate) -> ty::Region {
|
||||
fn parse_region(st: @mut PState) -> ty::Region {
|
||||
match next(st) {
|
||||
'b' => {
|
||||
ty::re_bound(parse_bound_region(st))
|
||||
|
@ -209,7 +219,7 @@ fn parse_region(st: @pstate) -> ty::Region {
|
|||
}
|
||||
}
|
||||
|
||||
fn parse_opt<T>(st: @pstate, f: fn() -> T) -> Option<T> {
|
||||
fn parse_opt<T>(st: @mut PState, f: fn() -> T) -> Option<T> {
|
||||
match next(st) {
|
||||
'n' => None,
|
||||
's' => Some(f()),
|
||||
|
@ -217,7 +227,7 @@ fn parse_opt<T>(st: @pstate, f: fn() -> T) -> Option<T> {
|
|||
}
|
||||
}
|
||||
|
||||
fn parse_str(st: @pstate, term: char) -> ~str {
|
||||
fn parse_str(st: @mut PState, term: char) -> ~str {
|
||||
let mut result = ~"";
|
||||
while peek(st) != term {
|
||||
result += str::from_byte(next_byte(st));
|
||||
|
@ -226,7 +236,7 @@ fn parse_str(st: @pstate, term: char) -> ~str {
|
|||
return result;
|
||||
}
|
||||
|
||||
fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
|
||||
fn parse_ty(st: @mut PState, conv: conv_did) -> ty::t {
|
||||
match next(st) {
|
||||
'n' => return ty::mk_nil(st.tcx),
|
||||
'z' => return ty::mk_bot(st.tcx),
|
||||
|
@ -330,7 +340,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
|
|||
match st.tcx.rcache.find(&key) {
|
||||
Some(tt) => return tt,
|
||||
None => {
|
||||
let ps = @{pos: pos ,.. copy *st};
|
||||
let ps = @mut PState {pos: pos ,.. copy *st};
|
||||
let tt = parse_ty(ps, conv);
|
||||
st.tcx.rcache.insert(key, tt);
|
||||
return tt;
|
||||
|
@ -354,7 +364,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
|
|||
}
|
||||
}
|
||||
|
||||
fn parse_mt(st: @pstate, conv: conv_did) -> ty::mt {
|
||||
fn parse_mt(st: @mut PState, conv: conv_did) -> ty::mt {
|
||||
let mut m;
|
||||
match peek(st) {
|
||||
'm' => { next(st); m = ast::m_mutbl; }
|
||||
|
@ -364,7 +374,7 @@ fn parse_mt(st: @pstate, conv: conv_did) -> ty::mt {
|
|||
ty::mt { ty: parse_ty(st, conv), mutbl: m }
|
||||
}
|
||||
|
||||
fn parse_def(st: @pstate, source: DefIdSource,
|
||||
fn parse_def(st: @mut PState, source: DefIdSource,
|
||||
conv: conv_did) -> ast::def_id {
|
||||
let mut def = ~[];
|
||||
while peek(st) != '|' { def.push(next_byte(st)); }
|
||||
|
@ -372,7 +382,7 @@ fn parse_def(st: @pstate, source: DefIdSource,
|
|||
return conv(source, parse_def_id(def));
|
||||
}
|
||||
|
||||
fn parse_int(st: @pstate) -> int {
|
||||
fn parse_int(st: @mut PState) -> int {
|
||||
let mut n = 0;
|
||||
loop {
|
||||
let cur = peek(st);
|
||||
|
@ -383,7 +393,7 @@ fn parse_int(st: @pstate) -> int {
|
|||
};
|
||||
}
|
||||
|
||||
fn parse_hex(st: @pstate) -> uint {
|
||||
fn parse_hex(st: @mut PState) -> uint {
|
||||
let mut n = 0u;
|
||||
loop {
|
||||
let cur = peek(st);
|
||||
|
@ -421,11 +431,11 @@ fn parse_onceness(c: char) -> ast::Onceness {
|
|||
}
|
||||
}
|
||||
|
||||
fn parse_arg(st: @pstate, conv: conv_did) -> ty::arg {
|
||||
fn parse_arg(st: @mut PState, conv: conv_did) -> ty::arg {
|
||||
ty::arg { mode: parse_mode(st), ty: parse_ty(st, conv) }
|
||||
}
|
||||
|
||||
fn parse_mode(st: @pstate) -> ast::mode {
|
||||
fn parse_mode(st: @mut PState) -> ast::mode {
|
||||
let m = ast::expl(match next(st) {
|
||||
'+' => ast::by_copy,
|
||||
'=' => ast::by_ref,
|
||||
|
@ -435,7 +445,7 @@ fn parse_mode(st: @pstate) -> ast::mode {
|
|||
return m;
|
||||
}
|
||||
|
||||
fn parse_closure_ty(st: @pstate, conv: conv_did) -> ty::ClosureTy {
|
||||
fn parse_closure_ty(st: @mut PState, conv: conv_did) -> ty::ClosureTy {
|
||||
let sigil = parse_sigil(st);
|
||||
let purity = parse_purity(next(st));
|
||||
let onceness = parse_onceness(next(st));
|
||||
|
@ -450,7 +460,7 @@ fn parse_closure_ty(st: @pstate, conv: conv_did) -> ty::ClosureTy {
|
|||
}
|
||||
}
|
||||
|
||||
fn parse_bare_fn_ty(st: @pstate, conv: conv_did) -> ty::BareFnTy {
|
||||
fn parse_bare_fn_ty(st: @mut PState, conv: conv_did) -> ty::BareFnTy {
|
||||
let purity = parse_purity(next(st));
|
||||
let abi = parse_abi(next(st));
|
||||
let sig = parse_sig(st, conv);
|
||||
|
@ -461,7 +471,7 @@ fn parse_bare_fn_ty(st: @pstate, conv: conv_did) -> ty::BareFnTy {
|
|||
}
|
||||
}
|
||||
|
||||
fn parse_sig(st: @pstate, conv: conv_did) -> ty::FnSig {
|
||||
fn parse_sig(st: @mut PState, conv: conv_did) -> ty::FnSig {
|
||||
assert (next(st) == '[');
|
||||
let mut inputs: ~[ty::arg] = ~[];
|
||||
while peek(st) != ']' {
|
||||
|
@ -506,7 +516,7 @@ pub fn parse_bounds_data(data: @~[u8], start: uint,
|
|||
parse_bounds(st, conv)
|
||||
}
|
||||
|
||||
fn parse_bounds(st: @pstate, conv: conv_did) -> @~[ty::param_bound] {
|
||||
fn parse_bounds(st: @mut PState, conv: conv_did) -> @~[ty::param_bound] {
|
||||
let mut bounds = ~[];
|
||||
loop {
|
||||
bounds.push(match next(st) {
|
||||
|
|
|
@ -1274,13 +1274,13 @@ fn test_simplification() {
|
|||
let item_in = ast::ii_item(quote_item!(
|
||||
fn new_int_alist<B: Copy>() -> alist<int, B> {
|
||||
fn eq_int(&&a: int, &&b: int) -> bool { a == b }
|
||||
return {eq_fn: eq_int, mut data: ~[]};
|
||||
return {eq_fn: eq_int, data: ~[]};
|
||||
}
|
||||
).get());
|
||||
let item_out = simplify_ast(item_in);
|
||||
let item_exp = ast::ii_item(quote_item!(
|
||||
fn new_int_alist<B: Copy>() -> alist<int, B> {
|
||||
return {eq_fn: eq_int, mut data: ~[]};
|
||||
return {eq_fn: eq_int, data: ~[]};
|
||||
}
|
||||
).get());
|
||||
match (item_out, item_exp) {
|
||||
|
|
|
@ -21,7 +21,7 @@ use core::prelude::*;
|
|||
|
||||
use middle::moves;
|
||||
use middle::borrowck::{Loan, bckerr, BorrowckCtxt, inherent_mutability};
|
||||
use middle::borrowck::{req_maps, root_map_key, save_and_restore};
|
||||
use middle::borrowck::{req_maps, root_map_key, save_and_restore_managed};
|
||||
use middle::borrowck::{MoveError, MoveOk, MoveFromIllegalCmt};
|
||||
use middle::borrowck::{MoveWhileBorrowed};
|
||||
use middle::mem_categorization::{cat_arg, cat_binding, cat_comp, cat_deref};
|
||||
|
@ -43,15 +43,15 @@ use syntax::codemap::span;
|
|||
use syntax::print::pprust;
|
||||
use syntax::visit;
|
||||
|
||||
enum check_loan_ctxt = @{
|
||||
struct CheckLoanCtxt {
|
||||
bccx: @BorrowckCtxt,
|
||||
req_maps: req_maps,
|
||||
|
||||
reported: HashMap<ast::node_id, ()>,
|
||||
|
||||
mut declared_purity: ast::purity,
|
||||
mut fn_args: @~[ast::node_id]
|
||||
};
|
||||
declared_purity: @mut ast::purity,
|
||||
fn_args: @mut @~[ast::node_id]
|
||||
}
|
||||
|
||||
// if we are enforcing purity, why are we doing so?
|
||||
#[deriving_eq]
|
||||
|
@ -66,13 +66,15 @@ enum purity_cause {
|
|||
}
|
||||
|
||||
pub fn check_loans(bccx: @BorrowckCtxt,
|
||||
req_maps: req_maps,
|
||||
crate: @ast::crate) {
|
||||
let clcx = check_loan_ctxt(@{bccx: bccx,
|
||||
req_maps: req_maps,
|
||||
reported: HashMap(),
|
||||
mut declared_purity: ast::impure_fn,
|
||||
mut fn_args: @~[]});
|
||||
req_maps: req_maps,
|
||||
crate: @ast::crate) {
|
||||
let clcx = @mut CheckLoanCtxt {
|
||||
bccx: bccx,
|
||||
req_maps: req_maps,
|
||||
reported: HashMap(),
|
||||
declared_purity: @mut ast::impure_fn,
|
||||
fn_args: @mut @~[]
|
||||
};
|
||||
let vt = visit::mk_vt(@visit::Visitor {visit_expr: check_loans_in_expr,
|
||||
visit_local: check_loans_in_local,
|
||||
visit_block: check_loans_in_block,
|
||||
|
@ -104,11 +106,11 @@ impl assignment_type {
|
|||
}
|
||||
}
|
||||
|
||||
impl check_loan_ctxt {
|
||||
fn tcx() -> ty::ctxt { self.bccx.tcx }
|
||||
impl CheckLoanCtxt {
|
||||
fn tcx(@mut self) -> ty::ctxt { self.bccx.tcx }
|
||||
|
||||
fn purity(scope_id: ast::node_id) -> Option<purity_cause> {
|
||||
let default_purity = match self.declared_purity {
|
||||
fn purity(@mut self, scope_id: ast::node_id) -> Option<purity_cause> {
|
||||
let default_purity = match *self.declared_purity {
|
||||
// an unsafe declaration overrides all
|
||||
ast::unsafe_fn => return None,
|
||||
|
||||
|
@ -138,7 +140,9 @@ impl check_loan_ctxt {
|
|||
}
|
||||
}
|
||||
|
||||
fn walk_loans(scope_id: ast::node_id, f: fn(v: &Loan) -> bool) {
|
||||
fn walk_loans(@mut self,
|
||||
scope_id: ast::node_id,
|
||||
f: &fn(v: &Loan) -> bool) {
|
||||
let mut scope_id = scope_id;
|
||||
let region_map = self.tcx().region_map;
|
||||
let req_loan_map = self.req_maps.req_loan_map;
|
||||
|
@ -157,9 +161,10 @@ impl check_loan_ctxt {
|
|||
}
|
||||
}
|
||||
|
||||
fn walk_loans_of(scope_id: ast::node_id,
|
||||
fn walk_loans_of(@mut self,
|
||||
scope_id: ast::node_id,
|
||||
lp: @loan_path,
|
||||
f: fn(v: &Loan) -> bool) {
|
||||
f: &fn(v: &Loan) -> bool) {
|
||||
for self.walk_loans(scope_id) |loan| {
|
||||
if loan.lp == lp {
|
||||
if !f(loan) { return; }
|
||||
|
@ -173,7 +178,8 @@ impl check_loan_ctxt {
|
|||
// note: we take opt_expr and expr_id separately because for
|
||||
// overloaded operators the callee has an id but no expr.
|
||||
// annoying.
|
||||
fn check_pure_callee_or_arg(pc: purity_cause,
|
||||
fn check_pure_callee_or_arg(@mut self,
|
||||
pc: purity_cause,
|
||||
opt_expr: Option<@ast::expr>,
|
||||
callee_id: ast::node_id,
|
||||
callee_span: span) {
|
||||
|
@ -239,7 +245,7 @@ impl check_loan_ctxt {
|
|||
|
||||
// True if the expression with the given `id` is a stack closure.
|
||||
// The expression must be an expr_fn(*) or expr_fn_block(*)
|
||||
fn is_stack_closure(id: ast::node_id) -> bool {
|
||||
fn is_stack_closure(@mut self, id: ast::node_id) -> bool {
|
||||
let fn_ty = ty::node_id_to_type(self.tcx(), id);
|
||||
match ty::get(fn_ty).sty {
|
||||
ty::ty_closure(ty::ClosureTy {sigil: ast::BorrowedSigil,
|
||||
|
@ -248,7 +254,7 @@ impl check_loan_ctxt {
|
|||
}
|
||||
}
|
||||
|
||||
fn is_allowed_pure_arg(expr: @ast::expr) -> bool {
|
||||
fn is_allowed_pure_arg(@mut self, expr: @ast::expr) -> bool {
|
||||
return match expr.node {
|
||||
ast::expr_path(_) => {
|
||||
let def = self.tcx().def_map.get(&expr.id);
|
||||
|
@ -263,7 +269,7 @@ impl check_loan_ctxt {
|
|||
};
|
||||
}
|
||||
|
||||
fn check_for_conflicting_loans(scope_id: ast::node_id) {
|
||||
fn check_for_conflicting_loans(@mut self, scope_id: ast::node_id) {
|
||||
debug!("check_for_conflicting_loans(scope_id=%?)", scope_id);
|
||||
|
||||
let new_loans = match self.req_maps.req_loan_map.find(&scope_id) {
|
||||
|
@ -292,7 +298,7 @@ impl check_loan_ctxt {
|
|||
}
|
||||
}
|
||||
|
||||
fn report_error_if_loans_conflict(&self,
|
||||
fn report_error_if_loans_conflict(@mut self,
|
||||
old_loan: &Loan,
|
||||
new_loan: &Loan) {
|
||||
if old_loan.lp != new_loan.lp {
|
||||
|
@ -319,14 +325,14 @@ impl check_loan_ctxt {
|
|||
}
|
||||
}
|
||||
|
||||
fn is_local_variable(cmt: cmt) -> bool {
|
||||
fn is_local_variable(@mut self, cmt: cmt) -> bool {
|
||||
match cmt.cat {
|
||||
cat_local(_) => true,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
|
||||
fn check_assignment(at: assignment_type, ex: @ast::expr) {
|
||||
fn check_assignment(@mut self, at: assignment_type, ex: @ast::expr) {
|
||||
// We don't use cat_expr() here because we don't want to treat
|
||||
// auto-ref'd parameters in overloaded operators as rvalues.
|
||||
let cmt = match self.bccx.tcx.adjustments.find(&ex.id) {
|
||||
|
@ -393,7 +399,7 @@ impl check_loan_ctxt {
|
|||
self.add_write_guards_if_necessary(cmt);
|
||||
}
|
||||
|
||||
fn add_write_guards_if_necessary(cmt: cmt) {
|
||||
fn add_write_guards_if_necessary(@mut self, cmt: cmt) {
|
||||
match cmt.cat {
|
||||
cat_deref(base, deref_count, ptr_kind) => {
|
||||
self.add_write_guards_if_necessary(base);
|
||||
|
@ -416,12 +422,11 @@ impl check_loan_ctxt {
|
|||
}
|
||||
}
|
||||
|
||||
fn check_for_loan_conflicting_with_assignment(
|
||||
at: assignment_type,
|
||||
ex: @ast::expr,
|
||||
cmt: cmt,
|
||||
lp: @loan_path) {
|
||||
|
||||
fn check_for_loan_conflicting_with_assignment(@mut self,
|
||||
at: assignment_type,
|
||||
ex: @ast::expr,
|
||||
cmt: cmt,
|
||||
lp: @loan_path) {
|
||||
for self.walk_loans_of(ex.id, lp) |loan| {
|
||||
match loan.mutbl {
|
||||
m_const => { /*ok*/ }
|
||||
|
@ -455,7 +460,7 @@ impl check_loan_ctxt {
|
|||
}
|
||||
}
|
||||
|
||||
fn report_purity_error(pc: purity_cause, sp: span, msg: ~str) {
|
||||
fn report_purity_error(@mut self, pc: purity_cause, sp: span, msg: ~str) {
|
||||
match pc {
|
||||
pc_pure_fn => {
|
||||
self.tcx().sess.span_err(
|
||||
|
@ -463,7 +468,8 @@ impl check_loan_ctxt {
|
|||
fmt!("%s prohibited in pure context", msg));
|
||||
}
|
||||
pc_cmt(ref e) => {
|
||||
if self.reported.insert((*e).cmt.id, ()) {
|
||||
let reported = self.reported;
|
||||
if reported.insert((*e).cmt.id, ()) {
|
||||
self.tcx().sess.span_err(
|
||||
(*e).cmt.span,
|
||||
fmt!("illegal borrow unless pure: %s",
|
||||
|
@ -477,7 +483,7 @@ impl check_loan_ctxt {
|
|||
}
|
||||
}
|
||||
|
||||
fn check_move_out_from_expr(ex: @ast::expr) {
|
||||
fn check_move_out_from_expr(@mut self, ex: @ast::expr) {
|
||||
match ex.node {
|
||||
ast::expr_paren(*) => {
|
||||
/* In the case of an expr_paren(), the expression inside
|
||||
|
@ -510,7 +516,7 @@ impl check_loan_ctxt {
|
|||
}
|
||||
}
|
||||
|
||||
fn analyze_move_out_from_cmt(cmt: cmt) -> MoveError {
|
||||
fn analyze_move_out_from_cmt(@mut self, cmt: cmt) -> MoveError {
|
||||
debug!("check_move_out_from_cmt(cmt=%s)",
|
||||
self.bccx.cmt_to_repr(cmt));
|
||||
|
||||
|
@ -543,7 +549,8 @@ impl check_loan_ctxt {
|
|||
return MoveOk;
|
||||
}
|
||||
|
||||
fn check_call(expr: @ast::expr,
|
||||
fn check_call(@mut self,
|
||||
expr: @ast::expr,
|
||||
callee: Option<@ast::expr>,
|
||||
callee_id: ast::node_id,
|
||||
callee_span: span,
|
||||
|
@ -562,10 +569,13 @@ impl check_loan_ctxt {
|
|||
}
|
||||
}
|
||||
|
||||
fn check_loans_in_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
|
||||
sp: span, id: ast::node_id, &&self: check_loan_ctxt,
|
||||
visitor: visit::vt<check_loan_ctxt>)
|
||||
{
|
||||
fn check_loans_in_fn(fk: visit::fn_kind,
|
||||
decl: ast::fn_decl,
|
||||
body: ast::blk,
|
||||
sp: span,
|
||||
id: ast::node_id,
|
||||
&&self: @mut CheckLoanCtxt,
|
||||
visitor: visit::vt<@mut CheckLoanCtxt>) {
|
||||
let is_stack_closure = self.is_stack_closure(id);
|
||||
let fty = ty::node_id_to_type(self.tcx(), id);
|
||||
|
||||
|
@ -580,15 +590,16 @@ fn check_loans_in_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
|
|||
let fty_sigil = ty::ty_closure_sigil(fty);
|
||||
check_moves_from_captured_variables(self, id, fty_sigil);
|
||||
declared_purity = ty::determine_inherited_purity(
|
||||
copy self.declared_purity, ty::ty_fn_purity(fty),
|
||||
*self.declared_purity,
|
||||
ty::ty_fn_purity(fty),
|
||||
fty_sigil);
|
||||
}
|
||||
}
|
||||
|
||||
debug!("purity on entry=%?", copy self.declared_purity);
|
||||
do save_and_restore(&mut(self.declared_purity)) {
|
||||
do save_and_restore(&mut(self.fn_args)) {
|
||||
self.declared_purity = declared_purity;
|
||||
do save_and_restore_managed(self.declared_purity) {
|
||||
do save_and_restore_managed(self.fn_args) {
|
||||
*self.declared_purity = declared_purity;
|
||||
|
||||
match fk {
|
||||
visit::fk_anon(*) |
|
||||
|
@ -611,7 +622,7 @@ fn check_loans_in_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
|
|||
_ => {} // Ignore this argument.
|
||||
}
|
||||
}
|
||||
self.fn_args = @move fn_args;
|
||||
*self.fn_args = @move fn_args;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -620,10 +631,9 @@ fn check_loans_in_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
|
|||
}
|
||||
debug!("purity on exit=%?", copy self.declared_purity);
|
||||
|
||||
fn check_moves_from_captured_variables(&&self: check_loan_ctxt,
|
||||
fn check_moves_from_captured_variables(self: @mut CheckLoanCtxt,
|
||||
id: ast::node_id,
|
||||
fty_sigil: ast::Sigil)
|
||||
{
|
||||
fty_sigil: ast::Sigil) {
|
||||
match fty_sigil {
|
||||
ast::ManagedSigil | ast::OwnedSigil => {
|
||||
let cap_vars = self.bccx.capture_map.get(&id);
|
||||
|
@ -666,14 +676,14 @@ fn check_loans_in_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
|
|||
}
|
||||
|
||||
fn check_loans_in_local(local: @ast::local,
|
||||
&&self: check_loan_ctxt,
|
||||
vt: visit::vt<check_loan_ctxt>) {
|
||||
&&self: @mut CheckLoanCtxt,
|
||||
vt: visit::vt<@mut CheckLoanCtxt>) {
|
||||
visit::visit_local(local, self, vt);
|
||||
}
|
||||
|
||||
fn check_loans_in_expr(expr: @ast::expr,
|
||||
&&self: check_loan_ctxt,
|
||||
vt: visit::vt<check_loan_ctxt>) {
|
||||
&&self: @mut CheckLoanCtxt,
|
||||
vt: visit::vt<@mut CheckLoanCtxt>) {
|
||||
debug!("check_loans_in_expr(expr=%?/%s)",
|
||||
expr.id, pprust::expr_to_str(expr, self.tcx().sess.intr()));
|
||||
|
||||
|
@ -734,16 +744,16 @@ fn check_loans_in_expr(expr: @ast::expr,
|
|||
}
|
||||
|
||||
fn check_loans_in_block(blk: ast::blk,
|
||||
&&self: check_loan_ctxt,
|
||||
vt: visit::vt<check_loan_ctxt>) {
|
||||
do save_and_restore(&mut(self.declared_purity)) {
|
||||
&&self: @mut CheckLoanCtxt,
|
||||
vt: visit::vt<@mut CheckLoanCtxt>) {
|
||||
do save_and_restore_managed(self.declared_purity) {
|
||||
self.check_for_conflicting_loans(blk.node.id);
|
||||
|
||||
match blk.node.rules {
|
||||
ast::default_blk => {
|
||||
}
|
||||
ast::unsafe_blk => {
|
||||
self.declared_purity = ast::unsafe_fn;
|
||||
*self.declared_purity = ast::unsafe_fn;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -68,19 +68,22 @@ use syntax::visit;
|
|||
/// No good. Instead what will happen is that `root_ub` will be set to the
|
||||
/// body of the while loop and we will refuse to root the pointer `&*x`
|
||||
/// because it would have to be rooted for a region greater than `root_ub`.
|
||||
enum gather_loan_ctxt = @{bccx: @BorrowckCtxt,
|
||||
req_maps: req_maps,
|
||||
mut item_ub: ast::node_id,
|
||||
mut root_ub: ast::node_id,
|
||||
mut ignore_adjustments: LinearSet<ast::node_id>};
|
||||
struct GatherLoanCtxt {
|
||||
bccx: @BorrowckCtxt,
|
||||
req_maps: req_maps,
|
||||
item_ub: ast::node_id,
|
||||
root_ub: ast::node_id,
|
||||
ignore_adjustments: LinearSet<ast::node_id>
|
||||
}
|
||||
|
||||
pub fn gather_loans(bccx: @BorrowckCtxt, crate: @ast::crate) -> req_maps {
|
||||
let glcx = gather_loan_ctxt(@{bccx: bccx,
|
||||
req_maps: {req_loan_map: HashMap(),
|
||||
pure_map: HashMap()},
|
||||
mut item_ub: 0,
|
||||
mut root_ub: 0,
|
||||
mut ignore_adjustments: LinearSet::new()});
|
||||
let glcx = @mut GatherLoanCtxt {
|
||||
bccx: bccx,
|
||||
req_maps: {req_loan_map: HashMap(), pure_map: HashMap()},
|
||||
item_ub: 0,
|
||||
root_ub: 0,
|
||||
ignore_adjustments: LinearSet::new()
|
||||
};
|
||||
let v = visit::mk_vt(@visit::Visitor {visit_expr: req_loans_in_expr,
|
||||
visit_fn: req_loans_in_fn,
|
||||
visit_stmt: add_stmt_to_map,
|
||||
|
@ -94,8 +97,8 @@ fn req_loans_in_fn(fk: visit::fn_kind,
|
|||
body: ast::blk,
|
||||
sp: span,
|
||||
id: ast::node_id,
|
||||
&&self: gather_loan_ctxt,
|
||||
v: visit::vt<gather_loan_ctxt>) {
|
||||
&&self: @mut GatherLoanCtxt,
|
||||
v: visit::vt<@mut GatherLoanCtxt>) {
|
||||
// see explanation attached to the `root_ub` field:
|
||||
let old_item_id = self.item_ub;
|
||||
let old_root_ub = self.root_ub;
|
||||
|
@ -115,8 +118,8 @@ fn req_loans_in_fn(fk: visit::fn_kind,
|
|||
}
|
||||
|
||||
fn req_loans_in_expr(ex: @ast::expr,
|
||||
&&self: gather_loan_ctxt,
|
||||
vt: visit::vt<gather_loan_ctxt>) {
|
||||
&&self: @mut GatherLoanCtxt,
|
||||
vt: visit::vt<@mut GatherLoanCtxt>) {
|
||||
let bccx = self.bccx;
|
||||
let tcx = bccx.tcx;
|
||||
let old_root_ub = self.root_ub;
|
||||
|
@ -283,10 +286,10 @@ fn req_loans_in_expr(ex: @ast::expr,
|
|||
self.root_ub = old_root_ub;
|
||||
}
|
||||
|
||||
impl gather_loan_ctxt {
|
||||
fn tcx(&self) -> ty::ctxt { self.bccx.tcx }
|
||||
impl GatherLoanCtxt {
|
||||
fn tcx(@mut self) -> ty::ctxt { self.bccx.tcx }
|
||||
|
||||
fn guarantee_adjustments(&self,
|
||||
fn guarantee_adjustments(@mut self,
|
||||
expr: @ast::expr,
|
||||
adjustment: &ty::AutoAdjustment) {
|
||||
debug!("guarantee_adjustments(expr=%s, adjustment=%?)",
|
||||
|
@ -334,12 +337,12 @@ impl gather_loan_ctxt {
|
|||
// out loans, which will be added to the `req_loan_map`. This can
|
||||
// also entail "rooting" GC'd pointers, which means ensuring
|
||||
// dynamically that they are not freed.
|
||||
fn guarantee_valid(&self,
|
||||
fn guarantee_valid(@mut self,
|
||||
cmt: cmt,
|
||||
req_mutbl: ast::mutability,
|
||||
scope_r: ty::Region) {
|
||||
|
||||
self.bccx.guaranteed_paths += 1;
|
||||
self.bccx.stats.guaranteed_paths += 1;
|
||||
|
||||
debug!("guarantee_valid(cmt=%s, req_mutbl=%s, scope_r=%s)",
|
||||
self.bccx.cmt_to_repr(cmt),
|
||||
|
@ -391,7 +394,7 @@ impl gather_loan_ctxt {
|
|||
// we were able guarantee the validity of the ptr,
|
||||
// perhaps by rooting or because it is immutably
|
||||
// rooted. good.
|
||||
self.bccx.stable_paths += 1;
|
||||
self.bccx.stats.stable_paths += 1;
|
||||
}
|
||||
Ok(PcIfPure(ref e)) => {
|
||||
debug!("result of preserve: %?", PcIfPure((*e)));
|
||||
|
@ -403,8 +406,9 @@ impl gather_loan_ctxt {
|
|||
// if the scope is some block/expr in the
|
||||
// fn, then just require that this scope
|
||||
// be pure
|
||||
self.req_maps.pure_map.insert(pure_id, (*e));
|
||||
self.bccx.req_pure_paths += 1;
|
||||
let pure_map = self.req_maps.pure_map;
|
||||
pure_map.insert(pure_id, *e);
|
||||
self.bccx.stats.req_pure_paths += 1;
|
||||
|
||||
debug!("requiring purity for scope %?",
|
||||
scope_r);
|
||||
|
@ -441,9 +445,10 @@ impl gather_loan_ctxt {
|
|||
// has type `@mut{f:int}`, this check might fail because `&x.f`
|
||||
// reqires an immutable pointer, but `f` lives in (aliased)
|
||||
// mutable memory.
|
||||
fn check_mutbl(&self,
|
||||
fn check_mutbl(@mut self,
|
||||
req_mutbl: ast::mutability,
|
||||
cmt: cmt) -> bckres<PreserveCondition> {
|
||||
cmt: cmt)
|
||||
-> bckres<PreserveCondition> {
|
||||
debug!("check_mutbl(req_mutbl=%?, cmt.mutbl=%?)",
|
||||
req_mutbl, cmt.mutbl);
|
||||
|
||||
|
@ -469,7 +474,7 @@ impl gather_loan_ctxt {
|
|||
}
|
||||
}
|
||||
|
||||
fn add_loans(&self,
|
||||
fn add_loans(@mut self,
|
||||
cmt: cmt,
|
||||
req_mutbl: ast::mutability,
|
||||
scope_r: ty::Region,
|
||||
|
@ -522,7 +527,7 @@ impl gather_loan_ctxt {
|
|||
self.add_loans_to_scope_id(scope_id, move loans);
|
||||
|
||||
if req_mutbl == m_imm && cmt.mutbl != m_imm {
|
||||
self.bccx.loaned_paths_imm += 1;
|
||||
self.bccx.stats.loaned_paths_imm += 1;
|
||||
|
||||
if self.tcx().sess.borrowck_note_loan() {
|
||||
self.bccx.span_note(
|
||||
|
@ -530,11 +535,13 @@ impl gather_loan_ctxt {
|
|||
fmt!("immutable loan required"));
|
||||
}
|
||||
} else {
|
||||
self.bccx.loaned_paths_same += 1;
|
||||
self.bccx.stats.loaned_paths_same += 1;
|
||||
}
|
||||
}
|
||||
|
||||
fn add_loans_to_scope_id(&self, scope_id: ast::node_id, +loans: ~[Loan]) {
|
||||
fn add_loans_to_scope_id(@mut self,
|
||||
scope_id: ast::node_id,
|
||||
+loans: ~[Loan]) {
|
||||
debug!("adding %u loans to scope_id %?", loans.len(), scope_id);
|
||||
match self.req_maps.req_loan_map.find(&scope_id) {
|
||||
Some(req_loans) => {
|
||||
|
@ -542,12 +549,13 @@ impl gather_loan_ctxt {
|
|||
}
|
||||
None => {
|
||||
let dvec = @dvec::from_vec(move loans);
|
||||
self.req_maps.req_loan_map.insert(scope_id, dvec);
|
||||
let req_loan_map = self.req_maps.req_loan_map;
|
||||
req_loan_map.insert(scope_id, dvec);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn gather_pat(&self,
|
||||
fn gather_pat(@mut self,
|
||||
discr_cmt: cmt,
|
||||
root_pat: @ast::pat,
|
||||
arm_id: ast::node_id,
|
||||
|
@ -602,10 +610,9 @@ impl gather_loan_ctxt {
|
|||
}
|
||||
}
|
||||
|
||||
fn vec_slice_info(&self,
|
||||
fn vec_slice_info(@mut self,
|
||||
pat: @ast::pat,
|
||||
tail_ty: ty::t) -> (ast::mutability, ty::Region)
|
||||
{
|
||||
tail_ty: ty::t) -> (ast::mutability, ty::Region) {
|
||||
/*!
|
||||
*
|
||||
* In a pattern like [a, b, ..c], normally `c` has slice type,
|
||||
|
@ -631,11 +638,11 @@ impl gather_loan_ctxt {
|
|||
}
|
||||
}
|
||||
|
||||
fn pat_is_variant_or_struct(&self, pat: @ast::pat) -> bool {
|
||||
fn pat_is_variant_or_struct(@mut self, pat: @ast::pat) -> bool {
|
||||
pat_util::pat_is_variant_or_struct(self.bccx.tcx.def_map, pat)
|
||||
}
|
||||
|
||||
fn pat_is_binding(&self, pat: @ast::pat) -> bool {
|
||||
fn pat_is_binding(@mut self, pat: @ast::pat) -> bool {
|
||||
pat_util::pat_is_binding(self.bccx.tcx.def_map, pat)
|
||||
}
|
||||
}
|
||||
|
@ -643,8 +650,8 @@ impl gather_loan_ctxt {
|
|||
// Setting up info that preserve needs.
|
||||
// This is just the most convenient place to do it.
|
||||
fn add_stmt_to_map(stmt: @ast::stmt,
|
||||
&&self: gather_loan_ctxt,
|
||||
vt: visit::vt<gather_loan_ctxt>) {
|
||||
&&self: @mut GatherLoanCtxt,
|
||||
vt: visit::vt<@mut GatherLoanCtxt>) {
|
||||
match stmt.node {
|
||||
ast::stmt_expr(_, id) | ast::stmt_semi(_, id) => {
|
||||
self.bccx.stmt_map.insert(id, ());
|
||||
|
@ -653,3 +660,4 @@ fn add_stmt_to_map(stmt: @ast::stmt,
|
|||
}
|
||||
visit::visit_stmt(stmt, self, vt);
|
||||
}
|
||||
|
||||
|
|
|
@ -62,18 +62,17 @@ impl BorrowckCtxt {
|
|||
cmt: cmt,
|
||||
scope_region: ty::Region,
|
||||
mutbl: ast::mutability) -> bckres<~[Loan]> {
|
||||
let lc = LoanContext {
|
||||
let mut lc = LoanContext {
|
||||
bccx: self,
|
||||
scope_region: scope_region,
|
||||
loans: ~[]
|
||||
};
|
||||
match lc.loan(cmt, mutbl, true) {
|
||||
Err(ref e) => Err((*e)),
|
||||
Ok(()) => {
|
||||
let LoanContext {loans, _} = move lc;
|
||||
Ok(loans)
|
||||
}
|
||||
Err(ref e) => return Err((*e)),
|
||||
Ok(()) => {}
|
||||
}
|
||||
// XXX: Workaround for borrow check bug.
|
||||
Ok(copy lc.loans)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -84,17 +83,16 @@ struct LoanContext {
|
|||
scope_region: ty::Region,
|
||||
|
||||
// accumulated list of loans that will be required
|
||||
mut loans: ~[Loan]
|
||||
loans: ~[Loan]
|
||||
}
|
||||
|
||||
impl LoanContext {
|
||||
fn tcx(&self) -> ty::ctxt { self.bccx.tcx }
|
||||
fn tcx(&mut self) -> ty::ctxt { self.bccx.tcx }
|
||||
|
||||
fn loan(&self,
|
||||
fn loan(&mut self,
|
||||
cmt: cmt,
|
||||
req_mutbl: ast::mutability,
|
||||
owns_lent_data: bool) -> bckres<()>
|
||||
{
|
||||
owns_lent_data: bool) -> bckres<()> {
|
||||
/*!
|
||||
*
|
||||
* The main routine.
|
||||
|
@ -198,7 +196,7 @@ impl LoanContext {
|
|||
// A "stable component" is one where assigning the base of the
|
||||
// component cannot cause the component itself to change types.
|
||||
// Example: record fields.
|
||||
fn loan_stable_comp(&self,
|
||||
fn loan_stable_comp(&mut self,
|
||||
cmt: cmt,
|
||||
cmt_base: cmt,
|
||||
req_mutbl: ast::mutability,
|
||||
|
@ -268,12 +266,11 @@ impl LoanContext {
|
|||
// An "unstable deref" means a deref of a ptr/comp where, if the
|
||||
// base of the deref is assigned to, pointers into the result of the
|
||||
// deref would be invalidated. Examples: interior of variants, uniques.
|
||||
fn loan_unstable_deref(&self,
|
||||
fn loan_unstable_deref(&mut self,
|
||||
cmt: cmt,
|
||||
cmt_base: cmt,
|
||||
req_mutbl: ast::mutability,
|
||||
owns_lent_data: bool) -> bckres<()>
|
||||
{
|
||||
owns_lent_data: bool) -> bckres<()> {
|
||||
// Variant components: the base must be immutable, because
|
||||
// if it is overwritten, the types of the embedded data
|
||||
// could change.
|
||||
|
@ -284,12 +281,11 @@ impl LoanContext {
|
|||
}
|
||||
}
|
||||
|
||||
fn issue_loan(&self,
|
||||
fn issue_loan(&mut self,
|
||||
cmt: cmt,
|
||||
scope_ub: ty::Region,
|
||||
req_mutbl: ast::mutability,
|
||||
owns_lent_data: bool) -> bckres<()>
|
||||
{
|
||||
owns_lent_data: bool) -> bckres<()> {
|
||||
// Subtle: the `scope_ub` is the maximal lifetime of `cmt`.
|
||||
// Therefore, if `cmt` owns the data being lent, then the
|
||||
// scope of the loan must be less than `scope_ub`, or else the
|
||||
|
@ -301,8 +297,8 @@ impl LoanContext {
|
|||
// reborrowed.
|
||||
|
||||
if !owns_lent_data ||
|
||||
self.bccx.is_subregion_of(self.scope_region, scope_ub)
|
||||
{
|
||||
self.bccx.is_subregion_of(/*bad*/copy self.scope_region,
|
||||
scope_ub) {
|
||||
match req_mutbl {
|
||||
m_mutbl => {
|
||||
// We do not allow non-mutable data to be loaned
|
||||
|
@ -340,3 +336,4 @@ impl LoanContext {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -262,19 +262,23 @@ pub fn check_crate(
|
|||
capture_map: moves::CaptureMap,
|
||||
crate: @ast::crate) -> (root_map, mutbl_map, write_guard_map)
|
||||
{
|
||||
let bccx = @BorrowckCtxt {tcx: tcx,
|
||||
method_map: method_map,
|
||||
moves_map: moves_map,
|
||||
capture_map: capture_map,
|
||||
root_map: root_map(),
|
||||
mutbl_map: HashMap(),
|
||||
write_guard_map: HashMap(),
|
||||
stmt_map: HashMap(),
|
||||
mut loaned_paths_same: 0,
|
||||
mut loaned_paths_imm: 0,
|
||||
mut stable_paths: 0,
|
||||
mut req_pure_paths: 0,
|
||||
mut guaranteed_paths: 0};
|
||||
let bccx = @BorrowckCtxt {
|
||||
tcx: tcx,
|
||||
method_map: method_map,
|
||||
moves_map: moves_map,
|
||||
capture_map: capture_map,
|
||||
root_map: root_map(),
|
||||
mutbl_map: HashMap(),
|
||||
write_guard_map: HashMap(),
|
||||
stmt_map: HashMap(),
|
||||
stats: @mut BorrowStats {
|
||||
loaned_paths_same: 0,
|
||||
loaned_paths_imm: 0,
|
||||
stable_paths: 0,
|
||||
req_pure_paths: 0,
|
||||
guaranteed_paths: 0,
|
||||
}
|
||||
};
|
||||
|
||||
let req_maps = gather_loans::gather_loans(bccx, crate);
|
||||
check_loans::check_loans(bccx, req_maps, crate);
|
||||
|
@ -282,22 +286,22 @@ pub fn check_crate(
|
|||
if tcx.sess.borrowck_stats() {
|
||||
io::println(~"--- borrowck stats ---");
|
||||
io::println(fmt!("paths requiring guarantees: %u",
|
||||
bccx.guaranteed_paths));
|
||||
bccx.stats.guaranteed_paths));
|
||||
io::println(fmt!("paths requiring loans : %s",
|
||||
make_stat(bccx, bccx.loaned_paths_same)));
|
||||
make_stat(bccx, bccx.stats.loaned_paths_same)));
|
||||
io::println(fmt!("paths requiring imm loans : %s",
|
||||
make_stat(bccx, bccx.loaned_paths_imm)));
|
||||
make_stat(bccx, bccx.stats.loaned_paths_imm)));
|
||||
io::println(fmt!("stable paths : %s",
|
||||
make_stat(bccx, bccx.stable_paths)));
|
||||
make_stat(bccx, bccx.stats.stable_paths)));
|
||||
io::println(fmt!("paths requiring purity : %s",
|
||||
make_stat(bccx, bccx.req_pure_paths)));
|
||||
make_stat(bccx, bccx.stats.req_pure_paths)));
|
||||
}
|
||||
|
||||
return (bccx.root_map, bccx.mutbl_map, bccx.write_guard_map);
|
||||
|
||||
fn make_stat(bccx: &BorrowckCtxt, stat: uint) -> ~str {
|
||||
let stat_f = stat as float;
|
||||
let total = bccx.guaranteed_paths as float;
|
||||
let total = bccx.stats.guaranteed_paths as float;
|
||||
fmt!("%u (%.0f%%)", stat , stat_f * 100f / total)
|
||||
}
|
||||
}
|
||||
|
@ -316,11 +320,15 @@ pub struct BorrowckCtxt {
    stmt_map: stmt_set,

    // Statistics:
    mut loaned_paths_same: uint,
    mut loaned_paths_imm: uint,
    mut stable_paths: uint,
    mut req_pure_paths: uint,
    mut guaranteed_paths: uint
    stats: @mut BorrowStats
}

pub struct BorrowStats {
    loaned_paths_same: uint,
    loaned_paths_imm: uint,
    stable_paths: uint,
    req_pure_paths: uint,
    guaranteed_paths: uint
}

pub struct RootInfo {

@ -397,7 +405,15 @@ pub type req_maps = {
};

pub fn save_and_restore<T:Copy,U>(save_and_restore_t: &mut T,
                                  f: fn() -> U) -> U {
                                  f: &fn() -> U) -> U {
    let old_save_and_restore_t = *save_and_restore_t;
    let u = f();
    *save_and_restore_t = old_save_and_restore_t;
    move u
}

pub fn save_and_restore_managed<T:Copy,U>(save_and_restore_t: @mut T,
                                          f: &fn() -> U) -> U {
    let old_save_and_restore_t = *save_and_restore_t;
    let u = f();
    *save_and_restore_t = old_save_and_restore_t;
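save_and_restore_managed is the @mut counterpart of save_and_restore just above: copy the current value out of the shared box, run the closure, then write the saved value back, so a temporary override (for example setting declared_purity to unsafe_fn while inside an unsafe block) is undone on the way out. A minimal sketch of the same idea in current Rust, with RefCell standing in for the @mut box and the values purely illustrative:

    use std::cell::RefCell;

    // Save the current value, run `f`, restore the saved value, and return
    // whatever `f` produced; `RefCell` stands in for the `@mut T` box.
    fn save_and_restore_managed<T: Clone, U>(slot: &RefCell<T>, f: impl FnOnce() -> U) -> U {
        let old = slot.borrow().clone();
        let result = f();
        *slot.borrow_mut() = old;
        result
    }

    fn main() {
        let purity = RefCell::new("impure");
        let answer = save_and_restore_managed(&purity, || {
            *purity.borrow_mut() = "unsafe"; // temporary override inside the block
            42
        });
        assert_eq!(answer, 42);
        assert_eq!(*purity.borrow(), "impure"); // the old value is back afterwards
    }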
|
|
|
@ -66,10 +66,10 @@
 * methods. It effectively does a reverse walk of the AST; whenever we
 * reach a loop node, we iterate until a fixed point is reached.
 *
 * ## The `users` struct
 * ## The `Users` struct
 *
 * At each live node `N`, we track three pieces of information for each
 * variable `V` (these are encapsulated in the `users` struct):
 * variable `V` (these are encapsulated in the `Users` struct):
 *
 * - `reader`: the `LiveNode` ID of some node which will read the value
 *   that `V` holds on entry to `N`. Formally: a node `M` such
@ -214,8 +214,11 @@ pub fn check_crate(tcx: ty::ctxt,
|
|||
});
|
||||
|
||||
let last_use_map = HashMap();
|
||||
let initial_maps = @IrMaps(tcx, method_map, variable_moves_map,
|
||||
capture_map, last_use_map);
|
||||
let initial_maps = @mut IrMaps(tcx,
|
||||
method_map,
|
||||
variable_moves_map,
|
||||
capture_map,
|
||||
last_use_map);
|
||||
visit::visit_crate(*crate, initial_maps, visitor);
|
||||
tcx.sess.abort_if_errors();
|
||||
return last_use_map;
|
||||
|
@ -300,20 +303,21 @@ struct IrMaps {
|
|||
capture_map: moves::CaptureMap,
|
||||
last_use_map: last_use_map,
|
||||
|
||||
mut num_live_nodes: uint,
|
||||
mut num_vars: uint,
|
||||
num_live_nodes: uint,
|
||||
num_vars: uint,
|
||||
live_node_map: HashMap<node_id, LiveNode>,
|
||||
variable_map: HashMap<node_id, Variable>,
|
||||
capture_info_map: HashMap<node_id, @~[CaptureInfo]>,
|
||||
mut var_kinds: ~[VarKind],
|
||||
mut lnks: ~[LiveNodeKind],
|
||||
var_kinds: ~[VarKind],
|
||||
lnks: ~[LiveNodeKind],
|
||||
}
|
||||
|
||||
fn IrMaps(tcx: ty::ctxt,
|
||||
method_map: typeck::method_map,
|
||||
variable_moves_map: moves::VariableMovesMap,
|
||||
capture_map: moves::CaptureMap,
|
||||
last_use_map: last_use_map) -> IrMaps {
|
||||
last_use_map: last_use_map)
|
||||
-> IrMaps {
|
||||
IrMaps {
|
||||
tcx: tcx,
|
||||
method_map: method_map,
|
||||
|
@ -331,7 +335,7 @@ fn IrMaps(tcx: ty::ctxt,
|
|||
}
|
||||
|
||||
impl IrMaps {
|
||||
fn add_live_node(lnk: LiveNodeKind) -> LiveNode {
|
||||
fn add_live_node(&mut self, lnk: LiveNodeKind) -> LiveNode {
|
||||
let ln = LiveNode(self.num_live_nodes);
|
||||
self.lnks.push(lnk);
|
||||
self.num_live_nodes += 1;
|
||||
|
@ -342,14 +346,16 @@ impl IrMaps {
|
|||
ln
|
||||
}
|
||||
|
||||
fn add_live_node_for_node(node_id: node_id, lnk: LiveNodeKind) {
|
||||
fn add_live_node_for_node(&mut self,
|
||||
node_id: node_id,
|
||||
lnk: LiveNodeKind) {
|
||||
let ln = self.add_live_node(lnk);
|
||||
self.live_node_map.insert(node_id, ln);
|
||||
|
||||
debug!("%s is node %d", ln.to_str(), node_id);
|
||||
}
|
||||
|
||||
fn add_variable(vk: VarKind) -> Variable {
|
||||
fn add_variable(&mut self, vk: VarKind) -> Variable {
|
||||
let v = Variable(self.num_vars);
|
||||
self.var_kinds.push(vk);
|
||||
self.num_vars += 1;
|
||||
|
@ -367,7 +373,7 @@ impl IrMaps {
|
|||
v
|
||||
}
|
||||
|
||||
fn variable(node_id: node_id, span: span) -> Variable {
|
||||
fn variable(&mut self, node_id: node_id, span: span) -> Variable {
|
||||
match self.variable_map.find(&node_id) {
|
||||
Some(var) => var,
|
||||
None => {
|
||||
|
@ -377,7 +383,7 @@ impl IrMaps {
|
|||
}
|
||||
}
|
||||
|
||||
fn variable_name(var: Variable) -> ~str {
|
||||
fn variable_name(&mut self, var: Variable) -> ~str {
|
||||
match copy self.var_kinds[*var] {
|
||||
Local(LocalInfo {ident: nm, _}) |
|
||||
Arg(_, nm, _) => self.tcx.sess.str_of(nm),
|
||||
|
@ -385,11 +391,11 @@ impl IrMaps {
|
|||
}
|
||||
}
|
||||
|
||||
fn set_captures(node_id: node_id, +cs: ~[CaptureInfo]) {
|
||||
fn set_captures(&mut self, node_id: node_id, +cs: ~[CaptureInfo]) {
|
||||
self.capture_info_map.insert(node_id, @cs);
|
||||
}
|
||||
|
||||
fn captures(expr: @expr) -> @~[CaptureInfo] {
|
||||
fn captures(&mut self, expr: @expr) -> @~[CaptureInfo] {
|
||||
match self.capture_info_map.find(&expr.id) {
|
||||
Some(caps) => caps,
|
||||
None => {
|
||||
|
@ -398,11 +404,11 @@ impl IrMaps {
|
|||
}
|
||||
}
|
||||
|
||||
fn lnk(ln: LiveNode) -> LiveNodeKind {
|
||||
fn lnk(&mut self, ln: LiveNode) -> LiveNodeKind {
|
||||
self.lnks[*ln]
|
||||
}
|
||||
|
||||
fn add_last_use(expr_id: node_id, var: Variable) {
|
||||
fn add_last_use(&mut self, expr_id: node_id, var: Variable) {
|
||||
let vk = self.var_kinds[*var];
|
||||
debug!("Node %d is a last use of variable %?", expr_id, vk);
|
||||
match vk {
|
||||
|
@ -429,17 +435,22 @@ impl IrMaps {
|
|||
}
|
||||
}
|
||||
|
||||
fn visit_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
|
||||
sp: span, id: node_id, &&self: @IrMaps, v: vt<@IrMaps>) {
|
||||
fn visit_fn(fk: visit::fn_kind,
|
||||
decl: fn_decl,
|
||||
body: blk,
|
||||
sp: span,
|
||||
id: node_id,
|
||||
&&self: @mut IrMaps,
|
||||
v: vt<@mut IrMaps>) {
|
||||
debug!("visit_fn: id=%d", id);
|
||||
let _i = ::util::common::indenter();
|
||||
|
||||
// swap in a new set of IR maps for this function body:
|
||||
let fn_maps = @IrMaps(self.tcx,
|
||||
self.method_map,
|
||||
self.variable_moves_map,
|
||||
self.capture_map,
|
||||
self.last_use_map);
|
||||
let fn_maps = @mut IrMaps(self.tcx,
|
||||
self.method_map,
|
||||
self.variable_moves_map,
|
||||
self.capture_map,
|
||||
self.last_use_map);
|
||||
|
||||
debug!("creating fn_maps: %x", ptr::addr_of(&(*fn_maps)) as uint);
|
||||
|
||||
|
@ -449,7 +460,7 @@ fn visit_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
|
|||
|_bm, arg_id, _x, path| {
|
||||
debug!("adding argument %d", arg_id);
|
||||
let ident = ast_util::path_to_ident(path);
|
||||
(*fn_maps).add_variable(Arg(arg_id, ident, mode));
|
||||
fn_maps.add_variable(Arg(arg_id, ident, mode));
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -486,10 +497,10 @@ fn visit_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
|
|||
// - exit_ln represents the end of the fn, either by return or fail
|
||||
// - implicit_ret_var is a pseudo-variable that represents
|
||||
// an implicit return
|
||||
let specials = {
|
||||
exit_ln: (*fn_maps).add_live_node(ExitNode),
|
||||
fallthrough_ln: (*fn_maps).add_live_node(ExitNode),
|
||||
no_ret_var: (*fn_maps).add_variable(ImplicitRet)
|
||||
let specials = Specials {
|
||||
exit_ln: fn_maps.add_live_node(ExitNode),
|
||||
fallthrough_ln: fn_maps.add_live_node(ExitNode),
|
||||
no_ret_var: fn_maps.add_variable(ImplicitRet)
|
||||
};
|
||||
|
||||
// compute liveness
|
||||
|
@ -509,7 +520,7 @@ fn visit_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
|
|||
lsets.warn_about_unused_args(decl, entry_ln);
|
||||
}
|
||||
|
||||
fn visit_local(local: @local, &&self: @IrMaps, vt: vt<@IrMaps>) {
|
||||
fn visit_local(local: @local, &&self: @mut IrMaps, vt: vt<@mut IrMaps>) {
|
||||
let def_map = self.tcx.def_map;
|
||||
do pat_util::pat_bindings(def_map, local.node.pat) |_bm, p_id, sp, path| {
|
||||
debug!("adding local variable %d", p_id);
|
||||
|
@ -529,7 +540,7 @@ fn visit_local(local: @local, &&self: @IrMaps, vt: vt<@IrMaps>) {
|
|||
visit::visit_local(local, self, vt);
|
||||
}
|
||||
|
||||
fn visit_arm(arm: arm, &&self: @IrMaps, vt: vt<@IrMaps>) {
|
||||
fn visit_arm(arm: arm, &&self: @mut IrMaps, vt: vt<@mut IrMaps>) {
|
||||
let def_map = self.tcx.def_map;
|
||||
for arm.pats.each |pat| {
|
||||
do pat_util::pat_bindings(def_map, *pat) |bm, p_id, sp, path| {
|
||||
|
@ -548,7 +559,7 @@ fn visit_arm(arm: arm, &&self: @IrMaps, vt: vt<@IrMaps>) {
|
|||
visit::visit_arm(arm, self, vt);
|
||||
}
|
||||
|
||||
fn visit_expr(expr: @expr, &&self: @IrMaps, vt: vt<@IrMaps>) {
|
||||
fn visit_expr(expr: @expr, &&self: @mut IrMaps, vt: vt<@mut IrMaps>) {
|
||||
match expr.node {
|
||||
// live nodes required for uses or definitions of variables:
|
||||
expr_path(_) => {
|
||||
|
@ -626,21 +637,25 @@ fn visit_expr(expr: @expr, &&self: @IrMaps, vt: vt<@IrMaps>) {
// Actually we compute just a bit more than just liveness, but we use
// the same basic propagation framework in all cases.

type users = {
struct Users {
reader: LiveNode,
writer: LiveNode,
used: bool
};

fn invalid_users() -> users {
{reader: invalid_node(), writer: invalid_node(), used: false}
}

type Specials = {
fn invalid_users() -> Users {
Users {
reader: invalid_node(),
writer: invalid_node(),
used: false
}
}

struct Specials {
exit_ln: LiveNode,
fallthrough_ln: LiveNode,
no_ret_var: Variable
};
}

const ACC_READ: uint = 1u;
const ACC_WRITE: uint = 2u;
@ -650,10 +665,10 @@ type LiveNodeMap = HashMap<node_id, LiveNode>;

struct Liveness {
tcx: ty::ctxt,
ir: @IrMaps,
ir: @mut IrMaps,
s: Specials,
successors: ~[mut LiveNode],
users: ~[mut users],
users: ~[mut Users],
// The list of node IDs for the nested loop scopes
// we're in.
loop_scope: DVec<node_id>,
@ -664,7 +679,7 @@ struct Liveness {
cont_ln: LiveNodeMap
}

fn Liveness(ir: @IrMaps, specials: Specials) -> Liveness {
fn Liveness(ir: @mut IrMaps, specials: Specials) -> Liveness {
Liveness {
ir: ir,
tcx: ir.tcx,

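(Not part of the commit: the `type users = {...}` to `struct Users {...}` change above is the old structural-record syntax giving way to a nominal struct, with the `invalid_users` constructor updated to a struct literal. A rough present-day equivalent, using invented stand-in types, would look like this.)

#[derive(Clone, Copy, Debug, PartialEq)]
struct LiveNode(usize);

// Hypothetical sentinel; the real code uses an invalid_node() helper.
const INVALID_NODE: LiveNode = LiveNode(usize::MAX);

struct Users {
    reader: LiveNode,
    writer: LiveNode,
    used: bool,
}

fn invalid_users() -> Users {
    Users { reader: INVALID_NODE, writer: INVALID_NODE, used: false }
}

fn main() {
    let u = invalid_users();
    assert!(!u.used);
    assert_eq!(u.reader, INVALID_NODE);
}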
@ -408,28 +408,24 @@ pub struct region_dep {

pub type dep_map = HashMap<ast::node_id, @DVec<region_dep>>;

pub type determine_rp_ctxt_ = {
pub struct DetermineRpCtxt {
sess: Session,
ast_map: ast_map::map,
def_map: resolve::DefMap,
region_paramd_items: region_paramd_items,
dep_map: dep_map,
worklist: DVec<ast::node_id>,
worklist: ~[ast::node_id],

// the innermost enclosing item id
mut item_id: ast::node_id,
item_id: ast::node_id,

// true when we are within an item but not within a method.
// see long discussion on region_is_relevant()
mut anon_implies_rp: bool,
anon_implies_rp: bool,

// encodes the context of the current type; invariant if
// mutable, covariant otherwise
mut ambient_variance: region_variance,
};

pub enum determine_rp_ctxt {
determine_rp_ctxt_(@determine_rp_ctxt_)
ambient_variance: region_variance,
}

pub fn join_variance(++variance1: region_variance,
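(Not part of the commit: the `DetermineRpCtxt` rewrite above is the commit's central move, replacing a shared `@` box with individually `mut` fields by a wholly mutable `@mut` box whose methods take `@mut self`, as the `with`/`with_ambient_variance` methods further down in this file do. Today's nearest analogue is interior mutability through `Rc<RefCell<...>>`; the sketch below illustrates that save-set-restore pattern with an invented miniature context, and is not the compiler's code.)

use std::cell::RefCell;
use std::rc::Rc;

// Hypothetical miniature context: fields are plain, and mutability is
// obtained by mutably borrowing the whole box, much as @mut did in 2013.
struct Ctxt {
    item_id: u32,
    anon_implies_rp: bool,
}

fn with_item(cx: &Rc<RefCell<Ctxt>>, item_id: u32, f: impl FnOnce()) {
    // Save the old state, install the new one, run the body, then restore.
    let (old_id, old_flag) = {
        let mut cx = cx.borrow_mut();
        let old = (cx.item_id, cx.anon_implies_rp);
        cx.item_id = item_id;
        cx.anon_implies_rp = true;
        old
    };
    f();
    let mut cx = cx.borrow_mut();
    cx.item_id = old_id;
    cx.anon_implies_rp = old_flag;
}

fn main() {
    let cx = Rc::new(RefCell::new(Ctxt { item_id: 0, anon_implies_rp: false }));
    with_item(&cx, 7, || { /* visit the item here */ });
    assert_eq!(cx.borrow().item_id, 0);
}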
@ -465,15 +461,15 @@ pub fn add_variance(+ambient_variance: region_variance,
|
|||
}
|
||||
}
|
||||
|
||||
pub impl determine_rp_ctxt {
|
||||
fn add_variance(variance: region_variance) -> region_variance {
|
||||
pub impl DetermineRpCtxt {
|
||||
fn add_variance(@mut self, variance: region_variance) -> region_variance {
|
||||
add_variance(self.ambient_variance, variance)
|
||||
}
|
||||
|
||||
/// Records that item `id` is region-parameterized with the
|
||||
/// variance `variance`. If `id` was already parameterized, then
|
||||
/// the new variance is joined with the old variance.
|
||||
fn add_rp(id: ast::node_id, variance: region_variance) {
|
||||
fn add_rp(@mut self, id: ast::node_id, variance: region_variance) {
|
||||
assert id != 0;
|
||||
let old_variance = self.region_paramd_items.find(&id);
|
||||
let joined_variance = match old_variance {
|
||||
|
@ -487,7 +483,8 @@ pub impl determine_rp_ctxt {
|
|||
joined_variance, old_variance, variance);
|
||||
|
||||
if Some(joined_variance) != old_variance {
|
||||
self.region_paramd_items.insert(id, joined_variance);
|
||||
let region_paramd_items = self.region_paramd_items;
|
||||
region_paramd_items.insert(id, joined_variance);
|
||||
self.worklist.push(id);
|
||||
}
|
||||
}
|
||||
|
@ -497,7 +494,7 @@ pub impl determine_rp_ctxt {
|
|||
/// `from`. Put another way, it indicates that the current item
|
||||
/// contains a value of type `from`, so if `from` is
|
||||
/// region-parameterized, so is the current item.
|
||||
fn add_dep(from: ast::node_id) {
|
||||
fn add_dep(@mut self, from: ast::node_id) {
|
||||
debug!("add dependency from %d -> %d (%s -> %s) with variance %?",
|
||||
from, self.item_id,
|
||||
ast_map::node_id_to_str(self.ast_map, from,
|
||||
|
@ -509,7 +506,8 @@ pub impl determine_rp_ctxt {
|
|||
Some(vec) => vec,
|
||||
None => {
|
||||
let vec = @DVec();
|
||||
self.dep_map.insert(from, vec);
|
||||
let dep_map = self.dep_map;
|
||||
dep_map.insert(from, vec);
|
||||
vec
|
||||
}
|
||||
};
|
||||
|
@ -552,7 +550,7 @@ pub impl determine_rp_ctxt {
|
|||
// case it is bound. We handle this by setting a flag
|
||||
// (anon_implies_rp) to true when we enter an item and setting
|
||||
// that flag to false when we enter a method.
|
||||
fn region_is_relevant(r: @ast::region) -> bool {
|
||||
fn region_is_relevant(@mut self, r: @ast::region) -> bool {
|
||||
match r.node {
|
||||
ast::re_static => false,
|
||||
ast::re_anon => self.anon_implies_rp,
|
||||
|
@ -567,7 +565,9 @@ pub impl determine_rp_ctxt {
|
|||
//
|
||||
// If the region is explicitly specified, then we follows the
|
||||
// normal rules.
|
||||
fn opt_region_is_relevant(opt_r: Option<@ast::region>) -> bool {
|
||||
fn opt_region_is_relevant(@mut self,
|
||||
opt_r: Option<@ast::region>)
|
||||
-> bool {
|
||||
debug!("opt_region_is_relevant: %? (anon_implies_rp=%b)",
|
||||
opt_r, self.anon_implies_rp);
|
||||
match opt_r {
|
||||
|
@ -576,9 +576,10 @@ pub impl determine_rp_ctxt {
|
|||
}
|
||||
}
|
||||
|
||||
fn with(item_id: ast::node_id,
|
||||
fn with(@mut self,
|
||||
item_id: ast::node_id,
|
||||
anon_implies_rp: bool,
|
||||
f: fn()) {
|
||||
f: &fn()) {
|
||||
let old_item_id = self.item_id;
|
||||
let old_anon_implies_rp = self.anon_implies_rp;
|
||||
self.item_id = item_id;
|
||||
|
@ -590,7 +591,7 @@ pub impl determine_rp_ctxt {
|
|||
self.anon_implies_rp = old_anon_implies_rp;
|
||||
}
|
||||
|
||||
fn with_ambient_variance(variance: region_variance, f: fn()) {
|
||||
fn with_ambient_variance(@mut self, variance: region_variance, f: &fn()) {
|
||||
let old_ambient_variance = self.ambient_variance;
|
||||
self.ambient_variance = self.add_variance(variance);
|
||||
f();
|
||||
|
@ -599,8 +600,8 @@ pub impl determine_rp_ctxt {
|
|||
}
|
||||
|
||||
pub fn determine_rp_in_item(item: @ast::item,
|
||||
&&cx: determine_rp_ctxt,
|
||||
visitor: visit::vt<determine_rp_ctxt>) {
|
||||
&&cx: @mut DetermineRpCtxt,
|
||||
visitor: visit::vt<@mut DetermineRpCtxt>) {
|
||||
do cx.with(item.id, true) {
|
||||
visit::visit_item(item, cx, visitor);
|
||||
}
|
||||
|
@ -609,10 +610,10 @@ pub fn determine_rp_in_item(item: @ast::item,
|
|||
pub fn determine_rp_in_fn(fk: visit::fn_kind,
|
||||
decl: ast::fn_decl,
|
||||
body: ast::blk,
|
||||
_sp: span,
|
||||
_id: ast::node_id,
|
||||
&&cx: determine_rp_ctxt,
|
||||
visitor: visit::vt<determine_rp_ctxt>) {
|
||||
_: span,
|
||||
_: ast::node_id,
|
||||
&&cx: @mut DetermineRpCtxt,
|
||||
visitor: visit::vt<@mut DetermineRpCtxt>) {
|
||||
do cx.with(cx.item_id, false) {
|
||||
do cx.with_ambient_variance(rv_contravariant) {
|
||||
for decl.inputs.each |a| {
|
||||
|
@ -626,16 +627,16 @@ pub fn determine_rp_in_fn(fk: visit::fn_kind,
|
|||
}
|
||||
|
||||
pub fn determine_rp_in_ty_method(ty_m: ast::ty_method,
|
||||
&&cx: determine_rp_ctxt,
|
||||
visitor: visit::vt<determine_rp_ctxt>) {
|
||||
&&cx: @mut DetermineRpCtxt,
|
||||
visitor: visit::vt<@mut DetermineRpCtxt>) {
|
||||
do cx.with(cx.item_id, false) {
|
||||
visit::visit_ty_method(ty_m, cx, visitor);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn determine_rp_in_ty(ty: @ast::Ty,
|
||||
&&cx: determine_rp_ctxt,
|
||||
visitor: visit::vt<determine_rp_ctxt>) {
|
||||
&&cx: @mut DetermineRpCtxt,
|
||||
visitor: visit::vt<@mut DetermineRpCtxt>) {
|
||||
// we are only interested in types that will require an item to
|
||||
// be region-parameterized. if cx.item_id is zero, then this type
|
||||
// is not a member of a type defn nor is it a constitutent of an
|
||||
|
@ -647,10 +648,11 @@ pub fn determine_rp_in_ty(ty: @ast::Ty,
|
|||
// respect to &r, because &r/ty can be used whereever a *smaller*
|
||||
// region is expected (and hence is a supertype of those
|
||||
// locations)
|
||||
let sess = cx.sess;
|
||||
match ty.node {
|
||||
ast::ty_rptr(r, _) => {
|
||||
debug!("referenced rptr type %s",
|
||||
pprust::ty_to_str(ty, cx.sess.intr()));
|
||||
pprust::ty_to_str(ty, sess.intr()));
|
||||
|
||||
if cx.region_is_relevant(r) {
|
||||
cx.add_rp(cx.item_id, cx.add_variance(rv_contravariant))
|
||||
|
@ -659,7 +661,7 @@ pub fn determine_rp_in_ty(ty: @ast::Ty,
|
|||
|
||||
ast::ty_closure(ref f) => {
|
||||
debug!("referenced fn type: %s",
|
||||
pprust::ty_to_str(ty, cx.sess.intr()));
|
||||
pprust::ty_to_str(ty, sess.intr()));
|
||||
match f.region {
|
||||
Some(r) => {
|
||||
if cx.region_is_relevant(r) {
|
||||
|
@ -692,12 +694,12 @@ pub fn determine_rp_in_ty(ty: @ast::Ty,
|
|||
cx.add_dep(did.node);
|
||||
}
|
||||
} else {
|
||||
let cstore = cx.sess.cstore;
|
||||
let cstore = sess.cstore;
|
||||
match csearch::get_region_param(cstore, did) {
|
||||
None => {}
|
||||
Some(variance) => {
|
||||
debug!("reference to external, rp'd type %s",
|
||||
pprust::ty_to_str(ty, cx.sess.intr()));
|
||||
pprust::ty_to_str(ty, sess.intr()));
|
||||
if cx.opt_region_is_relevant(path.rp) {
|
||||
cx.add_rp(cx.item_id, cx.add_variance(variance))
|
||||
}
|
||||
|
@ -752,8 +754,9 @@ pub fn determine_rp_in_ty(ty: @ast::Ty,
|
|||
}
|
||||
}
|
||||
|
||||
fn visit_mt(mt: ast::mt, &&cx: determine_rp_ctxt,
|
||||
visitor: visit::vt<determine_rp_ctxt>) {
|
||||
fn visit_mt(mt: ast::mt,
|
||||
&&cx: @mut DetermineRpCtxt,
|
||||
visitor: visit::vt<@mut DetermineRpCtxt>) {
|
||||
// mutability is invariant
|
||||
if mt.mutbl == ast::m_mutbl {
|
||||
do cx.with_ambient_variance(rv_invariant) {
|
||||
|
@ -765,9 +768,10 @@ pub fn determine_rp_in_ty(ty: @ast::Ty,
|
|||
}
|
||||
}
|
||||
|
||||
pub fn determine_rp_in_struct_field(cm: @ast::struct_field,
|
||||
&&cx: determine_rp_ctxt,
|
||||
visitor: visit::vt<determine_rp_ctxt>) {
|
||||
pub fn determine_rp_in_struct_field(
|
||||
cm: @ast::struct_field,
|
||||
&&cx: @mut DetermineRpCtxt,
|
||||
visitor: visit::vt<@mut DetermineRpCtxt>) {
|
||||
match cm.node.kind {
|
||||
ast::named_field(_, ast::struct_mutable, _) => {
|
||||
do cx.with_ambient_variance(rv_invariant) {
|
||||
|
@ -786,15 +790,17 @@ pub fn determine_rp_in_crate(sess: Session,
|
|||
def_map: resolve::DefMap,
|
||||
crate: @ast::crate)
|
||||
-> region_paramd_items {
|
||||
let cx = determine_rp_ctxt_(@{sess: sess,
|
||||
ast_map: ast_map,
|
||||
def_map: def_map,
|
||||
region_paramd_items: HashMap(),
|
||||
dep_map: HashMap(),
|
||||
worklist: DVec(),
|
||||
mut item_id: 0,
|
||||
mut anon_implies_rp: false,
|
||||
mut ambient_variance: rv_covariant});
|
||||
let cx = @mut DetermineRpCtxt {
|
||||
sess: sess,
|
||||
ast_map: ast_map,
|
||||
def_map: def_map,
|
||||
region_paramd_items: HashMap(),
|
||||
dep_map: HashMap(),
|
||||
worklist: ~[],
|
||||
item_id: 0,
|
||||
anon_implies_rp: false,
|
||||
ambient_variance: rv_covariant
|
||||
};
|
||||
|
||||
// Gather up the base set, worklist and dep_map
|
||||
let visitor = visit::mk_vt(@visit::Visitor {
|
||||
|
@ -833,7 +839,8 @@ pub fn determine_rp_in_crate(sess: Session,
|
|||
|
||||
debug!("%s", {
|
||||
debug!("Region variance results:");
|
||||
for cx.region_paramd_items.each_ref |&key, &value| {
|
||||
let region_paramd_items = cx.region_paramd_items;
|
||||
for region_paramd_items.each_ref |&key, &value| {
|
||||
debug!("item %? (%s) is parameterized with variance %?",
|
||||
key,
|
||||
ast_map::node_id_to_str(ast_map, key,
|
||||
|
|
|
@ -145,7 +145,7 @@ pub enum NamespaceResult {
|
|||
UnboundResult,
|
||||
/// Means that resolve has determined that the name is bound in the Module
|
||||
/// argument, and specified by the NameBindings argument.
|
||||
BoundResult(@Module, @NameBindings)
|
||||
BoundResult(@Module, @mut NameBindings)
|
||||
}
|
||||
|
||||
pub impl NamespaceResult {
|
||||
|
@ -364,10 +364,10 @@ pub fn ImportDirective(privacy: Privacy,
|
|||
/// The item that an import resolves to.
|
||||
pub struct Target {
|
||||
target_module: @Module,
|
||||
bindings: @NameBindings,
|
||||
bindings: @mut NameBindings,
|
||||
}
|
||||
|
||||
pub fn Target(target_module: @Module, bindings: @NameBindings) -> Target {
|
||||
pub fn Target(target_module: @Module, bindings: @mut NameBindings) -> Target {
|
||||
Target {
|
||||
target_module: target_module,
|
||||
bindings: bindings
|
||||
|
@ -385,18 +385,19 @@ pub struct ImportResolution {
|
|||
// zero, outside modules can count on the targets being correct. Before
|
||||
// then, all bets are off; future imports could override this name.
|
||||
|
||||
mut outstanding_references: uint,
|
||||
outstanding_references: uint,
|
||||
|
||||
/// The value that this `use` directive names, if there is one.
|
||||
mut value_target: Option<Target>,
|
||||
value_target: Option<Target>,
|
||||
/// The type that this `use` directive names, if there is one.
|
||||
mut type_target: Option<Target>,
|
||||
type_target: Option<Target>,
|
||||
|
||||
/// There exists one state per import statement
|
||||
state: @mut ImportState,
|
||||
}
|
||||
|
||||
pub fn ImportResolution(privacy: Privacy, span: span,
|
||||
pub fn ImportResolution(privacy: Privacy,
|
||||
+span: span,
|
||||
state: @mut ImportState) -> ImportResolution {
|
||||
ImportResolution {
|
||||
privacy: privacy,
|
||||
|
@ -447,7 +448,7 @@ pub struct Module {
|
|||
mut def_id: Option<def_id>,
|
||||
kind: ModuleKind,
|
||||
|
||||
children: HashMap<ident,@NameBindings>,
|
||||
children: HashMap<ident,@mut NameBindings>,
|
||||
imports: DVec<@ImportDirective>,
|
||||
|
||||
// The anonymous children of this node. Anonymous children are pseudo-
|
||||
|
@ -476,7 +477,7 @@ pub struct Module {
|
|||
exported_names: HashMap<ident,node_id>,
|
||||
|
||||
// The status of resolving each import in this module.
|
||||
import_resolutions: HashMap<ident,@ImportResolution>,
|
||||
import_resolutions: HashMap<ident,@mut ImportResolution>,
|
||||
|
||||
// The number of unresolved globs that this module exports.
|
||||
mut glob_count: uint,
|
||||
|
@ -521,9 +522,9 @@ pub fn unused_import_lint_level(session: Session) -> level {
|
|||
|
||||
// Records a possibly-private type definition.
|
||||
pub struct TypeNsDef {
|
||||
mut privacy: Privacy,
|
||||
mut module_def: Option<@Module>,
|
||||
mut type_def: Option<def>
|
||||
privacy: Privacy,
|
||||
module_def: Option<@Module>,
|
||||
type_def: Option<def>
|
||||
}
|
||||
|
||||
// Records a possibly-private value definition.
|
||||
|
@ -535,18 +536,19 @@ pub struct ValueNsDef {
|
|||
// Records the definitions (at most one for each namespace) that a name is
|
||||
// bound to.
|
||||
pub struct NameBindings {
|
||||
mut type_def: Option<TypeNsDef>, //< Meaning in type namespace.
|
||||
mut value_def: Option<ValueNsDef>, //< Meaning in value namespace.
|
||||
type_def: Option<TypeNsDef>, //< Meaning in type namespace.
|
||||
value_def: Option<ValueNsDef>, //< Meaning in value namespace.
|
||||
|
||||
// For error reporting
|
||||
// FIXME (#3783): Merge me into TypeNsDef and ValueNsDef.
|
||||
mut type_span: Option<span>,
|
||||
mut value_span: Option<span>,
|
||||
type_span: Option<span>,
|
||||
value_span: Option<span>,
|
||||
}
|
||||
|
||||
pub impl NameBindings {
|
||||
/// Creates a new module in this set of name bindings.
|
||||
fn define_module(privacy: Privacy,
|
||||
fn define_module(@mut self,
|
||||
privacy: Privacy,
|
||||
parent_link: ParentLink,
|
||||
def_id: Option<def_id>,
|
||||
kind: ModuleKind,
|
||||
|
@ -573,7 +575,7 @@ pub impl NameBindings {
|
|||
}
|
||||
|
||||
/// Records a type definition.
|
||||
fn define_type(privacy: Privacy, def: def, sp: span) {
|
||||
fn define_type(@mut self, privacy: Privacy, def: def, sp: span) {
|
||||
// Merges the type with the existing type def or creates a new one.
|
||||
match self.type_def {
|
||||
None => {
|
||||
|
@ -595,7 +597,7 @@ pub impl NameBindings {
|
|||
}
|
||||
|
||||
/// Records a value definition.
|
||||
fn define_value(privacy: Privacy, def: def, sp: span) {
|
||||
fn define_value(@mut self, privacy: Privacy, def: def, sp: span) {
|
||||
self.value_def = Some(ValueNsDef { privacy: privacy, def: def });
|
||||
self.value_span = Some(sp);
|
||||
}
|
||||
|
@ -612,7 +614,7 @@ pub impl NameBindings {
|
|||
* Returns the module node. Fails if this node does not have a module
|
||||
* definition.
|
||||
*/
|
||||
fn get_module() -> @Module {
|
||||
fn get_module(@mut self) -> @Module {
|
||||
match self.get_module_if_available() {
|
||||
None => {
|
||||
die!(~"get_module called on a node with no module \
|
||||
|
@ -750,15 +752,15 @@ pub fn Resolver(session: Session,
|
|||
lang_items: LanguageItems,
|
||||
crate: @crate)
|
||||
-> Resolver {
|
||||
let graph_root = @NameBindings();
|
||||
let graph_root = @mut NameBindings();
|
||||
|
||||
(*graph_root).define_module(Public,
|
||||
NoParentLink,
|
||||
Some(def_id { crate: 0, node: 0 }),
|
||||
NormalModuleKind,
|
||||
crate.span);
|
||||
graph_root.define_module(Public,
|
||||
NoParentLink,
|
||||
Some(def_id { crate: 0, node: 0 }),
|
||||
NormalModuleKind,
|
||||
crate.span);
|
||||
|
||||
let current_module = (*graph_root).get_module();
|
||||
let current_module = graph_root.get_module();
|
||||
|
||||
let self = Resolver {
|
||||
session: session,
|
||||
|
@ -814,7 +816,7 @@ pub struct Resolver {
|
|||
|
||||
intr: @ident_interner,
|
||||
|
||||
graph_root: @NameBindings,
|
||||
graph_root: @mut NameBindings,
|
||||
|
||||
unused_import_lint_level: level,
|
||||
|
||||
|
@ -894,7 +896,7 @@ pub impl Resolver {
|
|||
/// Constructs the reduced graph for the entire crate.
|
||||
fn build_reduced_graph(this: @Resolver) {
|
||||
let initial_parent =
|
||||
ModuleReducedGraphParent((*self.graph_root).get_module());
|
||||
ModuleReducedGraphParent(self.graph_root.get_module());
|
||||
visit_crate(*self.crate, initial_parent, mk_vt(@Visitor {
|
||||
visit_item: |item, context, visitor|
|
||||
(*this).build_reduced_graph_for_item(item, context, visitor),
|
||||
|
@ -943,7 +945,7 @@ pub impl Resolver {
|
|||
duplicate_checking_mode: DuplicateCheckingMode,
|
||||
// For printing errors
|
||||
sp: span)
|
||||
-> (@NameBindings, ReducedGraphParent) {
|
||||
-> (@mut NameBindings, ReducedGraphParent) {
|
||||
|
||||
// If this is the immediate descendant of a module, then we add the
|
||||
// child name directly. Otherwise, we create or reuse an anonymous
|
||||
|
@ -960,7 +962,7 @@ pub impl Resolver {
|
|||
let new_parent = ModuleReducedGraphParent(module_);
|
||||
match module_.children.find(&name) {
|
||||
None => {
|
||||
let child = @NameBindings();
|
||||
let child = @mut NameBindings();
|
||||
module_.children.insert(name, child);
|
||||
return (child, new_parent);
|
||||
}
|
||||
|
@ -1080,14 +1082,14 @@ pub impl Resolver {
|
|||
|
||||
let parent_link = self.get_parent_link(new_parent, ident);
|
||||
let def_id = def_id { crate: 0, node: item.id };
|
||||
(*name_bindings).define_module(privacy,
|
||||
parent_link,
|
||||
Some(def_id),
|
||||
NormalModuleKind,
|
||||
sp);
|
||||
name_bindings.define_module(privacy,
|
||||
parent_link,
|
||||
Some(def_id),
|
||||
NormalModuleKind,
|
||||
sp);
|
||||
|
||||
let new_parent =
|
||||
ModuleReducedGraphParent((*name_bindings).get_module());
|
||||
ModuleReducedGraphParent(name_bindings.get_module());
|
||||
|
||||
visit_mod(module_, sp, item.id, new_parent, visitor);
|
||||
}
|
||||
|
@ -1102,11 +1104,11 @@ pub impl Resolver {
|
|||
let parent_link = self.get_parent_link(new_parent,
|
||||
ident);
|
||||
let def_id = def_id { crate: 0, node: item.id };
|
||||
(*name_bindings).define_module(privacy,
|
||||
parent_link,
|
||||
Some(def_id),
|
||||
ExternModuleKind,
|
||||
sp);
|
||||
name_bindings.define_module(privacy,
|
||||
parent_link,
|
||||
Some(def_id),
|
||||
ExternModuleKind,
|
||||
sp);
|
||||
|
||||
ModuleReducedGraphParent(name_bindings.get_module())
|
||||
}
|
||||
|
@ -1124,7 +1126,7 @@ pub impl Resolver {
|
|||
let (name_bindings, _) =
|
||||
self.add_child(ident, parent, ForbidDuplicateValues, sp);
|
||||
|
||||
(*name_bindings).define_value
|
||||
name_bindings.define_value
|
||||
(privacy, def_const(local_def(item.id)), sp);
|
||||
}
|
||||
item_fn(_, purity, _, _) => {
|
||||
|
@ -1132,7 +1134,7 @@ pub impl Resolver {
|
|||
self.add_child(ident, parent, ForbidDuplicateValues, sp);
|
||||
|
||||
let def = def_fn(local_def(item.id), purity);
|
||||
(*name_bindings).define_value(privacy, def, sp);
|
||||
name_bindings.define_value(privacy, def, sp);
|
||||
visit_item(item, new_parent, visitor);
|
||||
}
|
||||
|
||||
|
@ -1141,7 +1143,7 @@ pub impl Resolver {
|
|||
let (name_bindings, _) =
|
||||
self.add_child(ident, parent, ForbidDuplicateTypes, sp);
|
||||
|
||||
(*name_bindings).define_type
|
||||
name_bindings.define_type
|
||||
(privacy, def_ty(local_def(item.id)), sp);
|
||||
}
|
||||
|
||||
|
@ -1149,7 +1151,7 @@ pub impl Resolver {
|
|||
let (name_bindings, new_parent) =
|
||||
self.add_child(ident, parent, ForbidDuplicateTypes, sp);
|
||||
|
||||
(*name_bindings).define_type
|
||||
name_bindings.define_type
|
||||
(privacy, def_ty(local_def(item.id)), sp);
|
||||
|
||||
for (*enum_definition).variants.each |variant| {
|
||||
|
@ -1329,10 +1331,7 @@ pub impl Resolver {
|
|||
let def_id = local_def(item.id);
|
||||
self.trait_info.insert(def_id, method_names);
|
||||
|
||||
(*name_bindings).define_type
|
||||
(privacy,
|
||||
def_ty(def_id),
|
||||
sp);
|
||||
name_bindings.define_type(privacy, def_ty(def_id), sp);
|
||||
visit_item(item, new_parent, visitor);
|
||||
}
|
||||
|
||||
|
@ -1363,22 +1362,22 @@ pub impl Resolver {
|
|||
|
||||
match variant.node.kind {
|
||||
tuple_variant_kind(_) => {
|
||||
(*child).define_value(privacy,
|
||||
def_variant(item_id,
|
||||
local_def(variant.node.id)),
|
||||
variant.span);
|
||||
child.define_value(privacy,
|
||||
def_variant(item_id,
|
||||
local_def(variant.node.id)),
|
||||
variant.span);
|
||||
}
|
||||
struct_variant_kind(_) => {
|
||||
(*child).define_type(privacy,
|
||||
def_variant(item_id,
|
||||
local_def(variant.node.id)),
|
||||
variant.span);
|
||||
child.define_type(privacy,
|
||||
def_variant(item_id,
|
||||
local_def(variant.node.id)),
|
||||
variant.span);
|
||||
self.structs.insert(local_def(variant.node.id), ());
|
||||
}
|
||||
enum_variant_kind(ref enum_definition) => {
|
||||
(*child).define_type(privacy,
|
||||
def_ty(local_def(variant.node.id)),
|
||||
variant.span);
|
||||
child.define_type(privacy,
|
||||
def_ty(local_def(variant.node.id)),
|
||||
variant.span);
|
||||
for (*enum_definition).variants.each |variant| {
|
||||
self.build_reduced_graph_for_variant(*variant, item_id,
|
||||
parent_privacy,
|
||||
|
@ -1488,7 +1487,7 @@ pub impl Resolver {
|
|||
NormalModuleKind,
|
||||
view_item.span);
|
||||
self.build_reduced_graph_for_external_crate
|
||||
((*child_name_bindings).get_module());
|
||||
(child_name_bindings.get_module());
|
||||
}
|
||||
None => {
|
||||
/* Ignore. */
|
||||
|
@ -1512,7 +1511,7 @@ pub impl Resolver {
|
|||
match /*bad*/copy foreign_item.node {
|
||||
foreign_item_fn(_, _, type_parameters) => {
|
||||
let def = def_fn(local_def(foreign_item.id), unsafe_fn);
|
||||
(*name_bindings).define_value(Public, def, foreign_item.span);
|
||||
name_bindings.define_value(Public, def, foreign_item.span);
|
||||
|
||||
do self.with_type_parameter_rib
|
||||
(HasTypeParameters(&type_parameters, foreign_item.id,
|
||||
|
@ -1522,7 +1521,7 @@ pub impl Resolver {
|
|||
}
|
||||
foreign_item_const(*) => {
|
||||
let def = def_const(local_def(foreign_item.id));
|
||||
(*name_bindings).define_value(Public, def, foreign_item.span);
|
||||
name_bindings.define_value(Public, def, foreign_item.span);
|
||||
|
||||
visit_foreign_item(foreign_item, new_parent, visitor);
|
||||
}
|
||||
|
@ -1554,10 +1553,12 @@ pub impl Resolver {
|
|||
visit_block(block, new_parent, visitor);
|
||||
}
|
||||
|
||||
fn handle_external_def(def: def, modules: HashMap<def_id, @Module>,
|
||||
child_name_bindings: @NameBindings,
|
||||
fn handle_external_def(def: def,
|
||||
modules: HashMap<def_id, @Module>,
|
||||
child_name_bindings: @mut NameBindings,
|
||||
final_ident: ~str,
|
||||
ident: ident, new_parent: ReducedGraphParent) {
|
||||
ident: ident,
|
||||
new_parent: ReducedGraphParent) {
|
||||
match def {
|
||||
def_mod(def_id) | def_foreign_mod(def_id) => {
|
||||
match copy child_name_bindings.type_def {
|
||||
|
@ -1588,8 +1589,10 @@ pub impl Resolver {
|
|||
// avoid creating cycles in the
|
||||
// module graph.
|
||||
|
||||
let resolution = @ImportResolution(Public, dummy_sp(),
|
||||
@mut ImportState());
|
||||
let resolution =
|
||||
@mut ImportResolution(Public,
|
||||
dummy_sp(),
|
||||
@mut ImportState());
|
||||
resolution.outstanding_references = 0;
|
||||
|
||||
match existing_module.parent_link {
|
||||
|
@ -1618,7 +1621,7 @@ pub impl Resolver {
|
|||
def_variant(*) => {
|
||||
debug!("(building reduced graph for external \
|
||||
crate) building value %s", final_ident);
|
||||
(*child_name_bindings).define_value(Public, def, dummy_sp());
|
||||
child_name_bindings.define_value(Public, def, dummy_sp());
|
||||
}
|
||||
def_ty(def_id) => {
|
||||
debug!("(building reduced graph for external \
|
||||
|
@ -1729,7 +1732,7 @@ pub impl Resolver {
|
|||
_ => {} // Fall through.
|
||||
}
|
||||
|
||||
current_module = (*child_name_bindings).get_module();
|
||||
current_module = child_name_bindings.get_module();
|
||||
}
|
||||
|
||||
match def_like {
|
||||
|
@ -1867,8 +1870,9 @@ pub impl Resolver {
|
|||
}
|
||||
None => {
|
||||
debug!("(building import directive) creating new");
|
||||
let resolution = @ImportResolution(privacy, span,
|
||||
state);
|
||||
let resolution = @mut ImportResolution(privacy,
|
||||
span,
|
||||
state);
|
||||
let name = self.idents_to_str(module_path.get());
|
||||
// Don't warn about unused intrinsics because they're
|
||||
// automatically appended to all files
|
||||
|
@ -1910,7 +1914,7 @@ pub impl Resolver {
|
|||
debug!("(resolving imports) iteration %u, %u imports left",
|
||||
i, self.unresolved_imports);
|
||||
|
||||
let module_root = (*self.graph_root).get_module();
|
||||
let module_root = self.graph_root.get_module();
|
||||
self.resolve_imports_for_module_subtree(module_root);
|
||||
|
||||
if self.unresolved_imports == 0 {
|
||||
|
@ -2191,7 +2195,8 @@ pub impl Resolver {
|
|||
if import_resolution.outstanding_references
|
||||
== 0 => {
|
||||
|
||||
fn get_binding(import_resolution: @ImportResolution,
|
||||
fn get_binding(import_resolution:
|
||||
@mut ImportResolution,
|
||||
namespace: Namespace)
|
||||
-> NamespaceResult {
|
||||
|
||||
|
@ -2469,9 +2474,9 @@ pub impl Resolver {
|
|||
None => {
|
||||
// Simple: just copy the old import resolution.
|
||||
let new_import_resolution =
|
||||
@ImportResolution(privacy,
|
||||
target_import_resolution.span,
|
||||
state);
|
||||
@mut ImportResolution(privacy,
|
||||
target_import_resolution.span,
|
||||
state);
|
||||
new_import_resolution.value_target =
|
||||
copy target_import_resolution.value_target;
|
||||
new_import_resolution.type_target =
|
||||
|
@ -2512,8 +2517,9 @@ pub impl Resolver {
|
|||
match module_.import_resolutions.find(&ident) {
|
||||
None => {
|
||||
// Create a new import resolution from this child.
|
||||
dest_import_resolution = @ImportResolution(privacy, span,
|
||||
state);
|
||||
dest_import_resolution = @mut ImportResolution(privacy,
|
||||
span,
|
||||
state);
|
||||
module_.import_resolutions.insert
|
||||
(ident, dest_import_resolution);
|
||||
}
|
||||
|
@ -3202,7 +3208,7 @@ pub impl Resolver {
|
|||
// processing.
|
||||
|
||||
fn record_exports() {
|
||||
let root_module = (*self.graph_root).get_module();
|
||||
let root_module = self.graph_root.get_module();
|
||||
self.record_exports_for_module_subtree(root_module);
|
||||
}
|
||||
|
||||
|
@ -3265,7 +3271,7 @@ pub impl Resolver {
|
|||
|
||||
fn add_exports_of_namebindings(exports2: &mut ~[Export2],
|
||||
ident: ident,
|
||||
namebindings: @NameBindings,
|
||||
namebindings: @mut NameBindings,
|
||||
ns: Namespace,
|
||||
reexport: bool) {
|
||||
match (namebindings.def_for_namespace(ns),
|
||||
|
@ -3721,7 +3727,7 @@ pub impl Resolver {
|
|||
// If this is the main function, we must record it in the
|
||||
// session.
|
||||
// FIXME #4404 android JNI hacks
|
||||
if !self.session.building_library ||
|
||||
if !*self.session.building_library ||
|
||||
self.session.targ_cfg.os == session::os_android {
|
||||
|
||||
if self.attr_main_fn.is_none() &&
|
||||
|
@ -4673,7 +4679,7 @@ pub impl Resolver {
|
|||
|
||||
let module_path_idents = self.intern_module_part_of_path(path);
|
||||
|
||||
let root_module = (*self.graph_root).get_module();
|
||||
let root_module = self.graph_root.get_module();
|
||||
|
||||
let mut containing_module;
|
||||
match self.resolve_module_path_from_root(root_module,
|
||||
|
@ -5172,10 +5178,10 @@ pub impl Resolver {
|
|||
~"multiple 'main' functions");
|
||||
i += 1;
|
||||
}
|
||||
self.session.main_fn = self.main_fns[0];
|
||||
*self.session.main_fn = self.main_fns[0];
|
||||
}
|
||||
} else {
|
||||
self.session.main_fn = self.attr_main_fn;
|
||||
*self.session.main_fn = self.attr_main_fn;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -5191,7 +5197,7 @@ pub impl Resolver {
|
|||
return;
|
||||
}
|
||||
|
||||
let root_module = (*self.graph_root).get_module();
|
||||
let root_module = self.graph_root.get_module();
|
||||
self.check_for_unused_imports_in_module_subtree(root_module);
|
||||
}
|
||||
|
||||
|
@ -5245,15 +5251,15 @@ pub impl Resolver {
|
|||
import_resolution.state.warned = true;
|
||||
match self.unused_import_lint_level {
|
||||
warn => {
|
||||
self.session.span_warn(import_resolution.span,
|
||||
self.session.span_warn(copy import_resolution.span,
|
||||
~"unused import");
|
||||
}
|
||||
deny | forbid => {
|
||||
self.session.span_err(import_resolution.span,
|
||||
self.session.span_err(copy import_resolution.span,
|
||||
~"unused import");
|
||||
}
|
||||
allow => {
|
||||
self.session.span_bug(import_resolution.span,
|
||||
self.session.span_bug(copy import_resolution.span,
|
||||
~"shouldn't be here if lint \
|
||||
is allowed");
|
||||
}
|
||||
|
|
|
@ -370,7 +370,7 @@ pub fn get_tydesc_simple(ccx: @crate_ctxt, t: ty::t) -> ValueRef {
|
|||
get_tydesc(ccx, t).tydesc
|
||||
}
|
||||
|
||||
pub fn get_tydesc(ccx: @crate_ctxt, t: ty::t) -> @tydesc_info {
|
||||
pub fn get_tydesc(ccx: @crate_ctxt, t: ty::t) -> @mut tydesc_info {
|
||||
match ccx.tydescs.find(&t) {
|
||||
Some(inf) => inf,
|
||||
_ => {
|
||||
|
@ -2159,15 +2159,15 @@ pub fn register_fn_fuller(ccx: @crate_ctxt,
|
|||
|
||||
// FIXME #4404 android JNI hacks
|
||||
let is_main = is_main_fn(&ccx.sess, node_id) &&
|
||||
(!ccx.sess.building_library ||
|
||||
(ccx.sess.building_library &&
|
||||
(!*ccx.sess.building_library ||
|
||||
(*ccx.sess.building_library &&
|
||||
ccx.sess.targ_cfg.os == session::os_android));
|
||||
if is_main { create_main_wrapper(ccx, sp, llfn); }
|
||||
llfn
|
||||
}
|
||||
|
||||
pub fn is_main_fn(sess: &Session, node_id: ast::node_id) -> bool {
|
||||
match sess.main_fn {
|
||||
match *sess.main_fn {
|
||||
Some((main_id, _)) => node_id == main_id,
|
||||
None => false
|
||||
}
|
||||
|
@ -2210,7 +2210,7 @@ pub fn create_main_wrapper(ccx: @crate_ctxt, _sp: span, main_llfn: ValueRef) {
|
|||
let llfty = T_fn(~[ccx.int_type, ccx.int_type], ccx.int_type);
|
||||
|
||||
// FIXME #4404 android JNI hacks
|
||||
let llfn = if ccx.sess.building_library {
|
||||
let llfn = if *ccx.sess.building_library {
|
||||
decl_cdecl_fn(ccx.llmod, ~"amain", llfty)
|
||||
} else {
|
||||
decl_cdecl_fn(ccx.llmod, main_name(), llfty)
|
||||
|
@ -2230,14 +2230,20 @@ pub fn create_main_wrapper(ccx: @crate_ctxt, _sp: span, main_llfn: ValueRef) {
|
|||
let start = decl_cdecl_fn(ccx.llmod, ~"rust_start", start_ty);
|
||||
|
||||
let args = unsafe {
|
||||
if ccx.sess.building_library {
|
||||
~[rust_main,
|
||||
llvm::LLVMConstInt(T_i32(), 0u as c_ulonglong, False),
|
||||
llvm::LLVMConstInt(T_i32(), 0u as c_ulonglong, False),
|
||||
crate_map]
|
||||
if *ccx.sess.building_library {
|
||||
~[
|
||||
rust_main,
|
||||
llvm::LLVMConstInt(T_i32(), 0u as c_ulonglong, False),
|
||||
llvm::LLVMConstInt(T_i32(), 0u as c_ulonglong, False),
|
||||
crate_map
|
||||
]
|
||||
} else {
|
||||
~[rust_main, llvm::LLVMGetParam(llfn, 0 as c_uint),
|
||||
llvm::LLVMGetParam(llfn, 1 as c_uint), crate_map]
|
||||
~[
|
||||
rust_main,
|
||||
llvm::LLVMGetParam(llfn, 0 as c_uint),
|
||||
llvm::LLVMGetParam(llfn, 1 as c_uint),
|
||||
crate_map
|
||||
]
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -2815,10 +2821,12 @@ pub fn decl_crate_map(sess: session::Session, mapmeta: link_meta,
|
|||
let mut n_subcrates = 1;
|
||||
let cstore = sess.cstore;
|
||||
while cstore::have_crate_data(cstore, n_subcrates) { n_subcrates += 1; }
|
||||
let mapname = if sess.building_library {
|
||||
let mapname = if *sess.building_library {
|
||||
mapmeta.name.to_owned() + ~"_" + mapmeta.vers.to_owned() + ~"_"
|
||||
+ mapmeta.extras_hash.to_owned()
|
||||
} else { ~"toplevel" };
|
||||
} else {
|
||||
~"toplevel"
|
||||
};
|
||||
let sym_name = ~"_rust_crate_map_" + mapname;
|
||||
let arrtype = T_array(int_type, n_subcrates as uint);
|
||||
let maptype = T_struct(~[T_i32(), T_ptr(T_i8()), int_type, arrtype]);
|
||||
|
@ -2891,7 +2899,7 @@ pub fn crate_ctxt_to_encode_parms(cx: @crate_ctxt) -> encoder::encode_parms {
|
|||
}
|
||||
|
||||
pub fn write_metadata(cx: @crate_ctxt, crate: &ast::crate) {
|
||||
if !cx.sess.building_library { return; }
|
||||
if !*cx.sess.building_library { return; }
|
||||
let encode_parms = crate_ctxt_to_encode_parms(cx);
|
||||
let llmeta = C_bytes(encoder::encode_metadata(encode_parms, crate));
|
||||
let llconst = C_struct(~[llmeta]);
|
||||
|
@ -3016,18 +3024,19 @@ pub fn trans_crate(sess: session::Session,
|
|||
all_llvm_symbols: HashMap(),
|
||||
tcx: tcx,
|
||||
maps: maps,
|
||||
stats:
|
||||
{mut n_static_tydescs: 0u,
|
||||
mut n_glues_created: 0u,
|
||||
mut n_null_glues: 0u,
|
||||
mut n_real_glues: 0u,
|
||||
mut n_fns: 0u,
|
||||
mut n_monos: 0u,
|
||||
mut n_inlines: 0u,
|
||||
mut n_closures: 0u,
|
||||
llvm_insn_ctxt: @mut ~[],
|
||||
llvm_insns: HashMap(),
|
||||
fn_times: @mut ~[]},
|
||||
stats: @mut Stats {
|
||||
n_static_tydescs: 0u,
|
||||
n_glues_created: 0u,
|
||||
n_null_glues: 0u,
|
||||
n_real_glues: 0u,
|
||||
n_fns: 0u,
|
||||
n_monos: 0u,
|
||||
n_inlines: 0u,
|
||||
n_closures: 0u,
|
||||
llvm_insn_ctxt: @mut ~[],
|
||||
llvm_insns: HashMap(),
|
||||
fn_times: @mut ~[]
|
||||
},
|
||||
upcalls: upcall::declare_upcalls(targ_cfg, llmod),
|
||||
tydesc_type: tydesc_type,
|
||||
int_type: int_type,
|
||||
|
|
|
@ -391,20 +391,20 @@ pub fn trans_rtcall_or_lang_call_with_type_params(bcx: block,
|
|||
}
|
||||
|
||||
pub fn body_contains_ret(body: ast::blk) -> bool {
|
||||
let cx = {mut found: false};
|
||||
let cx = @mut false;
|
||||
visit::visit_block(body, cx, visit::mk_vt(@visit::Visitor {
|
||||
visit_item: |_i, _cx, _v| { },
|
||||
visit_expr: |e: @ast::expr, cx: {mut found: bool}, v| {
|
||||
if !cx.found {
|
||||
visit_expr: |e: @ast::expr, cx: @mut bool, v| {
|
||||
if !*cx {
|
||||
match e.node {
|
||||
ast::expr_ret(_) => cx.found = true,
|
||||
ast::expr_ret(_) => *cx = true,
|
||||
_ => visit::visit_expr(e, cx, v),
|
||||
}
|
||||
}
|
||||
},
|
||||
..*visit::default_visitor()
|
||||
}));
|
||||
cx.found
|
||||
*cx
|
||||
}
|
||||
|
||||
// See [Note-arg-mode]
|
||||
|
|
|
@ -87,16 +87,17 @@ pub fn new_addrspace_gen() -> addrspace_gen {
|
|||
return fn@() -> addrspace { *i += 1; *i };
|
||||
}
|
||||
|
||||
pub type tydesc_info =
|
||||
{ty: ty::t,
|
||||
tydesc: ValueRef,
|
||||
size: ValueRef,
|
||||
align: ValueRef,
|
||||
addrspace: addrspace,
|
||||
mut take_glue: Option<ValueRef>,
|
||||
mut drop_glue: Option<ValueRef>,
|
||||
mut free_glue: Option<ValueRef>,
|
||||
mut visit_glue: Option<ValueRef>};
|
||||
pub struct tydesc_info {
|
||||
ty: ty::t,
|
||||
tydesc: ValueRef,
|
||||
size: ValueRef,
|
||||
align: ValueRef,
|
||||
addrspace: addrspace,
|
||||
take_glue: Option<ValueRef>,
|
||||
drop_glue: Option<ValueRef>,
|
||||
free_glue: Option<ValueRef>,
|
||||
visit_glue: Option<ValueRef>
|
||||
}
|
||||
|
||||
/*
|
||||
* A note on nomenclature of linking: "extern", "foreign", and "upcall".
|
||||
|
@ -124,18 +125,19 @@ pub type tydesc_info =
|
|||
*
|
||||
*/
|
||||
|
||||
pub type stats =
|
||||
{mut n_static_tydescs: uint,
|
||||
mut n_glues_created: uint,
|
||||
mut n_null_glues: uint,
|
||||
mut n_real_glues: uint,
|
||||
mut n_fns: uint,
|
||||
mut n_monos: uint,
|
||||
mut n_inlines: uint,
|
||||
mut n_closures: uint,
|
||||
llvm_insn_ctxt: @mut ~[~str],
|
||||
llvm_insns: HashMap<~str, uint>,
|
||||
fn_times: @mut ~[{ident: ~str, time: int}]};
|
||||
pub struct Stats {
|
||||
n_static_tydescs: uint,
|
||||
n_glues_created: uint,
|
||||
n_null_glues: uint,
|
||||
n_real_glues: uint,
|
||||
n_fns: uint,
|
||||
n_monos: uint,
|
||||
n_inlines: uint,
|
||||
n_closures: uint,
|
||||
llvm_insn_ctxt: @mut ~[~str],
|
||||
llvm_insns: HashMap<~str, uint>,
|
||||
fn_times: @mut ~[{ident: ~str, time: int}]
|
||||
}
|
||||
|
||||
pub struct BuilderRef_res {
|
||||
B: BuilderRef,
|
||||
|
@ -170,7 +172,7 @@ pub struct crate_ctxt {
|
|||
enum_sizes: HashMap<ty::t, uint>,
|
||||
discrims: HashMap<ast::def_id, ValueRef>,
|
||||
discrim_symbols: HashMap<ast::node_id, ~str>,
|
||||
tydescs: HashMap<ty::t, @tydesc_info>,
|
||||
tydescs: HashMap<ty::t, @mut tydesc_info>,
|
||||
// Set when running emit_tydescs to enforce that no more tydescs are
|
||||
// created.
|
||||
mut finished_tydescs: bool,
|
||||
|
@ -208,7 +210,7 @@ pub struct crate_ctxt {
|
|||
all_llvm_symbols: Set<~str>,
|
||||
tcx: ty::ctxt,
|
||||
maps: astencode::Maps,
|
||||
stats: stats,
|
||||
stats: @mut Stats,
|
||||
upcalls: @upcall::upcalls,
|
||||
tydesc_type: TypeRef,
|
||||
int_type: TypeRef,
|
||||
|
@ -216,7 +218,7 @@ pub struct crate_ctxt {
|
|||
task_type: TypeRef,
|
||||
opaque_vec_type: TypeRef,
|
||||
builder: BuilderRef_res,
|
||||
shape_cx: shape::ctxt,
|
||||
shape_cx: shape::Ctxt,
|
||||
crate_map: ValueRef,
|
||||
// Set when at least one function uses GC. Needed so that
|
||||
// decl_gc_metadata knows whether to link to the module metadata, which
|
||||
|
|
|
@ -367,16 +367,16 @@ fn create_pointer_type(cx: @crate_ctxt, t: ty::t, span: span,
|
|||
return mdval;
|
||||
}
|
||||
|
||||
type struct_ctxt = {
|
||||
struct StructCtxt {
|
||||
file: ValueRef,
|
||||
name: ~str,
|
||||
line: int,
|
||||
mut members: ~[ValueRef],
|
||||
mut total_size: int,
|
||||
members: ~[ValueRef],
|
||||
total_size: int,
|
||||
align: int
|
||||
};
|
||||
}
|
||||
|
||||
fn finish_structure(cx: @struct_ctxt) -> ValueRef {
|
||||
fn finish_structure(cx: @mut StructCtxt) -> ValueRef {
|
||||
return create_composite_type(StructureTypeTag,
|
||||
/*bad*/copy cx.name,
|
||||
cx.file,
|
||||
|
@ -389,14 +389,15 @@ fn finish_structure(cx: @struct_ctxt) -> ValueRef {
|
|||
}
|
||||
|
||||
fn create_structure(file: @metadata<file_md>, +name: ~str, line: int)
|
||||
-> @struct_ctxt {
|
||||
let cx = @{file: file.node,
|
||||
name: name,
|
||||
line: line,
|
||||
mut members: ~[],
|
||||
mut total_size: 0,
|
||||
align: 64 //XXX different alignment per arch?
|
||||
};
|
||||
-> @mut StructCtxt {
|
||||
let cx = @mut StructCtxt {
|
||||
file: file.node,
|
||||
name: name,
|
||||
line: line,
|
||||
members: ~[],
|
||||
total_size: 0,
|
||||
align: 64 //XXX different alignment per arch?
|
||||
};
|
||||
return cx;
|
||||
}
|
||||
|
||||
|
@ -416,7 +417,11 @@ fn create_derived_type(type_tag: int, file: ValueRef, +name: ~str, line: int,
|
|||
return llmdnode(lldata);
|
||||
}
|
||||
|
||||
fn add_member(cx: @struct_ctxt, +name: ~str, line: int, size: int, align: int,
|
||||
fn add_member(cx: @mut StructCtxt,
|
||||
+name: ~str,
|
||||
line: int,
|
||||
size: int,
|
||||
align: int,
|
||||
ty: ValueRef) {
|
||||
cx.members.push(create_derived_type(MemberTag, cx.file, name, line,
|
||||
size * 8, align * 8, cx.total_size,
|
||||
|
|
|
@ -143,7 +143,7 @@ pub fn free_ty_immediate(bcx: block, v: ValueRef, t: ty::t) -> block {
|
|||
}
|
||||
|
||||
pub fn lazily_emit_all_tydesc_glue(ccx: @crate_ctxt,
|
||||
static_ti: @tydesc_info) {
|
||||
static_ti: @mut tydesc_info) {
|
||||
lazily_emit_tydesc_glue(ccx, abi::tydesc_field_take_glue, static_ti);
|
||||
lazily_emit_tydesc_glue(ccx, abi::tydesc_field_drop_glue, static_ti);
|
||||
lazily_emit_tydesc_glue(ccx, abi::tydesc_field_free_glue, static_ti);
|
||||
|
@ -204,7 +204,7 @@ pub fn simplified_glue_type(tcx: ty::ctxt, field: uint, t: ty::t) -> ty::t {
|
|||
return t;
|
||||
}
|
||||
|
||||
pub pure fn cast_glue(ccx: @crate_ctxt, ti: @tydesc_info, v: ValueRef)
|
||||
pub pure fn cast_glue(ccx: @crate_ctxt, ti: @mut tydesc_info, v: ValueRef)
|
||||
-> ValueRef {
|
||||
unsafe {
|
||||
let llfnty = type_of_glue_fn(ccx, ti.ty);
|
||||
|
@ -214,7 +214,7 @@ pub pure fn cast_glue(ccx: @crate_ctxt, ti: @tydesc_info, v: ValueRef)
|
|||
|
||||
pub fn lazily_emit_simplified_tydesc_glue(ccx: @crate_ctxt,
|
||||
field: uint,
|
||||
ti: @tydesc_info) -> bool {
|
||||
ti: @mut tydesc_info) -> bool {
|
||||
let _icx = ccx.insn_ctxt("lazily_emit_simplified_tydesc_glue");
|
||||
let simpl = simplified_glue_type(ccx.tcx, field, ti.ty);
|
||||
if simpl != ti.ty {
|
||||
|
@ -241,7 +241,7 @@ pub fn lazily_emit_simplified_tydesc_glue(ccx: @crate_ctxt,
|
|||
|
||||
pub fn lazily_emit_tydesc_glue(ccx: @crate_ctxt,
|
||||
field: uint,
|
||||
ti: @tydesc_info) {
|
||||
ti: @mut tydesc_info) {
|
||||
let _icx = ccx.insn_ctxt("lazily_emit_tydesc_glue");
|
||||
let llfnty = type_of_glue_fn(ccx, ti.ty);
|
||||
|
||||
|
@ -305,8 +305,11 @@ pub fn lazily_emit_tydesc_glue(ccx: @crate_ctxt,
|
|||
}
|
||||
|
||||
// See [Note-arg-mode]
|
||||
pub fn call_tydesc_glue_full(++bcx: block, v: ValueRef, tydesc: ValueRef,
|
||||
field: uint, static_ti: Option<@tydesc_info>) {
|
||||
pub fn call_tydesc_glue_full(++bcx: block,
|
||||
v: ValueRef,
|
||||
tydesc: ValueRef,
|
||||
field: uint,
|
||||
static_ti: Option<@mut tydesc_info>) {
|
||||
let _icx = bcx.insn_ctxt("call_tydesc_glue_full");
|
||||
let ccx = bcx.ccx();
|
||||
// NB: Don't short-circuit even if this block is unreachable because
|
||||
|
@ -647,7 +650,7 @@ pub fn declare_tydesc_addrspace(ccx: @crate_ctxt, t: ty::t) -> addrspace {
|
|||
}
|
||||
|
||||
// Generates the declaration for (but doesn't emit) a type descriptor.
|
||||
pub fn declare_tydesc(ccx: @crate_ctxt, t: ty::t) -> @tydesc_info {
|
||||
pub fn declare_tydesc(ccx: @crate_ctxt, t: ty::t) -> @mut tydesc_info {
|
||||
let _icx = ccx.insn_ctxt("declare_tydesc");
|
||||
// If emit_tydescs already ran, then we shouldn't be creating any new
|
||||
// tydescs.
|
||||
|
@ -678,16 +681,17 @@ pub fn declare_tydesc(ccx: @crate_ctxt, t: ty::t) -> @tydesc_info {
|
|||
llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type, buf)
|
||||
}
|
||||
});
|
||||
let inf =
|
||||
@{ty: t,
|
||||
tydesc: gvar,
|
||||
size: llsize,
|
||||
align: llalign,
|
||||
addrspace: addrspace,
|
||||
mut take_glue: None,
|
||||
mut drop_glue: None,
|
||||
mut free_glue: None,
|
||||
mut visit_glue: None};
|
||||
let inf = @mut tydesc_info {
|
||||
ty: t,
|
||||
tydesc: gvar,
|
||||
size: llsize,
|
||||
align: llalign,
|
||||
addrspace: addrspace,
|
||||
take_glue: None,
|
||||
drop_glue: None,
|
||||
free_glue: None,
|
||||
visit_glue: None
|
||||
};
|
||||
log(debug, ~"--- declare_tydesc " + ppaux::ty_to_str(ccx.tcx, t));
|
||||
return inf;
|
||||
}
|
||||
|
|
|
@ -28,25 +28,24 @@ use std::oldmap::HashMap;
|
|||
use syntax::ast::def_id;
|
||||
use syntax::ast;
|
||||
|
||||
pub enum reflector = {
|
||||
pub struct Reflector {
|
||||
visitor_val: ValueRef,
|
||||
visitor_methods: @~[ty::method],
|
||||
final_bcx: block,
|
||||
tydesc_ty: TypeRef,
|
||||
mut bcx: block
|
||||
};
|
||||
bcx: block
|
||||
}
|
||||
|
||||
pub impl reflector {
|
||||
|
||||
fn c_uint(u: uint) -> ValueRef {
|
||||
pub impl Reflector {
|
||||
fn c_uint(&mut self, u: uint) -> ValueRef {
|
||||
C_uint(self.bcx.ccx(), u)
|
||||
}
|
||||
|
||||
fn c_int(i: int) -> ValueRef {
|
||||
fn c_int(&mut self, i: int) -> ValueRef {
|
||||
C_int(self.bcx.ccx(), i)
|
||||
}
|
||||
|
||||
fn c_slice(+s: ~str) -> ValueRef {
|
||||
fn c_slice(&mut self, +s: ~str) -> ValueRef {
|
||||
// We're careful to not use first class aggregates here because that
|
||||
// will kick us off fast isel. (Issue #4352.)
|
||||
let bcx = self.bcx;
|
||||
|
@ -60,7 +59,7 @@ pub impl reflector {
|
|||
scratch.val
|
||||
}
|
||||
|
||||
fn c_size_and_align(t: ty::t) -> ~[ValueRef] {
|
||||
fn c_size_and_align(&mut self, t: ty::t) -> ~[ValueRef] {
|
||||
let tr = type_of::type_of(self.bcx.ccx(), t);
|
||||
let s = machine::llsize_of_real(self.bcx.ccx(), tr);
|
||||
let a = machine::llalign_of_min(self.bcx.ccx(), tr);
|
||||
|
@ -68,19 +67,19 @@ pub impl reflector {
|
|||
self.c_uint(a)];
|
||||
}
|
||||
|
||||
fn c_tydesc(t: ty::t) -> ValueRef {
|
||||
fn c_tydesc(&mut self, t: ty::t) -> ValueRef {
|
||||
let bcx = self.bcx;
|
||||
let static_ti = get_tydesc(bcx.ccx(), t);
|
||||
glue::lazily_emit_all_tydesc_glue(bcx.ccx(), static_ti);
|
||||
PointerCast(bcx, static_ti.tydesc, T_ptr(self.tydesc_ty))
|
||||
}
|
||||
|
||||
fn c_mt(mt: ty::mt) -> ~[ValueRef] {
|
||||
fn c_mt(&mut self, mt: ty::mt) -> ~[ValueRef] {
|
||||
~[self.c_uint(mt.mutbl as uint),
|
||||
self.c_tydesc(mt.ty)]
|
||||
}
|
||||
|
||||
fn visit(ty_name: ~str, args: ~[ValueRef]) {
|
||||
fn visit(&mut self, ty_name: ~str, args: ~[ValueRef]) {
|
||||
let tcx = self.bcx.tcx();
|
||||
let mth_idx = ty::method_idx(
|
||||
tcx.sess.ident_of(~"visit_" + ty_name),
|
||||
|
@ -114,15 +113,18 @@ pub impl reflector {
|
|||
self.bcx = next_bcx
|
||||
}
|
||||
|
||||
fn bracketed(bracket_name: ~str, +extra: ~[ValueRef],
|
||||
inner: fn()) {
|
||||
fn bracketed(&mut self,
|
||||
bracket_name: ~str,
|
||||
+extra: ~[ValueRef],
|
||||
inner: &fn()) {
|
||||
// XXX: Bad copy.
|
||||
self.visit(~"enter_" + bracket_name, copy extra);
|
||||
inner();
|
||||
self.visit(~"leave_" + bracket_name, extra);
|
||||
}
|
||||
|
||||
fn vstore_name_and_extra(t: ty::t,
|
||||
fn vstore_name_and_extra(&mut self,
|
||||
t: ty::t,
|
||||
vstore: ty::vstore,
|
||||
f: fn(+s: ~str,+v: ~[ValueRef])) {
|
||||
match vstore {
|
||||
|
@ -137,13 +139,12 @@ pub impl reflector {
|
|||
}
|
||||
}
|
||||
|
||||
fn leaf(+name: ~str) {
|
||||
fn leaf(&mut self, +name: ~str) {
|
||||
self.visit(name, ~[]);
|
||||
}
|
||||
|
||||
// Entrypoint
|
||||
fn visit_ty(t: ty::t) {
|
||||
|
||||
fn visit_ty(&mut self, t: ty::t) {
|
||||
let bcx = self.bcx;
|
||||
debug!("reflect::visit_ty %s",
|
||||
ty_to_str(bcx.ccx().tcx, t));
|
||||
|
@ -301,7 +302,7 @@ pub impl reflector {
|
|||
}
|
||||
}
|
||||
|
||||
fn visit_sig(&self, retval: uint, sig: &ty::FnSig) {
|
||||
fn visit_sig(&mut self, retval: uint, sig: &ty::FnSig) {
|
||||
for sig.inputs.eachi |i, arg| {
|
||||
let modeval = match arg.mode {
|
||||
ast::infer(_) => 0u,
|
||||
|
@ -333,13 +334,13 @@ pub fn emit_calls_to_trait_visit_ty(bcx: block,
|
|||
assert bcx.ccx().tcx.intrinsic_defs.contains_key_ref(&tydesc);
|
||||
let (_, tydesc_ty) = bcx.ccx().tcx.intrinsic_defs.get(&tydesc);
|
||||
let tydesc_ty = type_of::type_of(bcx.ccx(), tydesc_ty);
|
||||
let r = reflector({
|
||||
let mut r = Reflector {
|
||||
visitor_val: visitor_val,
|
||||
visitor_methods: ty::trait_methods(bcx.tcx(), visitor_trait_id),
|
||||
final_bcx: final,
|
||||
tydesc_ty: tydesc_ty,
|
||||
mut bcx: bcx
|
||||
});
|
||||
bcx: bcx
|
||||
};
|
||||
r.visit_ty(t);
|
||||
Br(r.bcx, final.llbb);
|
||||
return final;
|
||||
|
|
|
@ -34,7 +34,11 @@ use syntax::util::interner;
|
|||
|
||||
use ty_ctxt = middle::ty::ctxt;
|
||||
|
||||
pub type ctxt = {mut next_tag_id: u16, pad: u16, pad2: u32};
|
||||
pub struct Ctxt {
|
||||
next_tag_id: u16,
|
||||
pad: u16,
|
||||
pad2: u32
|
||||
}
|
||||
|
||||
pub fn mk_global(ccx: @crate_ctxt,
|
||||
name: ~str,
|
||||
|
@ -57,14 +61,18 @@ pub fn mk_global(ccx: @crate_ctxt,
|
|||
}
|
||||
}
|
||||
|
||||
pub fn mk_ctxt(llmod: ModuleRef) -> ctxt {
|
||||
pub fn mk_ctxt(llmod: ModuleRef) -> Ctxt {
|
||||
unsafe {
|
||||
let llshapetablesty = trans::common::T_named_struct(~"shapes");
|
||||
let _llshapetables = str::as_c_str(~"shapes", |buf| {
|
||||
llvm::LLVMAddGlobal(llmod, llshapetablesty, buf)
|
||||
});
|
||||
|
||||
return {mut next_tag_id: 0u16, pad: 0u16, pad2: 0u32};
|
||||
return Ctxt {
|
||||
next_tag_id: 0u16,
|
||||
pad: 0u16,
|
||||
pad2: 0u32
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -232,7 +232,7 @@ struct ctxt_ {
|
|||
vecs_implicitly_copyable: bool,
|
||||
legacy_modes: bool,
|
||||
legacy_records: bool,
|
||||
cstore: metadata::cstore::CStore,
|
||||
cstore: @mut metadata::cstore::CStore,
|
||||
sess: session::Session,
|
||||
def_map: resolve::DefMap,
|
||||
|
||||
|
|
|
@ -11,16 +11,16 @@
|
|||
/*!
|
||||
* Conversion from AST representation of types to the ty.rs
|
||||
* representation. The main routine here is `ast_ty_to_ty()`: each use
|
||||
* is parameterized by an instance of `ast_conv` and a `region_scope`.
|
||||
* is parameterized by an instance of `AstConv` and a `region_scope`.
|
||||
*
|
||||
* The parameterization of `ast_ty_to_ty()` is because it behaves
|
||||
* somewhat differently during the collect and check phases, particularly
|
||||
* with respect to looking up the types of top-level items. In the
|
||||
* collect phase, the crate context is used as the `ast_conv` instance;
|
||||
* collect phase, the crate context is used as the `AstConv` instance;
|
||||
* in this phase, the `get_item_ty()` function triggers a recursive call
|
||||
* to `ty_of_item()` (note that `ast_ty_to_ty()` will detect recursive
|
||||
* types and report an error). In the check phase, when the @fn_ctxt is
|
||||
* used as the `ast_conv`, `get_item_ty()` just looks up the item type in
|
||||
* types and report an error). In the check phase, when the @FnCtxt is
|
||||
* used as the `AstConv`, `get_item_ty()` just looks up the item type in
|
||||
* `tcx.tcache`.
|
||||
*
|
||||
* The `region_scope` trait controls how region references are
|
||||
|
@ -31,7 +31,7 @@
|
|||
* region, or `type_rscope`, which permits the self region if the type in
|
||||
* question is parameterized by a region.
|
||||
*
|
||||
* Unlike the `ast_conv` trait, the region scope can change as we descend
|
||||
* Unlike the `AstConv` trait, the region scope can change as we descend
|
||||
* the type. This is to accommodate the fact that (a) fn types are binding
|
||||
* scopes and (b) the default region may change. To understand case (a),
|
||||
* consider something like:
|
||||
|
@ -58,12 +58,11 @@ use middle::pat_util::pat_id_map;
|
|||
use middle::ty::{arg, field, substs};
|
||||
use middle::ty::{ty_param_substs_and_ty};
use middle::ty;
use middle::typeck::check::fn_ctxt;
use middle::typeck::collect;
use middle::typeck::rscope::{anon_rscope, binding_rscope, empty_rscope};
use middle::typeck::rscope::{in_anon_rscope, in_binding_rscope};
use middle::typeck::rscope::{region_scope, type_rscope};
use middle::typeck::{crate_ctxt, write_substs_to_tcx, write_ty_to_tcx};
use middle::typeck::{CrateCtxt, write_substs_to_tcx, write_ty_to_tcx};

use core::result;
use core::vec;

@@ -72,13 +71,13 @@ use syntax::codemap::span;
use syntax::print::pprust::path_to_str;
use util::common::indenter;

pub trait ast_conv {
fn tcx() -> ty::ctxt;
fn ccx() -> @crate_ctxt;
fn get_item_ty(id: ast::def_id) -> ty::ty_param_bounds_and_ty;
pub trait AstConv {
fn tcx(@mut self) -> ty::ctxt;
fn ccx(@mut self) -> @mut CrateCtxt;
fn get_item_ty(@mut self, id: ast::def_id) -> ty::ty_param_bounds_and_ty;

// what type should we use when a type is omitted?
fn ty_infer(span: span) -> ty::t;
fn ty_infer(@mut self, span: span) -> ty::t;
}

pub fn get_region_reporting_err(tcx: ty::ctxt,

@@ -95,9 +94,12 @@ pub fn get_region_reporting_err(tcx: ty::ctxt,
}
}

pub fn ast_region_to_region<AC: ast_conv, RS: region_scope Copy Durable>(
self: AC, rscope: RS, span: span, a_r: @ast::region) -> ty::Region {

pub fn ast_region_to_region<AC: AstConv, RS: region_scope Copy Durable>(
self: @mut AC,
rscope: RS,
span: span,
a_r: @ast::region)
-> ty::Region {
let res = match a_r.node {
ast::re_static => Ok(ty::re_static),
ast::re_anon => rscope.anon_region(span),

@@ -108,10 +110,12 @@ pub fn ast_region_to_region<AC: ast_conv, RS: region_scope Copy Durable>(
get_region_reporting_err(self.tcx(), span, res)
}

pub fn ast_path_to_substs_and_ty<AC: ast_conv, RS: region_scope Copy Durable>(
self: AC, rscope: RS, did: ast::def_id,
path: @ast::path) -> ty_param_substs_and_ty {

pub fn ast_path_to_substs_and_ty<AC: AstConv, RS: region_scope Copy Durable>(
self: @mut AC,
rscope: RS,
did: ast::def_id,
path: @ast::path)
-> ty_param_substs_and_ty {
let tcx = self.tcx();
let {bounds: decl_bounds, region_param: decl_rp, ty: decl_ty} =
self.get_item_ty(did);

@@ -158,13 +162,13 @@ pub fn ast_path_to_substs_and_ty<AC: ast_conv, RS: region_scope Copy Durable>(
{substs: substs, ty: ty}
}

pub fn ast_path_to_ty<AC: ast_conv, RS: region_scope Copy Durable>(
self: AC,
rscope: RS,
did: ast::def_id,
path: @ast::path,
path_id: ast::node_id) -> ty_param_substs_and_ty {

pub fn ast_path_to_ty<AC: AstConv, RS: region_scope Copy Durable>(
self: @mut AC,
rscope: RS,
did: ast::def_id,
path: @ast::path,
path_id: ast::node_id)
-> ty_param_substs_and_ty {
// Look up the polytype of the item and then substitute the provided types
// for any type/region parameters.
let tcx = self.tcx();

@@ -181,11 +185,11 @@ pub const NO_TPS: uint = 2;
// Parses the programmer's textual representation of a type into our
// internal notion of a type. `getter` is a function that returns the type
// corresponding to a definition ID:
pub fn ast_ty_to_ty<AC: ast_conv, RS: region_scope Copy Durable>(
self: AC, rscope: RS, &&ast_ty: @ast::Ty) -> ty::t {
pub fn ast_ty_to_ty<AC: AstConv, RS: region_scope Copy Durable>(
self: @mut AC, rscope: RS, &&ast_ty: @ast::Ty) -> ty::t {

fn ast_mt_to_mt<AC: ast_conv, RS: region_scope Copy Durable>(
self: AC, rscope: RS, mt: ast::mt) -> ty::mt {
fn ast_mt_to_mt<AC: AstConv, RS: region_scope Copy Durable>(
self: @mut AC, rscope: RS, mt: ast::mt) -> ty::mt {

ty::mt {ty: ast_ty_to_ty(self, rscope, mt.ty), mutbl: mt.mutbl}
}

@@ -193,8 +197,8 @@ pub fn ast_ty_to_ty<AC: ast_conv, RS: region_scope Copy Durable>(
// Handle @, ~, and & being able to mean estrs and evecs.
// If a_seq_ty is a str or a vec, make it an estr/evec.
// Also handle function sigils and first-class trait types.
fn mk_pointer<AC: ast_conv, RS: region_scope Copy Durable>(
self: AC,
fn mk_pointer<AC: AstConv, RS: region_scope Copy Durable>(
self: @mut AC,
rscope: RS,
a_seq_ty: ast::mt,
vst: ty::vstore,

@@ -409,10 +413,12 @@ pub fn ast_ty_to_ty<AC: ast_conv, RS: region_scope Copy Durable>(
return typ;
}

pub fn ty_of_arg<AC: ast_conv, RS: region_scope Copy Durable>(
self: AC, rscope: RS, a: ast::arg,
expected_ty: Option<ty::arg>) -> ty::arg {

pub fn ty_of_arg<AC: AstConv, RS: region_scope Copy Durable>(
self: @mut AC,
rscope: RS,
a: ast::arg,
expected_ty: Option<ty::arg>)
-> ty::arg {
let ty = match a.ty.node {
ast::ty_infer if expected_ty.is_some() => expected_ty.get().ty,
ast::ty_infer => self.ty_infer(a.ty.span),

@@ -455,12 +461,13 @@ pub fn ty_of_arg<AC: ast_conv, RS: region_scope Copy Durable>(
arg {mode: mode, ty: ty}
}

pub fn ty_of_bare_fn<AC: ast_conv, RS: region_scope Copy Durable>(
self: AC, rscope: RS,
purity: ast::purity,
abi: ast::Abi,
decl: ast::fn_decl) -> ty::BareFnTy
{
pub fn ty_of_bare_fn<AC: AstConv, RS: region_scope Copy Durable>(
self: @mut AC,
rscope: RS,
purity: ast::purity,
abi: ast::Abi,
decl: ast::fn_decl)
-> ty::BareFnTy {
debug!("ty_of_fn_decl");

// new region names that appear inside of the fn decl are bound to

@@ -480,16 +487,17 @@ pub fn ty_of_bare_fn<AC: ast_conv, RS: region_scope Copy Durable>(
}
}

pub fn ty_of_closure<AC: ast_conv, RS: region_scope Copy Durable>(
self: AC, rscope: RS,
sigil: ast::Sigil,
purity: ast::purity,
onceness: ast::Onceness,
opt_region: Option<@ast::region>,
decl: ast::fn_decl,
expected_tys: Option<ty::FnSig>,
span: span) -> ty::ClosureTy
{
pub fn ty_of_closure<AC: AstConv, RS: region_scope Copy Durable>(
self: @mut AC,
rscope: RS,
sigil: ast::Sigil,
purity: ast::purity,
onceness: ast::Onceness,
opt_region: Option<@ast::region>,
decl: ast::fn_decl,
expected_tys: Option<ty::FnSig>,
span: span)
-> ty::ClosureTy {
debug!("ty_of_fn_decl");
let _i = indenter();

@@ -14,7 +14,7 @@ use middle::pat_util::{PatIdMap, pat_id_map, pat_is_binding, pat_is_const};
use middle::pat_util::{pat_is_variant_or_struct};
use middle::ty;
use middle::typeck::check::demand;
use middle::typeck::check::{check_block, check_expr_has_type, fn_ctxt};
use middle::typeck::check::{check_block, check_expr_has_type, FnCtxt};
use middle::typeck::check::{instantiate_path, lookup_def};
use middle::typeck::check::{structure_of, valid_range_bounds};
use middle::typeck::require_same_types;

@@ -27,7 +27,7 @@ use syntax::ast_util;
use syntax::codemap::span;
use syntax::print::pprust;

pub fn check_match(fcx: @fn_ctxt,
pub fn check_match(fcx: @mut FnCtxt,
expr: @ast::expr,
discrim: @ast::expr,
arms: ~[ast::arm]) -> bool {

@@ -69,7 +69,7 @@ pub fn check_match(fcx: @fn_ctxt,
}

pub struct pat_ctxt {
fcx: @fn_ctxt,
fcx: @mut FnCtxt,
map: PatIdMap,
match_region: ty::Region, // Region for the match as a whole
block_region: ty::Region, // Region for the block of the arm

@@ -10,7 +10,7 @@

use middle::ty;
use middle::typeck::check::fn_ctxt;
use middle::typeck::check::FnCtxt;
use middle::typeck::infer;

use core::result::{Err, Ok};

@@ -20,13 +20,12 @@ use syntax::codemap::span;

// Requires that the two types unify, and prints an error message if they
// don't.
pub fn suptype(fcx: @fn_ctxt, sp: span,
expected: ty::t, actual: ty::t) {
pub fn suptype(fcx: @mut FnCtxt, sp: span, expected: ty::t, actual: ty::t) {
suptype_with_fn(fcx, sp, expected, actual,
|sp, e, a, s| { fcx.report_mismatched_types(sp, e, a, s) })
}

pub fn suptype_with_fn(fcx: @fn_ctxt,
pub fn suptype_with_fn(fcx: @mut FnCtxt,
sp: span,
expected: ty::t, actual: ty::t,
handle_err: fn(span, ty::t, ty::t, &ty::type_err)) {

@@ -40,7 +39,7 @@ pub fn suptype_with_fn(fcx: @fn_ctxt,
}
}

pub fn eqtype(fcx: @fn_ctxt, sp: span, expected: ty::t, actual: ty::t) {
pub fn eqtype(fcx: @mut FnCtxt, sp: span, expected: ty::t, actual: ty::t) {
match infer::mk_eqty(fcx.infcx(), false, sp, actual, expected) {
Ok(()) => { /* ok */ }
Err(ref err) => {

@@ -50,7 +49,7 @@ pub fn eqtype(fcx: @fn_ctxt, sp: span, expected: ty::t, actual: ty::t) {
}

// Checks that the type `actual` can be coerced to `expected`.
pub fn coerce(fcx: @fn_ctxt,
pub fn coerce(fcx: @mut FnCtxt,
sp: span,
expected: ty::t,
expr: @ast::expr) {

@@ -85,7 +85,7 @@ use middle::resolve::{Impl, MethodInfo};
use middle::resolve;
use middle::ty::*;
use middle::ty;
use middle::typeck::check::{fn_ctxt, impl_self_ty};
use middle::typeck::check::{FnCtxt, impl_self_ty};
use middle::typeck::check::{structurally_resolved_type};
use middle::typeck::check::vtable::VtableContext;
use middle::typeck::check::vtable;

@@ -112,7 +112,7 @@ use syntax::codemap::dummy_sp;
use syntax::codemap::span;

pub fn lookup(
fcx: @fn_ctxt,
fcx: @mut FnCtxt,

// In a call `a.b::<X, Y, ...>(...)`:
expr: @ast::expr, // The expression `a.b`.

@@ -143,7 +143,7 @@ pub fn lookup(
}

pub struct LookupContext {
fcx: @fn_ctxt,
fcx: @mut FnCtxt,
expr: @ast::expr,
self_expr: @ast::expr,
callee_id: node_id,

@@ -1264,7 +1264,7 @@ pub impl LookupContext {
ty::item_path_str(self.tcx(), did)));
}

fn infcx(&self) -> @infer::InferCtxt {
fn infcx(&self) -> @mut infer::InferCtxt {
self.fcx.inh.infcx
}
File diff suppressed because it is too large

@@ -35,7 +35,7 @@ use middle::ty::{encl_region, re_scope};
use middle::ty::{vstore_box, vstore_fixed, vstore_slice};
use middle::ty::{vstore_uniq};
use middle::ty;
use middle::typeck::check::fn_ctxt;
use middle::typeck::check::FnCtxt;
use middle::typeck::check::lookup_def;
use middle::typeck::infer::{fres, resolve_and_force_all_but_regions};
use middle::typeck::infer::{resolve_type};

@@ -49,10 +49,14 @@ use syntax::codemap::span;
use syntax::print::pprust;
use syntax::visit;

pub enum rcx { rcx_({fcx: @fn_ctxt, mut errors_reported: uint}) }
pub type rvt = visit::vt<@rcx>;
pub struct Rcx {
fcx: @mut FnCtxt,
errors_reported: uint
}

pub fn encl_region_of_def(fcx: @fn_ctxt, def: ast::def) -> ty::Region {
pub type rvt = visit::vt<@mut Rcx>;

pub fn encl_region_of_def(fcx: @mut FnCtxt, def: ast::def) -> ty::Region {
let tcx = fcx.tcx();
match def {
def_local(node_id, _) | def_arg(node_id, _, _) |

@@ -71,8 +75,8 @@ pub fn encl_region_of_def(fcx: @fn_ctxt, def: ast::def) -> ty::Region {
}
}

pub impl @rcx {
fn resolve_type(unresolved_ty: ty::t) -> ty::t {
pub impl Rcx {
fn resolve_type(@mut self, unresolved_ty: ty::t) -> ty::t {
/*!
* Try to resolve the type for the given node, returning
* t_err if an error results. Note that we never care

@@ -109,23 +113,22 @@ pub impl @rcx {
}

/// Try to resolve the type for the given node.
fn resolve_node_type(id: ast::node_id) -> ty::t {
fn resolve_node_type(@mut self, id: ast::node_id) -> ty::t {
self.resolve_type(self.fcx.node_ty(id))
}
}

pub fn regionck_expr(fcx: @fn_ctxt, e: @ast::expr) {
let rcx = rcx_({fcx:fcx, mut errors_reported: 0});
pub fn regionck_expr(fcx: @mut FnCtxt, e: @ast::expr) {
let rcx = @mut Rcx { fcx: fcx, errors_reported: 0 };
let v = regionck_visitor();
(v.visit_expr)(e, @(move rcx), v);
(v.visit_expr)(e, rcx, v);
fcx.infcx().resolve_regions();
}

pub fn regionck_fn(fcx: @fn_ctxt,
blk: ast::blk) {
let rcx = rcx_({fcx:fcx, mut errors_reported: 0});
pub fn regionck_fn(fcx: @mut FnCtxt, blk: ast::blk) {
let rcx = @mut Rcx { fcx: fcx, errors_reported: 0 };
let v = regionck_visitor();
(v.visit_block)(blk, @(move rcx), v);
(v.visit_block)(blk, rcx, v);
fcx.infcx().resolve_regions();
}

@@ -138,11 +141,11 @@ pub fn regionck_visitor() -> rvt {
.. *visit::default_visitor()})
}

pub fn visit_item(_item: @ast::item, &&_rcx: @rcx, _v: rvt) {
pub fn visit_item(_item: @ast::item, &&_rcx: @mut Rcx, _v: rvt) {
// Ignore items
}

pub fn visit_local(l: @ast::local, &&rcx: @rcx, v: rvt) {
pub fn visit_local(l: @ast::local, &&rcx: @mut Rcx, v: rvt) {
// Check to make sure that the regions in all local variables are
// within scope.
//

@@ -173,11 +176,11 @@ pub fn visit_local(l: @ast::local, &&rcx: @rcx, v: rvt) {
}
}

pub fn visit_block(b: ast::blk, &&rcx: @rcx, v: rvt) {
pub fn visit_block(b: ast::blk, &&rcx: @mut Rcx, v: rvt) {
visit::visit_block(b, rcx, v);
}

pub fn visit_expr(expr: @ast::expr, &&rcx: @rcx, v: rvt) {
pub fn visit_expr(expr: @ast::expr, &&rcx: @mut Rcx, v: rvt) {
debug!("visit_expr(e=%s)", rcx.fcx.expr_to_str(expr));

for rcx.fcx.inh.adjustments.find(&expr.id).each |adjustment| {

@@ -292,11 +295,11 @@ pub fn visit_expr(expr: @ast::expr, &&rcx: @rcx, v: rvt) {
visit::visit_expr(expr, rcx, v);
}

pub fn visit_stmt(s: @ast::stmt, &&rcx: @rcx, v: rvt) {
pub fn visit_stmt(s: @ast::stmt, &&rcx: @mut Rcx, v: rvt) {
visit::visit_stmt(s, rcx, v);
}

pub fn visit_node(id: ast::node_id, span: span, rcx: @rcx) -> bool {
pub fn visit_node(id: ast::node_id, span: span, rcx: @mut Rcx) -> bool {
/*!
*
* checks the type of the node `id` and reports an error if it

@@ -315,7 +318,7 @@ pub fn visit_node(id: ast::node_id, span: span, rcx: @rcx) -> bool {
constrain_regions_in_type_of_node(rcx, id, encl_region, span)
}

pub fn constrain_auto_ref(rcx: @rcx, expr: @ast::expr) {
pub fn constrain_auto_ref(rcx: @mut Rcx, expr: @ast::expr) {
/*!
*
* If `expr` is auto-ref'd (e.g., as part of a borrow), then this

@@ -360,7 +363,7 @@ pub fn constrain_auto_ref(rcx: @rcx, expr: @ast::expr) {
}

pub fn constrain_free_variables(
rcx: @rcx,
rcx: @mut Rcx,
region: ty::Region,
expr: @ast::expr) {
/*!

@@ -396,7 +399,7 @@ pub fn constrain_free_variables(
}

pub fn constrain_regions_in_type_of_node(
rcx: @rcx,
rcx: @mut Rcx,
id: ast::node_id,
encl_region: ty::Region,
span: span) -> bool {

@@ -413,7 +416,7 @@ pub fn constrain_regions_in_type_of_node(
}

pub fn constrain_regions_in_type(
rcx: @rcx,
rcx: @mut Rcx,
encl_region: ty::Region,
span: span,
ty: ty::t) -> bool {

@@ -434,7 +437,7 @@ pub fn constrain_regions_in_type(
|t| ty::type_has_regions(t));
return (e == rcx.errors_reported);

fn constrain_region(rcx: @rcx,
fn constrain_region(rcx: @mut Rcx,
encl_region: ty::Region,
span: span,
region: ty::Region) {

@@ -521,13 +524,13 @@ pub mod guarantor {
*/

use core::prelude::*;
use middle::typeck::check::regionck::{rcx, infallibly_mk_subr};
use middle::typeck::check::regionck::{Rcx, infallibly_mk_subr};
use middle::ty;
use syntax::ast;
use syntax::codemap::span;
use util::ppaux::{ty_to_str};

pub fn for_addr_of(rcx: @rcx, expr: @ast::expr, base: @ast::expr) {
pub fn for_addr_of(rcx: @mut Rcx, expr: @ast::expr, base: @ast::expr) {
/*!
*
* Computes the guarantor for an expression `&base` and then

@@ -542,7 +545,7 @@ pub mod guarantor {
link(rcx, expr.span, expr.id, guarantor);
}

pub fn for_match(rcx: @rcx, discr: @ast::expr, arms: &[ast::arm]) {
pub fn for_match(rcx: @mut Rcx, discr: @ast::expr, arms: &[ast::arm]) {
/*!
*
* Computes the guarantors for any ref bindings in a match and

@@ -558,11 +561,10 @@ pub mod guarantor {
}
}

pub fn for_autoref(rcx: @rcx,
pub fn for_autoref(rcx: @mut Rcx,
expr: @ast::expr,
adjustment: &ty::AutoAdjustment,
autoref: &ty::AutoRef)
{
autoref: &ty::AutoRef) {
/*!
*
* Computes the guarantor for an expression that has an

@@ -583,11 +585,10 @@ pub mod guarantor {
}

fn link(
rcx: @rcx,
rcx: @mut Rcx,
span: span,
id: ast::node_id,
guarantor: Option<ty::Region>)
{
guarantor: Option<ty::Region>) {
/*!
*
* Links the lifetime of the borrowed pointer resulting from a borrow

@@ -640,7 +641,7 @@ pub mod guarantor {
ty: ty::t
}

fn guarantor(rcx: @rcx, expr: @ast::expr) -> Option<ty::Region> {
fn guarantor(rcx: @mut Rcx, expr: @ast::expr) -> Option<ty::Region> {
/*!
*
* Computes the guarantor of `expr`, or None if `expr` is

@@ -715,7 +716,7 @@ pub mod guarantor {
}
}

fn categorize(rcx: @rcx, expr: @ast::expr) -> ExprCategorization {
fn categorize(rcx: @mut Rcx, expr: @ast::expr) -> ExprCategorization {
debug!("categorize(expr=%s)", rcx.fcx.expr_to_str(expr));
let _i = ::util::common::indenter();

@@ -741,8 +742,9 @@ pub mod guarantor {
return expr_ct.cat;
}

fn categorize_unadjusted(rcx: @rcx,
expr: @ast::expr) -> ExprCategorizationType {
fn categorize_unadjusted(rcx: @mut Rcx,
expr: @ast::expr)
-> ExprCategorizationType {
debug!("categorize_unadjusted(expr=%s)", rcx.fcx.expr_to_str(expr));
let _i = ::util::common::indenter();

@@ -765,11 +767,11 @@ pub mod guarantor {
}

fn apply_autoderefs(
+rcx: @rcx,
+rcx: @mut Rcx,
+expr: @ast::expr,
+autoderefs: uint,
+ct: ExprCategorizationType) -> ExprCategorizationType
{
+ct: ExprCategorizationType)
-> ExprCategorizationType {
let mut ct = ct;
let tcx = rcx.fcx.ccx.tcx;
for uint::range(0, autoderefs) |_| {

@@ -824,10 +826,9 @@ pub mod guarantor {
}

fn link_ref_bindings_in_pat(
rcx: @rcx,
rcx: @mut Rcx,
pat: @ast::pat,
guarantor: Option<ty::Region>)
{
guarantor: Option<ty::Region>) {
/*!
*
* Descends through the pattern, tracking the guarantor

@@ -901,10 +902,9 @@ pub mod guarantor {
}
}

fn link_ref_bindings_in_pats(rcx: @rcx,
fn link_ref_bindings_in_pats(rcx: @mut Rcx,
pats: &~[@ast::pat],
guarantor: Option<ty::Region>)
{
guarantor: Option<ty::Region>) {
for pats.each |pat| {
link_ref_bindings_in_pat(rcx, *pat, guarantor);
}

@@ -912,11 +912,11 @@ pub mod guarantor {

}

pub fn infallibly_mk_subr(rcx: @rcx,
pub fn infallibly_mk_subr(rcx: @mut Rcx,
a_is_expected: bool,
span: span,
a: ty::Region,
b: ty::Region) {
span: span,
a: ty::Region,
b: ty::Region) {
/*!
*
* Constrains `a` to be a subregion of `b`. In many cases, we

@@ -13,12 +13,12 @@ use core::prelude::*;
use middle::resolve;
use middle::ty::{param_ty, substs};
use middle::ty;
use middle::typeck::check::{fn_ctxt, impl_self_ty};
use middle::typeck::check::{FnCtxt, impl_self_ty};
use middle::typeck::check::{structurally_resolved_type};
use middle::typeck::infer::{fixup_err_to_str, InferCtxt};
use middle::typeck::infer::{resolve_and_force_all_but_regions, resolve_type};
use middle::typeck::infer;
use middle::typeck::{crate_ctxt, vtable_origin, vtable_param, vtable_res};
use middle::typeck::{CrateCtxt, vtable_origin, vtable_param, vtable_res};
use middle::typeck::{vtable_static, vtable_trait};
use util::common::indenter;
use util::ppaux::tys_to_str;

@@ -63,8 +63,8 @@ pub struct LocationInfo {
/// A vtable context includes an inference context, a crate context, and a
/// callback function to call in case of type error.
pub struct VtableContext {
ccx: @crate_ctxt,
infcx: @infer::InferCtxt
ccx: @mut CrateCtxt,
infcx: @mut infer::InferCtxt
}

pub impl VtableContext {

@@ -501,11 +501,13 @@ pub fn connect_trait_tps(vcx: &VtableContext,
}
}

pub fn insert_vtables(ccx: @crate_ctxt, callee_id: ast::node_id,
pub fn insert_vtables(ccx: @mut CrateCtxt,
callee_id: ast::node_id,
vtables: vtable_res) {
debug!("insert_vtables(callee_id=%d, vtables=%?)",
callee_id, vtables.map(|v| v.to_str(ccx.tcx)));
ccx.vtable_map.insert(callee_id, vtables);
let vtable_map = ccx.vtable_map;
vtable_map.insert(callee_id, vtables);
}

pub fn location_info_for_expr(expr: @ast::expr) -> LocationInfo {

@@ -515,7 +517,9 @@ pub fn location_info_for_expr(expr: @ast::expr) -> LocationInfo {
}
}

pub fn early_resolve_expr(ex: @ast::expr, &&fcx: @fn_ctxt, is_early: bool) {
pub fn early_resolve_expr(ex: @ast::expr,
&&fcx: @mut FnCtxt,
is_early: bool) {
debug!("vtable: early_resolve_expr() ex with id %? (early: %b): %s",
ex.id, is_early, expr_to_str(ex, fcx.tcx().sess.intr()));
let _indent = indenter();

@@ -540,7 +544,10 @@ pub fn early_resolve_expr(ex: @ast::expr, &&fcx: @fn_ctxt, is_early: bool) {
let vtbls = lookup_vtables(&vcx, &location_info_for_expr(ex),
item_ty.bounds, substs, false,
is_early);
if !is_early { cx.vtable_map.insert(ex.id, vtbls); }
if !is_early {
let vtable_map = cx.vtable_map;
vtable_map.insert(ex.id, vtbls);
}
}
}
_ => ()

@@ -625,8 +632,10 @@ pub fn early_resolve_expr(ex: @ast::expr, &&fcx: @fn_ctxt, is_early: bool) {
// vtable (that is: "ex has vtable
// <vtable>")
if !is_early {
cx.vtable_map.insert(
ex.id, @~[vtable]);
let vtable_map =
cx.vtable_map;
vtable_map.insert(ex.id,
@~[vtable]);
}
}
None => err = true

@@ -696,7 +705,10 @@ pub fn early_resolve_expr(ex: @ast::expr, &&fcx: @fn_ctxt, is_early: bool) {
Map this expression to that vtable (that is: "ex has
vtable <vtable>")
*/
if !is_early { cx.vtable_map.insert(ex.id, @~[vtable]); }
if !is_early {
let vtable_map = cx.vtable_map;
vtable_map.insert(ex.id, @~[vtable]);
}
fcx.tcx().legacy_boxed_traits.insert(ex.id, ());
}
}

@@ -709,19 +721,18 @@ pub fn early_resolve_expr(ex: @ast::expr, &&fcx: @fn_ctxt, is_early: bool) {
}

pub fn resolve_expr(ex: @ast::expr,
&&fcx: @fn_ctxt,
v: visit::vt<@fn_ctxt>) {
&&fcx: @mut FnCtxt,
v: visit::vt<@mut FnCtxt>) {
early_resolve_expr(ex, fcx, false);
visit::visit_expr(ex, fcx, v);
}

// Detect points where a trait-bounded type parameter is
// instantiated, resolve the impls for the parameters.
pub fn resolve_in_block(fcx: @fn_ctxt, bl: ast::blk) {
pub fn resolve_in_block(fcx: @mut FnCtxt, bl: ast::blk) {
visit::visit_block(bl, fcx, visit::mk_vt(@visit::Visitor {
visit_expr: resolve_expr,
visit_item: fn@(_i: @ast::item, &&_e: @fn_ctxt,
_v: visit::vt<@fn_ctxt>) {},
visit_item: |_,_,_| {},
.. *visit::default_visitor()
}));
}
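
A note on the recurring shape in the hunks above: every `cx.vtable_map.insert(...)` becomes a two-step `let vtable_map = cx.vtable_map; vtable_map.insert(...)`, copying the map handle out of the `@mut` context before mutating it. As a rough, hedged illustration only (modern Rust has no `@mut`, and the names below are hypothetical, not taken from this commit), the analogous pattern with today's shared-ownership types clones the map handle out of the context before the insert, so nothing else in the context is borrowed across the call:

use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;

// Hypothetical stand-in for a context like the one in the diff: the table
// lives behind its own shared, mutable cell rather than being a `mut` field.
struct Ctxt {
    vtable_map: Rc<RefCell<HashMap<u32, &'static str>>>,
}

fn insert_vtable(ccx: &Ctxt, callee_id: u32, vtable: &'static str) {
    // Mirror of the diff's `let vtable_map = ccx.vtable_map;` step: take a
    // handle to the map first, then mutate through that handle.
    let vtable_map = Rc::clone(&ccx.vtable_map);
    vtable_map.borrow_mut().insert(callee_id, vtable);
}

fn main() {
    let ccx = Ctxt { vtable_map: Rc::new(RefCell::new(HashMap::new())) };
    insert_vtable(&ccx, 42, "vtable_static");
    assert_eq!(ccx.vtable_map.borrow().get(&42), Some(&"vtable_static"));
}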

@@ -17,7 +17,7 @@ use core::prelude::*;
use middle::pat_util;
use middle::ty::arg;
use middle::ty;
use middle::typeck::check::{fn_ctxt, self_info};
use middle::typeck::check::{FnCtxt, self_info};
use middle::typeck::infer::{force_all, resolve_all, resolve_region};
use middle::typeck::infer::{resolve_type};
use middle::typeck::infer;

@@ -34,8 +34,8 @@ use syntax::codemap::span;
use syntax::print::pprust::pat_to_str;
use syntax::visit;

fn resolve_type_vars_in_type(fcx: @fn_ctxt, sp: span, typ: ty::t)
-> Option<ty::t> {
fn resolve_type_vars_in_type(fcx: @mut FnCtxt, sp: span, typ: ty::t)
-> Option<ty::t> {
if !ty::type_needs_infer(typ) { return Some(typ); }
match resolve_type(fcx.infcx(), typ, resolve_all | force_all) {
Ok(new_type) => return Some(new_type),

@@ -52,28 +52,28 @@ fn resolve_type_vars_in_type(fcx: @fn_ctxt, sp: span, typ: ty::t)
}
}

fn resolve_method_map_entry(fcx: @fn_ctxt, sp: span, id: ast::node_id)
{
fn resolve_method_map_entry(fcx: @mut FnCtxt, sp: span, id: ast::node_id) {
// Resolve any method map entry
match fcx.ccx.method_map.find(&id) {
None => {}
Some(ref mme) => {
for resolve_type_vars_in_type(fcx, sp, mme.self_arg.ty).each |t| {
fcx.ccx.method_map.insert(
id,
method_map_entry {
self_arg: arg {mode: mme.self_arg.mode, ty: *t},
.. *mme
}
);
let method_map = fcx.ccx.method_map;
method_map.insert(id,
method_map_entry {
self_arg: arg {
mode: mme.self_arg.mode,
ty: *t
},
.. *mme
});
}
}
}
}

fn resolve_type_vars_for_node(wbcx: wb_ctxt, sp: span, id: ast::node_id)
-> Option<ty::t>
{
fn resolve_type_vars_for_node(wbcx: @mut WbCtxt, sp: span, id: ast::node_id)
-> Option<ty::t> {
let fcx = wbcx.fcx, tcx = fcx.ccx.tcx;

// Resolve any borrowings for the node with id `id`

@@ -137,10 +137,10 @@ fn resolve_type_vars_for_node(wbcx: wb_ctxt, sp: span, id: ast::node_id)
}
}

fn maybe_resolve_type_vars_for_node(wbcx: wb_ctxt, sp: span,
fn maybe_resolve_type_vars_for_node(wbcx: @mut WbCtxt,
sp: span,
id: ast::node_id)
-> Option<ty::t>
{
-> Option<ty::t> {
if wbcx.fcx.inh.node_types.contains_key_ref(&id) {
resolve_type_vars_for_node(wbcx, sp, id)
} else {

@@ -148,18 +148,22 @@ fn maybe_resolve_type_vars_for_node(wbcx: wb_ctxt, sp: span,
}
}

type wb_ctxt =
// As soon as we hit an error we have to stop resolving
// the entire function
{fcx: @fn_ctxt, mut success: bool};
type wb_vt = visit::vt<wb_ctxt>;
struct WbCtxt {
fcx: @mut FnCtxt,

fn visit_stmt(s: @ast::stmt, wbcx: wb_ctxt, v: wb_vt) {
// As soon as we hit an error we have to stop resolving
// the entire function.
success: bool,
}

type wb_vt = visit::vt<@mut WbCtxt>;

fn visit_stmt(s: @ast::stmt, &&wbcx: @mut WbCtxt, v: wb_vt) {
if !wbcx.success { return; }
resolve_type_vars_for_node(wbcx, s.span, ty::stmt_node_id(s));
visit::visit_stmt(s, wbcx, v);
}
fn visit_expr(e: @ast::expr, wbcx: wb_ctxt, v: wb_vt) {
fn visit_expr(e: @ast::expr, &&wbcx: @mut WbCtxt, v: wb_vt) {
if !wbcx.success { return; }
resolve_type_vars_for_node(wbcx, e.span, e.id);
resolve_method_map_entry(wbcx.fcx, e.span, e.id);

@@ -196,12 +200,12 @@ fn visit_expr(e: @ast::expr, wbcx: wb_ctxt, v: wb_vt) {
}
visit::visit_expr(e, wbcx, v);
}
fn visit_block(b: ast::blk, wbcx: wb_ctxt, v: wb_vt) {
fn visit_block(b: ast::blk, &&wbcx: @mut WbCtxt, v: wb_vt) {
if !wbcx.success { return; }
resolve_type_vars_for_node(wbcx, b.span, b.node.id);
visit::visit_block(b, wbcx, v);
}
fn visit_pat(p: @ast::pat, wbcx: wb_ctxt, v: wb_vt) {
fn visit_pat(p: @ast::pat, &&wbcx: @mut WbCtxt, v: wb_vt) {
if !wbcx.success { return; }
resolve_type_vars_for_node(wbcx, p.span, p.id);
debug!("Type for pattern binding %s (id %d) resolved to %s",

@@ -211,7 +215,7 @@ fn visit_pat(p: @ast::pat, wbcx: wb_ctxt, v: wb_vt) {
p.id)));
visit::visit_pat(p, wbcx, v);
}
fn visit_local(l: @ast::local, wbcx: wb_ctxt, v: wb_vt) {
fn visit_local(l: @ast::local, &&wbcx: @mut WbCtxt, v: wb_vt) {
if !wbcx.success { return; }
let var_ty = wbcx.fcx.local_ty(l.span, l.node.id);
match resolve_type(wbcx.fcx.infcx(), var_ty, resolve_all | force_all) {

@@ -233,11 +237,11 @@ fn visit_local(l: @ast::local, wbcx: wb_ctxt, v: wb_vt) {
}
visit::visit_local(l, wbcx, v);
}
fn visit_item(_item: @ast::item, _wbcx: wb_ctxt, _v: wb_vt) {
fn visit_item(_item: @ast::item, &&_wbcx: @mut WbCtxt, _v: wb_vt) {
// Ignore items
}

fn mk_visitor() -> visit::vt<wb_ctxt> {
fn mk_visitor() -> visit::vt<@mut WbCtxt> {
visit::mk_vt(@visit::Visitor {visit_item: visit_item,
visit_stmt: visit_stmt,
visit_expr: visit_expr,

@@ -247,18 +251,18 @@ fn mk_visitor() -> visit::vt<wb_ctxt> {
.. *visit::default_visitor()})
}

pub fn resolve_type_vars_in_expr(fcx: @fn_ctxt, e: @ast::expr) -> bool {
let wbcx = {fcx: fcx, mut success: true};
pub fn resolve_type_vars_in_expr(fcx: @mut FnCtxt, e: @ast::expr) -> bool {
let wbcx = @mut WbCtxt { fcx: fcx, success: true };
let visit = mk_visitor();
(visit.visit_expr)(e, wbcx, visit);
return wbcx.success;
}

pub fn resolve_type_vars_in_fn(fcx: @fn_ctxt,
pub fn resolve_type_vars_in_fn(fcx: @mut FnCtxt,
decl: &ast::fn_decl,
blk: ast::blk,
self_info: Option<self_info>) -> bool {
let wbcx = {fcx: fcx, mut success: true};
let wbcx = @mut WbCtxt { fcx: fcx, success: true };
let visit = mk_visitor();
(visit.visit_block)(blk, wbcx, visit);
for self_info.each |self_info| {
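
The writeback hunks above replace the `{fcx: @fn_ctxt, mut success: bool}` record with a `WbCtxt` struct that the visitor callbacks share as a single mutable handle, flipping `success` through it instead of through a `mut` field. As a hedged sketch only (hypothetical names, modern Rust rather than the dialect in this commit), the same shape can be expressed with a shared context whose flag sits in a `Cell`:

use std::cell::Cell;
use std::rc::Rc;

// Hypothetical analogue of the writeback context: the whole context is
// shared by the visitor callbacks and the success flag sits in a Cell.
struct WbCtxt {
    success: Cell<bool>,
}

fn visit_stmt(wbcx: &Rc<WbCtxt>, resolved: bool) {
    // As in the diff's visitors: stop doing work once one resolution failed.
    if !wbcx.success.get() {
        return;
    }
    if !resolved {
        wbcx.success.set(false);
    }
}

fn main() {
    let wbcx = Rc::new(WbCtxt { success: Cell::new(true) });
    visit_stmt(&wbcx, true);
    visit_stmt(&wbcx, false);
    assert!(!wbcx.success.get());
}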

@@ -33,7 +33,7 @@ use middle::ty::{ty_type, ty_uint, ty_uniq, ty_bare_fn, ty_closure};
use middle::ty::{ty_opaque_closure_ptr, ty_unboxed_vec, type_kind_ext};
use middle::ty::{type_is_ty_var};
use middle::ty;
use middle::typeck::crate_ctxt;
use middle::typeck::CrateCtxt;
use middle::typeck::infer::combine::Combine;
use middle::typeck::infer::{InferCtxt, can_mk_subty};
use middle::typeck::infer::{new_infer_ctxt, resolve_ivar};

@@ -71,7 +71,7 @@ pub struct UniversalQuantificationResult {
bounds: @~[param_bounds]
}

pub fn get_base_type(inference_context: @InferCtxt,
pub fn get_base_type(inference_context: @mut InferCtxt,
span: span,
original_type: t)
-> Option<t> {

@@ -119,7 +119,7 @@ pub fn get_base_type(inference_context: @InferCtxt,
}

// Returns the def ID of the base type, if there is one.
pub fn get_base_type_def_id(inference_context: @InferCtxt,
pub fn get_base_type_def_id(inference_context: @mut InferCtxt,
span: span,
original_type: t)
-> Option<def_id> {

@@ -171,7 +171,7 @@ pub fn CoherenceInfo() -> CoherenceInfo {
}
}

pub fn CoherenceChecker(crate_context: @crate_ctxt) -> CoherenceChecker {
pub fn CoherenceChecker(crate_context: @mut CrateCtxt) -> CoherenceChecker {
CoherenceChecker {
crate_context: crate_context,
inference_context: new_infer_ctxt(crate_context.tcx),

@@ -182,8 +182,8 @@ pub fn CoherenceChecker(crate_context: @crate_ctxt) -> CoherenceChecker {
}

pub struct CoherenceChecker {
crate_context: @crate_ctxt,
inference_context: @InferCtxt,
crate_context: @mut CrateCtxt,
inference_context: @mut InferCtxt,

// A mapping from implementations to the corresponding base type
// definition ID.

@@ -415,7 +415,7 @@ pub impl CoherenceChecker {
}

fn check_implementation_coherence() {
let coherence_info = &self.crate_context.coherence_info;
let coherence_info = &mut self.crate_context.coherence_info;
let extension_methods = &coherence_info.extension_methods;

for extension_methods.each_key_ref |&trait_id| {

@@ -478,7 +478,7 @@ pub impl CoherenceChecker {
fn iter_impls_of_trait(trait_def_id: def_id,
f: &fn(@Impl)) {

let coherence_info = &self.crate_context.coherence_info;
let coherence_info = &mut self.crate_context.coherence_info;
let extension_methods = &coherence_info.extension_methods;

match extension_methods.find(&trait_def_id) {

@@ -824,9 +824,8 @@ pub impl CoherenceChecker {
// External crate handling

fn add_impls_for_module(impls_seen: HashMap<def_id,()>,
crate_store: CStore,
crate_store: @mut CStore,
module_def_id: def_id) {

let implementations = get_impls_for_mod(crate_store,
module_def_id,
None);

@@ -986,7 +985,7 @@ pub impl CoherenceChecker {
//

fn populate_destructor_table() {
let coherence_info = &self.crate_context.coherence_info;
let coherence_info = &mut self.crate_context.coherence_info;
let tcx = self.crate_context.tcx;
let drop_trait = tcx.lang_items.drop_trait();
let impls_opt = coherence_info.extension_methods.find(&drop_trait);

@@ -1035,8 +1034,8 @@ pub impl CoherenceChecker {
}
}

pub fn check_coherence(crate_context: @crate_ctxt, crate: @crate) {
pub fn check_coherence(crate_context: @mut CrateCtxt, crate: @crate) {
let coherence_checker = @CoherenceChecker(crate_context);
(*coherence_checker).check_coherence(crate);
coherence_checker.check_coherence(crate);
}

@@ -36,13 +36,13 @@ use metadata::csearch;
use middle::ty::{InstantiatedTraitRef, arg};
use middle::ty::{substs, ty_param_substs_and_ty};
use middle::ty;
use middle::typeck::astconv::{ast_conv, ty_of_arg};
use middle::typeck::astconv::{AstConv, ty_of_arg};
use middle::typeck::astconv::{ast_ty_to_ty};
use middle::typeck::astconv;
use middle::typeck::infer;
use middle::typeck::rscope::*;
use middle::typeck::rscope;
use middle::typeck::{crate_ctxt, lookup_def_tcx, no_params, write_ty_to_tcx};
use middle::typeck::{CrateCtxt, lookup_def_tcx, no_params, write_ty_to_tcx};
use util::common::{indenter, pluralize};
use util::ppaux;
use util::ppaux::bound_to_str;

@@ -61,7 +61,7 @@ use syntax::codemap;
use syntax::print::pprust::path_to_str;
use syntax::visit;

pub fn collect_item_types(ccx: @crate_ctxt, crate: @ast::crate) {
pub fn collect_item_types(ccx: @mut CrateCtxt, crate: @ast::crate) {

// FIXME (#2592): hooking into the "intrinsic" root module is crude.
// There ought to be a better approach. Attributes?

@@ -114,19 +114,18 @@ pub fn collect_item_types(ccx: @crate_ctxt, crate: @ast::crate) {
}));
}

pub impl @crate_ctxt {
fn to_ty<RS: region_scope Copy Durable>(
rs: RS, ast_ty: @ast::Ty) -> ty::t {

pub impl @mut CrateCtxt {
fn to_ty<RS: region_scope Copy Durable>(rs: RS, ast_ty: @ast::Ty)
-> ty::t {
ast_ty_to_ty(self, rs, ast_ty)
}
}

pub impl @crate_ctxt: ast_conv {
fn tcx() -> ty::ctxt { self.tcx }
fn ccx() -> @crate_ctxt { self }
pub impl CrateCtxt: AstConv {
fn tcx(@mut self) -> ty::ctxt { self.tcx }
fn ccx(@mut self) -> @mut CrateCtxt { self }

fn get_item_ty(id: ast::def_id) -> ty::ty_param_bounds_and_ty {
fn get_item_ty(@mut self, id: ast::def_id) -> ty::ty_param_bounds_and_ty {
if id.crate != ast::local_crate {
csearch::get_type(self.tcx, id)
} else {

@@ -145,18 +144,17 @@ pub impl @crate_ctxt: ast_conv {
}
}

fn ty_infer(span: span) -> ty::t {
fn ty_infer(@mut self, span: span) -> ty::t {
self.tcx.sess.span_bug(span,
~"found `ty_infer` in unexpected place");
}
}

pub fn get_enum_variant_types(ccx: @crate_ctxt,
pub fn get_enum_variant_types(ccx: @mut CrateCtxt,
enum_ty: ty::t,
variants: &[ast::variant],
ty_params: &[ast::ty_param],
rp: Option<ty::region_variance>)
{
+ty_params: ~[ast::ty_param],
rp: Option<ty::region_variance>) {
let tcx = ccx.tcx;

// Create a set of parameter types shared among all the variants.

@@ -193,9 +191,11 @@ pub fn get_enum_variant_types(ccx: @crate_ctxt,
}

ast::enum_variant_kind(ref enum_definition) => {
get_enum_variant_types(ccx, enum_ty,
get_enum_variant_types(ccx,
enum_ty,
enum_definition.variants,
ty_params, rp);
copy ty_params,
rp);
result_ty = None;
}
};

@@ -213,15 +213,17 @@ pub fn get_enum_variant_types(ccx: @crate_ctxt,
}
}

pub fn ensure_trait_methods(ccx: @crate_ctxt,
pub fn ensure_trait_methods(ccx: @mut CrateCtxt,
id: ast::node_id,
trait_ty: ty::t) {
fn store_methods<T>(ccx: @crate_ctxt, id: ast::node_id,
stuff: ~[T], f: &fn(v: &T) -> ty::method) {
fn store_methods<T>(ccx: @mut CrateCtxt,
id: ast::node_id,
stuff: ~[T],
f: &fn(v: &T) -> ty::method) {
ty::store_trait_methods(ccx.tcx, id, @vec::map(stuff, f));
}

fn make_static_method_ty(ccx: @crate_ctxt,
fn make_static_method_ty(ccx: @mut CrateCtxt,
am: ast::ty_method,
rp: Option<ty::region_variance>,
m: ty::method,

@@ -296,7 +298,7 @@ pub fn ensure_trait_methods(ccx: @crate_ctxt,
}
}

pub fn ensure_supertraits(ccx: @crate_ctxt,
pub fn ensure_supertraits(ccx: @mut CrateCtxt,
id: ast::node_id,
sp: codemap::span,
rp: Option<ty::region_variance>,

@@ -480,7 +482,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
}
}

pub fn check_methods_against_trait(ccx: @crate_ctxt,
pub fn check_methods_against_trait(ccx: @mut CrateCtxt,
tps: ~[ast::ty_param],
rp: Option<ty::region_variance>,
selfty: ty::t,

@@ -531,7 +533,7 @@ pub fn check_methods_against_trait(ccx: @crate_ctxt,
}
} // fn

pub fn convert_field(ccx: @crate_ctxt,
pub fn convert_field(ccx: @mut CrateCtxt,
rp: Option<ty::region_variance>,
bounds: @~[ty::param_bounds],
v: @ast::struct_field) {

@@ -551,7 +553,7 @@ pub struct ConvertedMethod {
body_id: ast::node_id
}

pub fn convert_methods(ccx: @crate_ctxt,
pub fn convert_methods(ccx: @mut CrateCtxt,
ms: ~[@ast::method],
rp: Option<ty::region_variance>,
rcvr_bounds: @~[ty::param_bounds])

@@ -576,7 +578,7 @@ pub fn convert_methods(ccx: @crate_ctxt,
}
}

pub fn ensure_no_ty_param_bounds(ccx: @crate_ctxt,
pub fn ensure_no_ty_param_bounds(ccx: @mut CrateCtxt,
span: span,
ty_params: &[ast::ty_param],
thing: &static/str) {

@@ -590,7 +592,7 @@ pub fn ensure_no_ty_param_bounds(ccx: @crate_ctxt,
}
}

pub fn convert(ccx: @crate_ctxt, it: @ast::item) {
pub fn convert(ccx: @mut CrateCtxt, it: @ast::item) {
let tcx = ccx.tcx;
let rp = tcx.region_paramd_items.find(&it.id);
debug!("convert: item %s with id %d rp %?",

@@ -602,8 +604,11 @@ pub fn convert(ccx: @crate_ctxt, it: @ast::item) {
ensure_no_ty_param_bounds(ccx, it.span, *ty_params, "enumeration");
let tpt = ty_of_item(ccx, it);
write_ty_to_tcx(tcx, it.id, tpt.ty);
get_enum_variant_types(ccx, tpt.ty, enum_definition.variants,
*ty_params, rp);
get_enum_variant_types(ccx,
tpt.ty,
enum_definition.variants,
copy *ty_params,
rp);
}
ast::item_impl(ref tps, trait_ref, selfty, ref ms) => {
let i_bounds = ty_param_bounds(ccx, *tps);

@@ -659,7 +664,7 @@ pub fn convert(ccx: @crate_ctxt, it: @ast::item) {
}
}

pub fn convert_struct(ccx: @crate_ctxt,
pub fn convert_struct(ccx: @mut CrateCtxt,
rp: Option<ty::region_variance>,
struct_def: @ast::struct_def,
+tps: ~[ast::ty_param],

@@ -716,7 +721,7 @@ pub fn convert_struct(ccx: @crate_ctxt,
}
}

pub fn convert_foreign(ccx: @crate_ctxt, i: @ast::foreign_item) {
pub fn convert_foreign(ccx: @mut CrateCtxt, i: @ast::foreign_item) {
// As above, this call populates the type table with the converted
// type of the foreign item. We simply write it into the node type
// table.

@@ -725,7 +730,7 @@ pub fn convert_foreign(ccx: @crate_ctxt, i: @ast::foreign_item) {
ccx.tcx.tcache.insert(local_def(i.id), tpt);
}

pub fn ty_of_method(ccx: @crate_ctxt,
pub fn ty_of_method(ccx: @mut CrateCtxt,
m: @ast::method,
rp: Option<ty::region_variance>) -> ty::method {
{ident: m.ident,

@@ -737,7 +742,7 @@ pub fn ty_of_method(ccx: @crate_ctxt,
def_id: local_def(m.id)}
}

pub fn ty_of_ty_method(self: @crate_ctxt,
pub fn ty_of_ty_method(self: @mut CrateCtxt,
m: ast::ty_method,
rp: Option<ty::region_variance>,
id: ast::def_id) -> ty::method {

@@ -756,7 +761,7 @@ pub fn ty_of_ty_method(self: @crate_ctxt,
it's bound to a valid trait type. Returns the def_id for the defining
trait. Fails if the type is a type other than an trait type.
*/
pub fn instantiate_trait_ref(ccx: @crate_ctxt, t: @ast::trait_ref,
pub fn instantiate_trait_ref(ccx: @mut CrateCtxt, t: @ast::trait_ref,
rp: Option<ty::region_variance>)
-> (ast::def_id, ty_param_substs_and_ty) {

@@ -780,9 +785,8 @@ pub fn instantiate_trait_ref(ccx: @crate_ctxt, t: @ast::trait_ref,
}
}

pub fn ty_of_item(ccx: @crate_ctxt, it: @ast::item)
-> ty::ty_param_bounds_and_ty {

pub fn ty_of_item(ccx: @mut CrateCtxt, it: @ast::item)
-> ty::ty_param_bounds_and_ty {
let def_id = local_def(it.id);
let tcx = ccx.tcx;
match tcx.tcache.find(&def_id) {

@@ -871,7 +875,7 @@ pub fn ty_of_item(ccx: @crate_ctxt, it: @ast::item)
}
}

pub fn ty_of_foreign_item(ccx: @crate_ctxt, it: @ast::foreign_item)
pub fn ty_of_foreign_item(ccx: @mut CrateCtxt, it: @ast::foreign_item)
-> ty::ty_param_bounds_and_ty {
match /*bad*/copy it.node {
ast::foreign_item_fn(fn_decl, _, params) => {

@@ -892,7 +896,7 @@ pub fn ty_of_foreign_item(ccx: @crate_ctxt, it: @ast::foreign_item)
// of a newtyped Ty or a region) to ty's notion of ty param bounds, which can
// either be user-defined traits, or one of the four built-in traits (formerly
// known as kinds): Const, Copy, Durable, and Send.
pub fn compute_bounds(ccx: @crate_ctxt,
pub fn compute_bounds(ccx: @mut CrateCtxt,
ast_bounds: @~[ast::ty_param_bound])
-> ty::param_bounds {
@do vec::flat_map(*ast_bounds) |b| {

@@ -928,8 +932,9 @@ pub fn compute_bounds(ccx: @crate_ctxt,
}
}

pub fn ty_param_bounds(ccx: @crate_ctxt,
params: &[ast::ty_param]) -> @~[ty::param_bounds] {
pub fn ty_param_bounds(ccx: @mut CrateCtxt,
params: ~[ast::ty_param])
-> @~[ty::param_bounds] {
@do params.map |param| {
match ccx.tcx.ty_param_bounds.find(&param.id) {
Some(bs) => bs,

@@ -942,7 +947,7 @@ pub fn ty_param_bounds(ccx: @crate_ctxt,
}
}

pub fn ty_of_foreign_fn_decl(ccx: @crate_ctxt,
pub fn ty_of_foreign_fn_decl(ccx: @mut CrateCtxt,
decl: ast::fn_decl,
+ty_params: ~[ast::ty_param],
def_id: ast::def_id)

@@ -964,7 +969,7 @@ pub fn ty_of_foreign_fn_decl(ccx: @crate_ctxt,
return tpt;
}

pub fn mk_ty_params(ccx: @crate_ctxt, atps: ~[ast::ty_param])
pub fn mk_ty_params(ccx: @mut CrateCtxt, atps: ~[ast::ty_param])
-> {bounds: @~[ty::param_bounds], params: ~[ty::t]} {

let mut i = 0u;

@@ -977,7 +982,7 @@ pub fn mk_ty_params(ccx: @crate_ctxt, atps: ~[ast::ty_param])
})}
}

pub fn mk_substs(ccx: @crate_ctxt,
pub fn mk_substs(ccx: @mut CrateCtxt,
+atps: ~[ast::ty_param],
rp: Option<ty::region_variance>)
-> {bounds: @~[ty::param_bounds], substs: ty::substs} {

@@ -78,7 +78,7 @@ pub fn macros() {
}

pub trait Combine {
fn infcx() -> @InferCtxt;
fn infcx() -> @mut InferCtxt;
fn tag() -> ~str;
fn a_is_expected() -> bool;
fn span() -> span;

@@ -113,7 +113,7 @@ pub trait Combine {
}

pub struct CombineFields {
infcx: @InferCtxt,
infcx: @mut InferCtxt,
a_is_expected: bool,
span: span,
}

@@ -28,7 +28,7 @@ use std::list;
pub enum Glb = CombineFields; // "greatest lower bound" (common subtype)

pub impl Glb: Combine {
fn infcx() -> @InferCtxt { self.infcx }
fn infcx() -> @mut InferCtxt { self.infcx }
fn tag() -> ~str { ~"glb" }
fn a_is_expected() -> bool { self.a_is_expected }
fn span() -> span { self.span }

@@ -37,7 +37,7 @@ pub impl Lub {
}

pub impl Lub: Combine {
fn infcx() -> @InferCtxt { self.infcx }
fn infcx() -> @mut InferCtxt { self.infcx }
fn tag() -> ~str { ~"lub" }
fn a_is_expected() -> bool { self.a_is_expected }
fn span() -> span { self.span }

@@ -313,15 +313,15 @@ pub struct InferCtxt {
// types that might instantiate a general type variable have an
// order, represented by its upper and lower bounds.
ty_var_bindings: ValsAndBindings<ty::TyVid, Bounds<ty::t>>,
mut ty_var_counter: uint,
ty_var_counter: uint,

// Map from integral variable to the kind of integer it represents
int_var_bindings: ValsAndBindings<ty::IntVid, Option<IntVarValue>>,
mut int_var_counter: uint,
int_var_counter: uint,

// Map from floating variable to the kind of float it represents
float_var_bindings: ValsAndBindings<ty::FloatVid, Option<ast::float_ty>>,
mut float_var_counter: uint,
float_var_counter: uint,

// For region variables.
region_vars: RegionVarBindings,

@@ -351,12 +351,12 @@ pub fn fixup_err_to_str(f: fixup_err) -> ~str {
fn new_ValsAndBindings<V:Copy, T:Copy>() -> ValsAndBindings<V, T> {
ValsAndBindings {
vals: oldsmallintmap::mk(),
mut bindings: ~[]
bindings: ~[]
}
}

pub fn new_infer_ctxt(tcx: ty::ctxt) -> @InferCtxt {
@InferCtxt {
pub fn new_infer_ctxt(tcx: ty::ctxt) -> @mut InferCtxt {
@mut InferCtxt {
tcx: tcx,

ty_var_bindings: new_ValsAndBindings(),

@@ -372,8 +372,12 @@ pub fn new_infer_ctxt(tcx: ty::ctxt) -> @InferCtxt {
}
}

pub fn mk_subty(cx: @InferCtxt, a_is_expected: bool, span: span,
a: ty::t, b: ty::t) -> ures {
pub fn mk_subty(cx: @mut InferCtxt,
a_is_expected: bool,
span: span,
a: ty::t,
b: ty::t)
-> ures {
debug!("mk_subty(%s <: %s)", a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.commit {

@@ -382,7 +386,7 @@ pub fn mk_subty(cx: @InferCtxt, a_is_expected: bool, span: span,
}.to_ures()
}

pub fn can_mk_subty(cx: @InferCtxt, a: ty::t, b: ty::t) -> ures {
pub fn can_mk_subty(cx: @mut InferCtxt, a: ty::t, b: ty::t) -> ures {
debug!("can_mk_subty(%s <: %s)", a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.probe {

@@ -391,8 +395,12 @@ pub fn can_mk_subty(cx: @InferCtxt, a: ty::t, b: ty::t) -> ures {
}.to_ures()
}

pub fn mk_subr(cx: @InferCtxt, a_is_expected: bool, span: span,
a: ty::Region, b: ty::Region) -> ures {
pub fn mk_subr(cx: @mut InferCtxt,
a_is_expected: bool,
span: span,
a: ty::Region,
b: ty::Region)
-> ures {
debug!("mk_subr(%s <: %s)", a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.commit {

@@ -401,8 +409,12 @@ pub fn mk_subr(cx: @InferCtxt, a_is_expected: bool, span: span,
}.to_ures()
}

pub fn mk_eqty(cx: @InferCtxt, a_is_expected: bool, span: span,
a: ty::t, b: ty::t) -> ures {
pub fn mk_eqty(cx: @mut InferCtxt,
a_is_expected: bool,
span: span,
a: ty::t,
b: ty::t)
-> ures {
debug!("mk_eqty(%s <: %s)", a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.commit {

@@ -412,8 +424,12 @@ pub fn mk_eqty(cx: @InferCtxt, a_is_expected: bool, span: span,
}.to_ures()
}

pub fn mk_coercety(cx: @InferCtxt, a_is_expected: bool, span: span,
a: ty::t, b: ty::t) -> CoerceResult {
pub fn mk_coercety(cx: @mut InferCtxt,
a_is_expected: bool,
span: span,
a: ty::t,
b: ty::t)
-> CoerceResult {
debug!("mk_coercety(%s -> %s)", a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.commit {

@@ -422,7 +438,7 @@ pub fn mk_coercety(cx: @InferCtxt, a_is_expected: bool, span: span,
}
}

pub fn can_mk_coercety(cx: @InferCtxt, a: ty::t, b: ty::t) -> ures {
pub fn can_mk_coercety(cx: @mut InferCtxt, a: ty::t, b: ty::t) -> ures {
debug!("can_mk_coercety(%s -> %s)", a.inf_str(cx), b.inf_str(cx));
do indent {
do cx.probe {

@@ -433,17 +449,22 @@ pub fn can_mk_coercety(cx: @InferCtxt, a: ty::t, b: ty::t) -> ures {
}

// See comment on the type `resolve_state` below
pub fn resolve_type(cx: @InferCtxt, a: ty::t, modes: uint) -> fres<ty::t> {
resolver(cx, modes).resolve_type_chk(a)
pub fn resolve_type(cx: @mut InferCtxt,
a: ty::t,
modes: uint)
-> fres<ty::t> {
let mut resolver = resolver(cx, modes);
resolver.resolve_type_chk(a)
}

pub fn resolve_region(cx: @InferCtxt, r: ty::Region, modes: uint)
pub fn resolve_region(cx: @mut InferCtxt, r: ty::Region, modes: uint)
-> fres<ty::Region> {
resolver(cx, modes).resolve_region_chk(r)
let mut resolver = resolver(cx, modes);
resolver.resolve_region_chk(r)
}

/*
fn resolve_borrowings(cx: @InferCtxt) {
fn resolve_borrowings(cx: @mut InferCtxt) {
for cx.borrowings.each |item| {
match resolve_region(cx, item.scope, resolve_all|force_all) {
Ok(region) => {

@@ -510,7 +531,7 @@ pub fn uok() -> ures {
}

fn rollback_to<V:Copy Vid, T:Copy>(
vb: &ValsAndBindings<V, T>,
vb: &mut ValsAndBindings<V, T>,
len: uint)
{
while vb.bindings.len() != len {

@@ -526,7 +547,7 @@ struct Snapshot {
region_vars_snapshot: uint,
}

impl @InferCtxt {
impl @mut InferCtxt {
fn combine_fields(a_is_expected: bool,
span: span) -> CombineFields {
CombineFields {infcx: self,

@@ -557,11 +578,11 @@ impl @InferCtxt {

fn rollback_to(snapshot: &Snapshot) {
debug!("rollback!");
rollback_to(&self.ty_var_bindings, snapshot.ty_var_bindings_len);
rollback_to(&mut self.ty_var_bindings, snapshot.ty_var_bindings_len);

rollback_to(&self.int_var_bindings,
rollback_to(&mut self.int_var_bindings,
snapshot.int_var_bindings_len);
rollback_to(&self.float_var_bindings,
rollback_to(&mut self.float_var_bindings,
snapshot.float_var_bindings_len);

self.region_vars.rollback_to(snapshot.region_vars_snapshot);

@@ -609,21 +630,21 @@ impl @InferCtxt {
}

fn next_simple_var<V: Copy,T: Copy>(
+counter: &mut uint,
+bindings: &ValsAndBindings<V,Option<T>>) -> uint
{
+counter: &mut uint,
+bindings: &mut ValsAndBindings<V,Option<T>>)
-> uint {
let id = *counter;
*counter += 1;
bindings.vals.insert(id, Root(None, 0));
return id;
}

impl @InferCtxt {
impl @mut InferCtxt {
fn next_ty_var_id() -> TyVid {
let id = self.ty_var_counter;
self.ty_var_counter += 1;
self.ty_var_bindings.vals.insert(id,
Root({lb: None, ub: None}, 0u));
let vals = self.ty_var_bindings.vals;
vals.insert(id, Root({lb: None, ub: None}, 0u));
return TyVid(id);
}

@@ -637,7 +658,7 @@ impl @InferCtxt {

fn next_int_var_id() -> IntVid {
IntVid(next_simple_var(&mut self.int_var_counter,
&self.int_var_bindings))
&mut self.int_var_bindings))
}

fn next_int_var() -> ty::t {

@@ -646,7 +667,7 @@ impl @InferCtxt {

fn next_float_var_id() -> FloatVid {
FloatVid(next_simple_var(&mut self.float_var_counter,
&self.float_var_bindings))
&mut self.float_var_bindings))
}

fn next_float_var() -> ty::t {

@@ -726,9 +747,9 @@ impl @InferCtxt {
}

fn replace_bound_regions_with_fresh_regions(
&self, span: span,
fsig: &ty::FnSig) -> (ty::FnSig, isr_alist)
{
span: span,
fsig: &ty::FnSig)
-> (ty::FnSig, isr_alist) {
let {fn_sig: fn_sig, isr: isr, _} =
replace_bound_regions_in_fn_sig(self.tcx, @Nil, None, fsig, |br| {
// N.B.: The name of the bound region doesn't have anything to
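
The inference hunks above consistently pass `&mut ValsAndBindings` into helpers such as `rollback_to` instead of mutating through fields that used to be marked `mut`. As a hedged, simplified sketch only (hypothetical names and types, not the compiler's own), the snapshot-and-rollback shape looks like this in modern Rust:

// Hypothetical, simplified shape of the snapshot/rollback pattern: the helper
// takes `&mut` to the bindings it shrinks rather than mutating through an
// interior `mut` field.
struct ValsAndBindings<T> {
    bindings: Vec<T>,
}

fn rollback_to<T>(vb: &mut ValsAndBindings<T>, len: usize) {
    // Undo everything recorded since the snapshot was taken.
    while vb.bindings.len() > len {
        vb.bindings.pop();
    }
}

fn main() {
    let mut vb = ValsAndBindings { bindings: vec![1, 2, 3, 4] };
    let snapshot_len = 2; // length recorded when the snapshot was taken
    rollback_to(&mut vb, snapshot_len);
    assert_eq!(vb.bindings, vec![1, 2]);
}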
@ -636,8 +636,8 @@ pub struct RegionVarBindings {
|
|||
constraints: HashMap<Constraint, span>,
|
||||
lubs: CombineMap,
|
||||
glbs: CombineMap,
|
||||
mut skolemization_count: uint,
|
||||
mut bound_count: uint,
|
||||
skolemization_count: uint,
|
||||
bound_count: uint,
|
||||
|
||||
// The undo log records actions that might later be undone.
|
||||
//
|
||||
|
@ -647,7 +647,7 @@ pub struct RegionVarBindings {
|
|||
// actively snapshotting. The reason for this is that otherwise
|
||||
// we end up adding entries for things like the lower bound on
|
||||
// a variable and so forth, which can never be rolled back.
|
||||
mut undo_log: ~[UndoLogEntry],
|
||||
undo_log: ~[UndoLogEntry],
|
||||
|
||||
// This contains the results of inference. It begins as an empty
|
||||
// cell and only acquires a value after inference is complete.
|
||||
|
@ -677,11 +677,11 @@ fn CombineMap() -> CombineMap {
|
|||
}
|
||||
|
||||
pub impl RegionVarBindings {
|
||||
fn in_snapshot(&self) -> bool {
|
||||
fn in_snapshot(&mut self) -> bool {
|
||||
self.undo_log.len() > 0
|
||||
}
|
||||
|
||||
fn start_snapshot(&self) -> uint {
|
||||
fn start_snapshot(&mut self) -> uint {
|
||||
debug!("RegionVarBindings: snapshot()=%u", self.undo_log.len());
|
||||
if self.in_snapshot() {
|
||||
self.undo_log.len()
|
||||
|
@ -691,14 +691,14 @@ pub impl RegionVarBindings {
|
|||
}
|
||||
}
|
||||
|
||||
fn commit(&self) {
|
||||
fn commit(&mut self) {
|
||||
debug!("RegionVarBindings: commit()");
|
||||
while self.undo_log.len() > 0 {
|
||||
self.undo_log.pop();
|
||||
}
|
||||
}
|
||||
|
||||
fn rollback_to(&self, snapshot: uint) {
|
||||
fn rollback_to(&mut self, snapshot: uint) {
|
||||
debug!("RegionVarBindings: rollback_to(%u)", snapshot);
|
||||
while self.undo_log.len() > snapshot {
|
||||
let undo_item = self.undo_log.pop();
|
||||
|
@ -719,11 +719,11 @@ pub impl RegionVarBindings {
|
|||
}
|
||||
}
|
||||
|
||||
fn num_vars(&self) -> uint {
|
||||
fn num_vars(&mut self) -> uint {
|
||||
self.var_spans.len()
|
||||
}
|
||||
|
||||
fn new_region_var(&self, span: span) -> RegionVid {
|
||||
fn new_region_var(&mut self, span: span) -> RegionVid {
|
||||
let id = self.num_vars();
|
||||
self.var_spans.push(span);
|
||||
let vid = RegionVid(id);
|
||||
|
@ -735,13 +735,13 @@ pub impl RegionVarBindings {
|
|||
return vid;
|
||||
}
|
||||
|
||||
fn new_skolemized(&self, br: ty::bound_region) -> Region {
|
||||
fn new_skolemized(&mut self, br: ty::bound_region) -> Region {
|
||||
let sc = self.skolemization_count;
|
||||
self.skolemization_count += 1;
|
||||
re_infer(ReSkolemized(sc, br))
|
||||
}
|
||||
|
||||
fn new_bound(&self) -> Region {
|
||||
fn new_bound(&mut self) -> Region {
|
||||
// Creates a fresh bound variable for use in GLB computations.
|
||||
// See discussion of GLB computation in the large comment at
|
||||
// the top of this file for more details.
|
||||
|
@ -761,7 +761,7 @@ pub impl RegionVarBindings {
|
|||
re_bound(br_fresh(sc))
|
||||
}
|
||||
|
||||
fn add_constraint(&self, +constraint: Constraint, span: span) {
|
||||
fn add_constraint(&mut self, +constraint: Constraint, span: span) {
|
||||
// cannot add constraints once regions are resolved
|
||||
assert self.values.is_empty();
|
||||
|
||||
|
@ -774,8 +774,10 @@ pub impl RegionVarBindings {
|
|||
}
|
||||
}
|
||||
|
||||
fn make_subregion(&self, span: span,
|
||||
sub: Region, sup: Region) -> cres<()> {
|
||||
fn make_subregion(&mut self,
|
||||
span: span,
|
||||
sub: Region,
|
||||
sup: Region) -> cres<()> {
|
||||
// cannot add constraints once regions are resolved
|
||||
assert self.values.is_empty();
|
||||
|
||||
|
@ -813,7 +815,11 @@ pub impl RegionVarBindings {
|
|||
}
|
||||
}
|
||||
|
||||
fn lub_regions(&self, span: span, a: Region, b: Region) -> cres<Region> {
|
||||
fn lub_regions(&mut self,
|
||||
span: span,
|
||||
a: Region,
|
||||
b: Region)
|
||||
-> cres<Region> {
|
||||
// cannot add constraints once regions are resolved
|
||||
assert self.values.is_empty();
|
||||
|
||||
|
@ -835,7 +841,11 @@ pub impl RegionVarBindings {
|
|||
}
|
||||
}
|
||||
|
||||
fn glb_regions(&self, span: span, a: Region, b: Region) -> cres<Region> {
|
||||
fn glb_regions(&mut self,
|
||||
span: span,
|
||||
a: Region,
|
||||
b: Region)
|
||||
-> cres<Region> {
|
||||
// cannot add constraints once regions are resolved
|
||||
assert self.values.is_empty();
|
||||
|
||||
|
@ -858,7 +868,7 @@ pub impl RegionVarBindings {
|
|||
}
|
||||
}
|
||||
|
||||
fn resolve_var(&self, rid: RegionVid) -> ty::Region {
|
||||
fn resolve_var(&mut self, rid: RegionVid) -> ty::Region {
|
||||
debug!("RegionVarBindings: resolve_var(%?=%u)", rid, *rid);
|
||||
if self.values.is_empty() {
|
||||
self.tcx.sess.span_bug(
|
||||
|
@ -900,14 +910,13 @@ pub impl RegionVarBindings {
|
|||
}
|
||||
}
|
||||
|
||||
fn combine_vars(&self,
|
||||
fn combine_vars(&mut self,
|
||||
combines: CombineMap,
|
||||
a: Region,
|
||||
b: Region,
|
||||
span: span,
|
||||
relate: fn(old_r: Region, new_r: Region) -> cres<()>)
|
||||
-> cres<Region> {
|
||||
|
||||
relate: &fn(old_r: Region, new_r: Region) -> cres<()>)
|
||||
-> cres<Region> {
|
||||
let vars = TwoRegions { a: a, b: b };
|
||||
match combines.find(&vars) {
|
||||
Some(c) => Ok(re_infer(ReVar(c))),
|
||||
|
@ -927,7 +936,9 @@ pub impl RegionVarBindings {
|
|||
}
|
||||
}
|
||||
|
||||
fn vars_created_since_snapshot(&self, snapshot: uint) -> ~[RegionVid] {
|
||||
fn vars_created_since_snapshot(&mut self,
|
||||
snapshot: uint)
|
||||
-> ~[RegionVid] {
|
||||
do vec::build |push| {
|
||||
for uint::range(snapshot, self.undo_log.len()) |i| {
|
||||
match self.undo_log[i] {
|
||||
|
@ -938,7 +949,7 @@ pub impl RegionVarBindings {
|
|||
}
|
||||
}
|
||||
|
||||
fn tainted(&self, snapshot: uint, r0: Region) -> ~[Region] {
|
||||
fn tainted(&mut self, snapshot: uint, r0: Region) -> ~[Region] {
|
||||
/*!
|
||||
*
|
||||
* Computes all regions that have been related to `r0` in any
|
||||
|
@ -1022,18 +1033,18 @@ pub impl RegionVarBindings {
|
|||
constraints, assuming such values can be found; if they cannot,
|
||||
errors are reported.
|
||||
*/
|
||||
fn resolve_regions(&self) {
|
||||
fn resolve_regions(&mut self) {
|
||||
debug!("RegionVarBindings: resolve_regions()");
|
||||
self.values.put_back(self.infer_variable_values());
|
||||
}
|
||||
}
|
||||
|
||||
priv impl RegionVarBindings {
|
||||
fn is_subregion_of(&self, sub: Region, sup: Region) -> bool {
|
||||
fn is_subregion_of(&mut self, sub: Region, sup: Region) -> bool {
|
||||
is_subregion_of(self.tcx.region_map, sub, sup)
|
||||
}
|
||||
|
||||
    fn lub_concrete_regions(&self, +a: Region, +b: Region) -> Region {
    fn lub_concrete_regions(&mut self, +a: Region, +b: Region) -> Region {
        match (a, b) {
            (re_static, _) | (_, re_static) => {
                re_static // nothing lives longer than static
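
lub_concrete_regions and its glb counterpart compute bounds on a small region lattice whose top element is re_static. A toy version of that lattice, assuming every block region sits on a single nesting path and ignoring the error cases the real code has to report:

#[derive(Clone, Copy, PartialEq, Debug)]
enum Region {
    Static,
    Block(u32), // nesting depth: smaller = outer block = lives longer
}

// Least upper bound: the smallest region outliving both inputs.
// 'static absorbs everything, mirroring "nothing lives longer than static".
fn lub(a: Region, b: Region) -> Region {
    match (a, b) {
        (Region::Static, _) | (_, Region::Static) => Region::Static,
        (Region::Block(x), Region::Block(y)) => Region::Block(x.min(y)),
    }
}

// Greatest lower bound: the largest region that both inputs outlive.
fn glb(a: Region, b: Region) -> Region {
    match (a, b) {
        (Region::Static, r) | (r, Region::Static) => r,
        (Region::Block(x), Region::Block(y)) => Region::Block(x.max(y)),
    }
}

fn main() {
    assert_eq!(lub(Region::Block(3), Region::Block(1)), Region::Block(1));
    assert_eq!(glb(Region::Static, Region::Block(2)), Region::Block(2));
}
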
@ -1090,7 +1101,10 @@ priv impl RegionVarBindings {
|
|||
}
|
||||
}
|
||||
|
||||
fn glb_concrete_regions(&self, +a: Region, +b: Region) -> cres<Region> {
|
||||
fn glb_concrete_regions(&mut self,
|
||||
+a: Region,
|
||||
+b: Region)
|
||||
-> cres<Region> {
|
||||
match (a, b) {
|
||||
(re_static, r) | (r, re_static) => {
|
||||
// static lives longer than everything else
|
||||
|
@ -1156,7 +1170,7 @@ priv impl RegionVarBindings {
|
|||
}
|
||||
}
|
||||
|
||||
fn report_type_error(&self, span: span, terr: &ty::type_err) {
|
||||
fn report_type_error(&mut self, span: span, terr: &ty::type_err) {
|
||||
let terr_str = ty::type_err_to_str(self.tcx, terr);
|
||||
self.tcx.sess.span_err(span, terr_str);
|
||||
}
|
||||
|
@@ -1164,35 +1178,23 @@ priv impl RegionVarBindings {

// ______________________________________________________________________

#[deriving_eq]
enum Direction { Incoming = 0, Outgoing = 1 }

impl Direction : cmp::Eq {
    pure fn eq(&self, other: &Direction) -> bool {
        ((*self) as uint) == ((*other) as uint)
    }
    pure fn ne(&self, other: &Direction) -> bool { !(*self).eq(other) }
}

#[deriving_eq]
enum Classification { Expanding, Contracting }

impl Classification : cmp::Eq {
    pure fn eq(&self, other: &Classification) -> bool {
        ((*self) as uint) == ((*other) as uint)
    }
    pure fn ne(&self, other: &Classification) -> bool { !(*self).eq(other) }
}

enum GraphNodeValue { NoValue, Value(Region), ErrorValue }

struct GraphNode {
    span: span,
    mut classification: Classification,
    mut value: GraphNodeValue,
    head_edge: [mut uint * 2], // FIXME(#3226)--should not need mut
    classification: Classification,
    value: GraphNodeValue,
    head_edge: [uint * 2],
}

struct GraphEdge {
    next_edge: [mut uint * 2], // FIXME(#3226)--should not need mut
    next_edge: [uint * 2],
    constraint: Constraint,
    span: span,
}
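
GraphNode and GraphEdge above thread two intrusive singly-linked edge lists through the edge vector, one per Direction: head_edge[d] is a node's first edge in direction d, each edge's next_edge[d] continues that list, and uint::max_value marks the end. A self-contained sketch of the same adjacency layout in present-day Rust (the Graph and add_edge helpers are invented for the example):

const SENTINEL: usize = usize::MAX;
const INCOMING: usize = 0;
const OUTGOING: usize = 1;

struct Node {
    // Head of the edge list for each direction, or SENTINEL if empty.
    head_edge: [usize; 2],
}

struct Edge {
    // Next edge in each direction's list, or SENTINEL at the end.
    next_edge: [usize; 2],
    from: usize,
    to: usize,
}

struct Graph {
    nodes: Vec<Node>,
    edges: Vec<Edge>,
}

impl Graph {
    fn new(num_nodes: usize) -> Graph {
        Graph {
            nodes: (0..num_nodes).map(|_| Node { head_edge: [SENTINEL; 2] }).collect(),
            edges: Vec::new(),
        }
    }

    // Push a new edge onto the front of both endpoint lists.
    fn add_edge(&mut self, from: usize, to: usize) {
        let idx = self.edges.len();
        let next_edge = [self.nodes[to].head_edge[INCOMING],
                         self.nodes[from].head_edge[OUTGOING]];
        self.edges.push(Edge { next_edge, from, to });
        self.nodes[to].head_edge[INCOMING] = idx;
        self.nodes[from].head_edge[OUTGOING] = idx;
    }

    // Walk the edge list of `node` in the given direction.
    fn each_edge(&self, node: usize, dir: usize, mut f: impl FnMut(&Edge)) {
        let mut cur = self.nodes[node].head_edge[dir];
        while cur != SENTINEL {
            let edge = &self.edges[cur];
            f(edge);
            cur = edge.next_edge[dir];
        }
    }
}

fn main() {
    let mut g = Graph::new(3);
    g.add_edge(0, 1);
    g.add_edge(2, 1);
    let mut count = 0;
    g.each_edge(1, INCOMING, |_| count += 1);
    assert_eq!(count, 2); // node 1 has two incoming edges
}
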
@ -1214,14 +1216,14 @@ fn TwoRegionsMap() -> TwoRegionsMap {
|
|||
}
|
||||
|
||||
impl RegionVarBindings {
|
||||
fn infer_variable_values(&self) -> ~[GraphNodeValue] {
|
||||
let graph = self.construct_graph();
|
||||
self.expansion(&graph);
|
||||
self.contraction(&graph);
|
||||
self.extract_values_and_report_conflicts(&graph)
|
||||
fn infer_variable_values(&mut self) -> ~[GraphNodeValue] {
|
||||
let mut graph = self.construct_graph();
|
||||
self.expansion(&mut graph);
|
||||
self.contraction(&mut graph);
|
||||
self.extract_values_and_report_conflicts(&mut graph)
|
||||
}
|
||||
|
||||
fn construct_graph(&self) -> Graph {
|
||||
fn construct_graph(&mut self) -> Graph {
|
||||
let num_vars = self.num_vars();
|
||||
let num_edges = self.constraints.len();
|
||||
|
||||
|
@ -1234,7 +1236,7 @@ impl RegionVarBindings {
|
|||
classification: Contracting,
|
||||
span: self.var_spans[var_idx],
|
||||
value: NoValue,
|
||||
head_edge: [mut uint::max_value, uint::max_value]
|
||||
head_edge: [uint::max_value, uint::max_value]
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -1242,7 +1244,7 @@ impl RegionVarBindings {
|
|||
let mut edges = vec::with_capacity(num_edges);
|
||||
for self.constraints.each_ref |constraint, span| {
|
||||
edges.push(GraphEdge {
|
||||
next_edge: [mut uint::max_value, uint::max_value],
|
||||
next_edge: [uint::max_value, uint::max_value],
|
||||
constraint: *constraint,
|
||||
span: *span
|
||||
});
|
||||
|
@ -1282,18 +1284,18 @@ impl RegionVarBindings {
|
|||
}
|
||||
}
|
||||
|
||||
fn expansion(&self, graph: &Graph) {
|
||||
fn expansion(&mut self, graph: &mut Graph) {
|
||||
do self.iterate_until_fixed_point(~"Expansion", graph) |edge| {
|
||||
match edge.constraint {
|
||||
ConstrainRegSubVar(copy a_region, copy b_vid) => {
|
||||
let b_node = &graph.nodes[*b_vid];
|
||||
let b_node = &mut graph.nodes[*b_vid];
|
||||
self.expand_node(a_region, b_vid, b_node)
|
||||
}
|
||||
ConstrainVarSubVar(copy a_vid, copy b_vid) => {
|
||||
match graph.nodes[*a_vid].value {
|
||||
NoValue | ErrorValue => false,
|
||||
Value(copy a_region) => {
|
||||
let b_node = &graph.nodes[*b_vid];
|
||||
let b_node = &mut graph.nodes[*b_vid];
|
||||
self.expand_node(a_region, b_vid, b_node)
|
||||
}
|
||||
}
|
||||
|
@ -1306,10 +1308,11 @@ impl RegionVarBindings {
|
|||
}
|
||||
}
|
||||
|
||||
fn expand_node(&self,
|
||||
fn expand_node(&mut self,
|
||||
a_region: Region,
|
||||
b_vid: RegionVid,
|
||||
b_node: &GraphNode) -> bool {
|
||||
b_node: &mut GraphNode)
|
||||
-> bool {
|
||||
debug!("expand_node(%?, %? == %?)",
|
||||
a_region, b_vid, b_node.value);
|
||||
|
||||
|
@ -1341,7 +1344,7 @@ impl RegionVarBindings {
|
|||
}
|
||||
}
|
||||
|
||||
fn contraction(&self, graph: &Graph) {
|
||||
fn contraction(&mut self, graph: &mut Graph) {
|
||||
do self.iterate_until_fixed_point(~"Contraction", graph) |edge| {
|
||||
match edge.constraint {
|
||||
ConstrainRegSubVar(*) => {
|
||||
|
@ -1352,23 +1355,24 @@ impl RegionVarBindings {
|
|||
match graph.nodes[*b_vid].value {
|
||||
NoValue | ErrorValue => false,
|
||||
Value(copy b_region) => {
|
||||
let a_node = &graph.nodes[*a_vid];
|
||||
let a_node = &mut graph.nodes[*a_vid];
|
||||
self.contract_node(a_vid, a_node, b_region)
|
||||
}
|
||||
}
|
||||
}
|
||||
ConstrainVarSubReg(copy a_vid, copy b_region) => {
|
||||
let a_node = &graph.nodes[*a_vid];
|
||||
let a_node = &mut graph.nodes[*a_vid];
|
||||
self.contract_node(a_vid, a_node, b_region)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn contract_node(&self,
|
||||
fn contract_node(&mut self,
|
||||
a_vid: RegionVid,
|
||||
a_node: &GraphNode,
|
||||
b_region: Region) -> bool {
|
||||
a_node: &mut GraphNode,
|
||||
b_region: Region)
|
||||
-> bool {
|
||||
debug!("contract_node(%? == %?/%?, %?)",
|
||||
a_vid, a_node.value, a_node.classification, b_region);
|
||||
|
||||
|
@ -1395,11 +1399,12 @@ impl RegionVarBindings {
|
|||
}
|
||||
};
|
||||
|
||||
fn check_node(self: &RegionVarBindings,
|
||||
fn check_node(+self: &mut RegionVarBindings,
|
||||
a_vid: RegionVid,
|
||||
a_node: &GraphNode,
|
||||
+a_node: &mut GraphNode,
|
||||
a_region: Region,
|
||||
b_region: Region) -> bool {
|
||||
b_region: Region)
|
||||
-> bool {
|
||||
if !self.is_subregion_of(a_region, b_region) {
|
||||
debug!("Setting %? to ErrorValue: %? not subregion of %?",
|
||||
a_vid, a_region, b_region);
|
||||
|
@ -1408,11 +1413,12 @@ impl RegionVarBindings {
|
|||
false
|
||||
}
|
||||
|
||||
fn adjust_node(self: &RegionVarBindings,
|
||||
fn adjust_node(+self: &mut RegionVarBindings,
|
||||
a_vid: RegionVid,
|
||||
a_node: &GraphNode,
|
||||
+a_node: &mut GraphNode,
|
||||
a_region: Region,
|
||||
b_region: Region) -> bool {
|
||||
b_region: Region)
|
||||
-> bool {
|
||||
match self.glb_concrete_regions(a_region, b_region) {
|
||||
Ok(glb) => {
|
||||
if glb == a_region {
|
||||
|
@ -1432,12 +1438,10 @@ impl RegionVarBindings {
|
|||
}
|
||||
}
|
||||
|
||||
    fn iterate_until_fixed_point(
        &self,
        tag: ~str,
        graph: &Graph,
        body: fn(edge: &GraphEdge) -> bool)
    {
    fn iterate_until_fixed_point(&mut self,
                                 tag: ~str,
                                 graph: &mut Graph,
                                 body: &fn(edge: &GraphEdge) -> bool) {
        let mut iteration = 0;
        let mut changed = true;
        let num_edges = graph.edges.len();
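
iterate_until_fixed_point is a plain fixed-point sweep: apply body to every edge, and repeat whole passes until one of them reports no change. A stripped-down version of that loop, with a toy set of `a <= b` constraints standing in for the region graph:

// Keep sweeping all items until one full pass reports no change.
fn iterate_until_fixed_point<T>(items: &[T], mut relax: impl FnMut(&T) -> bool) {
    let mut iteration = 0;
    let mut changed = true;
    while changed {
        changed = false;
        iteration += 1;
        for item in items {
            if relax(item) {
                changed = true;
            }
        }
    }
    println!("fixed point after {} iteration(s)", iteration);
}

fn main() {
    // Toy use: push values forward along a <= b constraints until nothing grows.
    let constraints = [(0usize, 1usize), (1, 2)];
    let mut values = vec![5u32, 0, 0];
    iterate_until_fixed_point(&constraints, |&(a, b)| {
        if values[b] < values[a] {
            values[b] = values[a];
            true // something changed; another pass is needed
        } else {
            false
        }
    });
    assert_eq!(values, vec![5, 5, 5]);
}

The loop terminates whenever each relaxation step is monotonic over a finite domain, which is the situation the expansion and contraction phases rely on.
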
@ -1454,10 +1458,9 @@ impl RegionVarBindings {
|
|||
debug!("---- %s Complete after %u iteration(s)", tag, iteration);
|
||||
}
|
||||
|
||||
fn extract_values_and_report_conflicts(
|
||||
&self,
|
||||
graph: &Graph) -> ~[GraphNodeValue]
|
||||
{
|
||||
fn extract_values_and_report_conflicts(&mut self,
|
||||
graph: &mut Graph)
|
||||
-> ~[GraphNodeValue] {
|
||||
let dup_map = TwoRegionsMap();
|
||||
graph.nodes.mapi(|idx, node| {
|
||||
match node.value {
|
||||
|
@ -1512,16 +1515,17 @@ impl RegionVarBindings {
|
|||
}
|
||||
|
||||
// Used to suppress reporting the same basic error over and over
|
||||
fn is_reported(&self,
|
||||
fn is_reported(&mut self,
|
||||
dup_map: TwoRegionsMap,
|
||||
r_a: Region,
|
||||
r_b: Region) -> bool {
|
||||
r_b: Region)
|
||||
-> bool {
|
||||
let key = TwoRegions { a: r_a, b: r_b };
|
||||
!dup_map.insert(key, ())
|
||||
}
|
||||
|
||||
fn report_error_for_expanding_node(&self,
|
||||
graph: &Graph,
|
||||
fn report_error_for_expanding_node(&mut self,
|
||||
graph: &mut Graph,
|
||||
dup_map: TwoRegionsMap,
|
||||
node_idx: RegionVid) {
|
||||
// Errors in expanding nodes result from a lower-bound that is
|
||||
|
@ -1573,8 +1577,8 @@ impl RegionVarBindings {
|
|||
}
|
||||
}
|
||||
|
||||
fn report_error_for_contracting_node(&self,
|
||||
graph: &Graph,
|
||||
fn report_error_for_contracting_node(&mut self,
|
||||
graph: &mut Graph,
|
||||
dup_map: TwoRegionsMap,
|
||||
node_idx: RegionVid) {
|
||||
// Errors in contracting nodes result from two upper-bounds
|
||||
|
@ -1627,10 +1631,11 @@ impl RegionVarBindings {
|
|||
}
|
||||
}
|
||||
|
||||
fn collect_concrete_regions(&self,
|
||||
graph: &Graph,
|
||||
fn collect_concrete_regions(&mut self,
|
||||
graph: &mut Graph,
|
||||
orig_node_idx: RegionVid,
|
||||
dir: Direction) -> ~[SpannedRegion] {
|
||||
dir: Direction)
|
||||
-> ~[SpannedRegion] {
|
||||
let set = HashMap();
|
||||
let mut stack = ~[orig_node_idx];
|
||||
set.insert(*orig_node_idx, ());
|
||||
|
@ -1670,8 +1675,8 @@ impl RegionVarBindings {
|
|||
return result;
|
||||
}
|
||||
|
||||
fn each_edge(&self,
|
||||
graph: &Graph,
|
||||
fn each_edge(&mut self,
|
||||
graph: &mut Graph,
|
||||
node_idx: RegionVid,
|
||||
dir: Direction,
|
||||
op: fn(edge: &GraphEdge) -> bool) {
@ -83,14 +83,14 @@ pub const resolve_and_force_all_but_regions: uint =
|
|||
(resolve_all | force_all) & not_regions;
|
||||
|
||||
pub struct ResolveState {
|
||||
infcx: @InferCtxt,
|
||||
infcx: @mut InferCtxt,
|
||||
modes: uint,
|
||||
mut err: Option<fixup_err>,
|
||||
mut v_seen: ~[TyVid],
|
||||
mut type_depth: uint
|
||||
err: Option<fixup_err>,
|
||||
v_seen: ~[TyVid],
|
||||
type_depth: uint
|
||||
}
|
||||
|
||||
pub fn resolver(infcx: @InferCtxt, modes: uint) -> ResolveState {
|
||||
pub fn resolver(infcx: @mut InferCtxt, modes: uint) -> ResolveState {
|
||||
ResolveState {
|
||||
infcx: infcx,
|
||||
modes: modes,
|
||||
|
@@ -101,11 +101,11 @@ pub fn resolver(infcx: @InferCtxt, modes: uint) -> ResolveState {
}

pub impl ResolveState {
    fn should(&self, mode: uint) -> bool {
    fn should(&mut self, mode: uint) -> bool {
        (self.modes & mode) == mode
    }
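
should(mode) is a plain bitmask test: the resolution behaviours are OR'd together into modes, and a behaviour is considered enabled only when every one of its bits is present. A tiny illustration with made-up flag names:

const RESOLVE_TVAR: u32 = 0b0001;
const RESOLVE_RVAR: u32 = 0b0010;
const FORCE_TVAR:   u32 = 0b0100;
const RESOLVE_ALL:  u32 = RESOLVE_TVAR | RESOLVE_RVAR;

struct ResolveState {
    modes: u32,
}

impl ResolveState {
    // True only if every bit in `mode` is present in `self.modes`.
    fn should(&self, mode: u32) -> bool {
        (self.modes & mode) == mode
    }
}

fn main() {
    let rs = ResolveState { modes: RESOLVE_ALL };
    assert!(rs.should(RESOLVE_RVAR));
    assert!(rs.should(RESOLVE_TVAR | RESOLVE_RVAR));
    assert!(!rs.should(FORCE_TVAR));
}
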
fn resolve_type_chk(&self, typ: ty::t) -> fres<ty::t> {
|
||||
fn resolve_type_chk(&mut self, typ: ty::t) -> fres<ty::t> {
|
||||
self.err = None;
|
||||
|
||||
debug!("Resolving %s (modes=%x)",
|
||||
|
@ -130,7 +130,7 @@ pub impl ResolveState {
|
|||
}
|
||||
}
|
||||
|
||||
fn resolve_region_chk(&self, orig: ty::Region) -> fres<ty::Region> {
|
||||
fn resolve_region_chk(&mut self, orig: ty::Region) -> fres<ty::Region> {
|
||||
self.err = None;
|
||||
let resolved = indent(|| self.resolve_region(orig) );
|
||||
match self.err {
|
||||
|
@ -139,7 +139,7 @@ pub impl ResolveState {
|
|||
}
|
||||
}
|
||||
|
||||
fn resolve_type(&self, typ: ty::t) -> ty::t {
|
||||
fn resolve_type(&mut self, typ: ty::t) -> ty::t {
|
||||
debug!("resolve_type(%s)", typ.inf_str(self.infcx));
|
||||
let _i = indenter();
|
||||
|
||||
|
@ -181,7 +181,7 @@ pub impl ResolveState {
|
|||
}
|
||||
}
|
||||
|
||||
fn resolve_region(&self, orig: ty::Region) -> ty::Region {
|
||||
fn resolve_region(&mut self, orig: ty::Region) -> ty::Region {
|
||||
debug!("Resolve_region(%s)", orig.inf_str(self.infcx));
|
||||
match orig {
|
||||
ty::re_infer(ty::ReVar(rid)) => self.resolve_region_var(rid),
|
||||
|
@ -189,14 +189,14 @@ pub impl ResolveState {
|
|||
}
|
||||
}
|
||||
|
||||
fn resolve_region_var(&self, rid: RegionVid) -> ty::Region {
|
||||
fn resolve_region_var(&mut self, rid: RegionVid) -> ty::Region {
|
||||
if !self.should(resolve_rvar) {
|
||||
return ty::re_infer(ty::ReVar(rid));
|
||||
}
|
||||
self.infcx.region_vars.resolve_var(rid)
|
||||
}
|
||||
|
||||
fn assert_not_rvar(&self, rid: RegionVid, r: ty::Region) {
|
||||
fn assert_not_rvar(&mut self, rid: RegionVid, r: ty::Region) {
|
||||
match r {
|
||||
ty::re_infer(ty::ReVar(rid2)) => {
|
||||
self.err = Some(region_var_bound_by_region_var(rid, rid2));
|
||||
|
@ -205,7 +205,7 @@ pub impl ResolveState {
|
|||
}
|
||||
}
|
||||
|
||||
    fn resolve_ty_var(&self, vid: TyVid) -> ty::t {
    fn resolve_ty_var(&mut self, vid: TyVid) -> ty::t {
        if vec::contains(self.v_seen, &vid) {
            self.err = Some(cyclic_ty(vid));
            return ty::mk_var(self.infcx.tcx, vid);
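
resolve_ty_var guards against cyclic substitutions with the v_seen stack: finding a variable that is already on the stack means the chain of bindings loops back on itself, so an error is recorded instead of recursing forever. A small sketch of that guard over a plain substitution map (the Resolver and Ty types are invented for the example):

use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Int,
    Var(u32),
}

struct Resolver {
    table: HashMap<u32, Ty>, // variable -> what it was unified with
    v_seen: Vec<u32>,        // variables on the current resolution path
    err: Option<String>,
}

impl Resolver {
    fn resolve_var(&mut self, vid: u32) -> Ty {
        if self.v_seen.contains(&vid) {
            // Already resolving `vid` further up the stack: a cycle.
            self.err = Some(format!("cyclic type variable {}", vid));
            return Ty::Var(vid);
        }
        self.v_seen.push(vid);
        let result = match self.table.get(&vid).cloned() {
            Some(Ty::Var(other)) => self.resolve_var(other),
            Some(t) => t,
            None => Ty::Var(vid), // still unconstrained
        };
        self.v_seen.pop();
        result
    }
}

fn main() {
    let mut r = Resolver {
        table: HashMap::from([(0, Ty::Var(1)), (1, Ty::Var(0))]),
        v_seen: Vec::new(),
        err: None,
    };
    let t = r.resolve_var(0);
    assert_eq!(t, Ty::Var(0));
    assert!(r.err.is_some()); // the 0 -> 1 -> 0 cycle was detected
}
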
@ -238,7 +238,7 @@ pub impl ResolveState {
|
|||
}
|
||||
}
|
||||
|
||||
fn resolve_int_var(&self, vid: IntVid) -> ty::t {
|
||||
fn resolve_int_var(&mut self, vid: IntVid) -> ty::t {
|
||||
if !self.should(resolve_ivar) {
|
||||
return ty::mk_int_var(self.infcx.tcx, vid);
|
||||
}
|
||||
|
@ -261,7 +261,7 @@ pub impl ResolveState {
|
|||
}
|
||||
}
|
||||
|
||||
fn resolve_float_var(&self, vid: FloatVid) -> ty::t {
|
||||
fn resolve_float_var(&mut self, vid: FloatVid) -> ty::t {
|
||||
if !self.should(resolve_fvar) {
|
||||
return ty::mk_float_var(self.infcx.tcx, vid);
|
||||
}
|
||||
|
|
|
@ -33,7 +33,7 @@ pub fn macros() {
|
|||
pub enum Sub = CombineFields; // "subtype", "subregion" etc
|
||||
|
||||
pub impl Sub: Combine {
|
||||
fn infcx() -> @InferCtxt { self.infcx }
|
||||
fn infcx() -> @mut InferCtxt { self.infcx }
|
||||
fn tag() -> ~str { ~"sub" }
|
||||
fn a_is_expected() -> bool { self.a_is_expected }
|
||||
fn span() -> span { self.span }
|
||||
|
|
|
@ -28,7 +28,7 @@ pub enum VarValue<V, T> {
|
|||
|
||||
pub struct ValsAndBindings<V, T> {
|
||||
vals: SmallIntMap<VarValue<V, T>>,
|
||||
mut bindings: ~[(V, VarValue<V, T>)],
|
||||
bindings: ~[(V, VarValue<V, T>)],
|
||||
}
|
||||
|
||||
pub struct Node<V, T> {
|
||||
|
@ -38,15 +38,14 @@ pub struct Node<V, T> {
|
|||
}
|
||||
|
||||
pub trait UnifyVid<T> {
|
||||
static fn appropriate_vals_and_bindings(infcx: &v/InferCtxt)
|
||||
-> &v/ValsAndBindings<Self, T>;
|
||||
static fn appropriate_vals_and_bindings(infcx: &v/mut InferCtxt)
|
||||
-> &v/mut ValsAndBindings<Self, T>;
|
||||
}
|
||||
|
||||
pub impl InferCtxt {
    fn get<T:Copy, V:Copy Eq Vid UnifyVid<T>>(
        &self,
        +vid: V) -> Node<V, T>
    {
    fn get<T:Copy, V:Copy Eq Vid UnifyVid<T>>(&mut self,
                                              +vid: V)
                                           -> Node<V, T> {
        /*!
         *
         * Find the root node for `vid`. This uses the standard

@@ -79,10 +78,9 @@ pub impl InferCtxt {
    }

    fn set<T:Copy InferStr, V:Copy Vid ToStr UnifyVid<T>>(
        &self,
        +vid: V,
        +new_v: VarValue<V, T>)
    {
        &mut self,
        +vid: V,
        +new_v: VarValue<V, T>) {
        /*!
         *
         * Sets the value for `vid` to `new_v`. `vid` MUST be a root node!

@@ -98,10 +96,10 @@ pub impl InferCtxt {
    }

    fn unify<T:Copy InferStr, V:Copy Vid ToStr UnifyVid<T>, R>(
        &self,
        node_a: &Node<V, T>,
        node_b: &Node<V, T>,
        op: &fn(new_root: V, new_rank: uint) -> R
        &mut self,
        node_a: &Node<V, T>,
        node_b: &Node<V, T>,
        op: &fn(new_root: V, new_rank: uint) -> R
    ) -> R {
        // Rank optimization: if you don't know what it is, check
        // out <http://en.wikipedia.org/wiki/Disjoint-set_data_structure>
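
get, set and unify above are the usual union-find operations: get chases Redirect entries to the root, set overwrites a root's value, and unify merges two roots using the rank optimization the comment points at. A compact stand-alone sketch of that data structure outside the inference context (illustrative names; path compression is included here, which the surrounding code may or may not do):

// Classic disjoint-set forest with union by rank and path compression.
#[derive(Clone, Copy)]
enum VarValue {
    Redirect(usize), // this variable has been merged into another
    Root(u32),       // rank of the tree rooted here
}

struct UnificationTable {
    vals: Vec<VarValue>,
}

impl UnificationTable {
    fn new_var(&mut self) -> usize {
        self.vals.push(VarValue::Root(0));
        self.vals.len() - 1
    }

    // Find the root of `vid`, pointing `vid` directly at it afterwards.
    fn get(&mut self, vid: usize) -> usize {
        let to = match self.vals[vid] {
            VarValue::Root(_) => return vid,
            VarValue::Redirect(to) => to,
        };
        let root = self.get(to);
        self.vals[vid] = VarValue::Redirect(root); // path compression
        root
    }

    // Merge the equivalence classes of `a` and `b`.
    fn unify(&mut self, a: usize, b: usize) {
        let (ra, rb) = (self.get(a), self.get(b));
        if ra == rb {
            return;
        }
        let (rank_a, rank_b) = match (self.vals[ra], self.vals[rb]) {
            (VarValue::Root(x), VarValue::Root(y)) => (x, y),
            _ => unreachable!("get() always returns a root"),
        };
        // Union by rank: hang the shallower tree under the deeper one.
        if rank_a < rank_b {
            self.vals[ra] = VarValue::Redirect(rb);
        } else if rank_b < rank_a {
            self.vals[rb] = VarValue::Redirect(ra);
        } else {
            self.vals[rb] = VarValue::Redirect(ra);
            self.vals[ra] = VarValue::Root(rank_a + 1);
        }
    }
}

fn main() {
    let mut t = UnificationTable { vals: Vec::new() };
    let (a, b, c) = (t.new_var(), t.new_var(), t.new_var());
    t.unify(a, b);
    t.unify(b, c);
    assert_eq!(t.get(a), t.get(c));
}

Together the two optimizations keep each lookup close to constant time in practice.
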
@ -154,11 +152,11 @@ pub fn mk_err<T: SimplyUnifiable>(+a_is_expected: bool,
|
|||
pub impl InferCtxt {
|
||||
fn simple_vars<T:Copy Eq InferStr SimplyUnifiable,
|
||||
V:Copy Eq Vid ToStr UnifyVid<Option<T>>>(
|
||||
&self,
|
||||
+a_is_expected: bool,
|
||||
+a_id: V,
|
||||
+b_id: V) -> ures
|
||||
{
|
||||
&mut self,
|
||||
+a_is_expected: bool,
|
||||
+a_id: V,
|
||||
+b_id: V)
|
||||
-> ures {
|
||||
/*!
|
||||
*
|
||||
* Unifies two simple variables. Because simple variables do
|
||||
|
@ -193,10 +191,11 @@ pub impl InferCtxt {
|
|||
|
||||
fn simple_var_t<T:Copy Eq InferStr SimplyUnifiable,
|
||||
V:Copy Eq Vid ToStr UnifyVid<Option<T>>>(
|
||||
+a_is_expected: bool,
|
||||
+a_id: V,
|
||||
+b: T) -> ures
|
||||
{
|
||||
&mut self,
|
||||
+a_is_expected: bool,
|
||||
+a_id: V,
|
||||
+b: T)
|
||||
-> ures {
|
||||
/*!
|
||||
*
|
||||
* Sets the value of the variable `a_id` to `b`. Because
|
||||
|
@ -227,41 +226,36 @@ pub impl InferCtxt {
|
|||
// ______________________________________________________________________
|
||||
|
||||
pub impl ty::TyVid : UnifyVid<Bounds<ty::t>> {
|
||||
static fn appropriate_vals_and_bindings(infcx: &v/InferCtxt)
|
||||
-> &v/ValsAndBindings<ty::TyVid, Bounds<ty::t>>
|
||||
{
|
||||
return &infcx.ty_var_bindings;
|
||||
static fn appropriate_vals_and_bindings(infcx: &v/mut InferCtxt)
|
||||
-> &v/mut ValsAndBindings<ty::TyVid, Bounds<ty::t>> {
|
||||
return &mut infcx.ty_var_bindings;
|
||||
}
|
||||
}
|
||||
|
||||
pub impl ty::IntVid : UnifyVid<Option<IntVarValue>> {
|
||||
static fn appropriate_vals_and_bindings(infcx: &v/InferCtxt)
|
||||
-> &v/ValsAndBindings<ty::IntVid, Option<IntVarValue>>
|
||||
{
|
||||
return &infcx.int_var_bindings;
|
||||
static fn appropriate_vals_and_bindings(infcx: &v/mut InferCtxt)
|
||||
-> &v/mut ValsAndBindings<ty::IntVid, Option<IntVarValue>> {
|
||||
return &mut infcx.int_var_bindings;
|
||||
}
|
||||
}
|
||||
|
||||
pub impl IntVarValue : SimplyUnifiable {
|
||||
static fn to_type_err(err: expected_found<IntVarValue>)
|
||||
-> ty::type_err
|
||||
{
|
||||
-> ty::type_err {
|
||||
return ty::terr_int_mismatch(err);
|
||||
}
|
||||
}
|
||||
|
||||
pub impl ty::FloatVid : UnifyVid<Option<ast::float_ty>> {
|
||||
static fn appropriate_vals_and_bindings(infcx: &v/InferCtxt)
|
||||
-> &v/ValsAndBindings<ty::FloatVid, Option<ast::float_ty>>
|
||||
{
|
||||
return &infcx.float_var_bindings;
|
||||
static fn appropriate_vals_and_bindings(infcx: &v/mut InferCtxt)
|
||||
-> &v/mut ValsAndBindings<ty::FloatVid, Option<ast::float_ty>> {
|
||||
return &mut infcx.float_var_bindings;
|
||||
}
|
||||
}
|
||||
|
||||
pub impl ast::float_ty : SimplyUnifiable {
|
||||
static fn to_type_err(err: expected_found<ast::float_ty>)
|
||||
-> ty::type_err
|
||||
{
|
||||
-> ty::type_err {
|
||||
return ty::terr_float_mismatch(err);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -196,7 +196,7 @@ pub impl vtable_origin {
|
|||
|
||||
pub type vtable_map = HashMap<ast::node_id, vtable_res>;
|
||||
|
||||
struct crate_ctxt__ {
|
||||
pub struct CrateCtxt {
|
||||
// A mapping from method call sites to traits that have that method.
|
||||
trait_map: resolve::TraitMap,
|
||||
method_map: method_map,
|
||||
|
@ -205,10 +205,6 @@ struct crate_ctxt__ {
|
|||
tcx: ty::ctxt
|
||||
}
|
||||
|
||||
pub enum crate_ctxt {
|
||||
crate_ctxt_(crate_ctxt__)
|
||||
}
|
||||
|
||||
// Functions that write types into the node type table
|
||||
pub fn write_ty_to_tcx(tcx: ty::ctxt, node_id: ast::node_id, ty: ty::t) {
|
||||
debug!("write_ty_to_tcx(%d, %s)", node_id, ppaux::ty_to_str(tcx, ty));
|
||||
|
@ -233,7 +229,7 @@ pub fn lookup_def_tcx(tcx: ty::ctxt, sp: span, id: ast::node_id) -> ast::def {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn lookup_def_ccx(ccx: @crate_ctxt, sp: span, id: ast::node_id)
|
||||
pub fn lookup_def_ccx(ccx: @mut CrateCtxt, sp: span, id: ast::node_id)
|
||||
-> ast::def {
|
||||
lookup_def_tcx(ccx.tcx, sp, id)
|
||||
}
|
||||
|
@ -244,7 +240,7 @@ pub fn no_params(t: ty::t) -> ty::ty_param_bounds_and_ty {
|
|||
|
||||
pub fn require_same_types(
|
||||
tcx: ty::ctxt,
|
||||
maybe_infcx: Option<@infer::InferCtxt>,
|
||||
maybe_infcx: Option<@mut infer::InferCtxt>,
|
||||
t1_is_expected: bool,
|
||||
span: span,
|
||||
t1: ty::t,
|
||||
|
@ -317,10 +313,9 @@ fn arg_is_argv_ty(tcx: ty::ctxt, a: ty::arg) -> bool {
|
|||
}
|
||||
}
|
||||
|
||||
fn check_main_fn_ty(ccx: @crate_ctxt,
|
||||
fn check_main_fn_ty(ccx: @mut CrateCtxt,
|
||||
main_id: ast::node_id,
|
||||
main_span: span) {
|
||||
|
||||
let tcx = ccx.tcx;
|
||||
let main_t = ty::node_id_to_type(tcx, main_id);
|
||||
match ty::get(main_t).sty {
|
||||
|
@ -360,10 +355,10 @@ fn check_main_fn_ty(ccx: @crate_ctxt,
|
|||
}
|
||||
}
|
||||
|
||||
fn check_for_main_fn(ccx: @crate_ctxt) {
|
||||
fn check_for_main_fn(ccx: @mut CrateCtxt) {
|
||||
let tcx = ccx.tcx;
|
||||
if !tcx.sess.building_library {
|
||||
match copy tcx.sess.main_fn {
|
||||
if !*tcx.sess.building_library {
|
||||
match *tcx.sess.main_fn {
|
||||
Some((id, sp)) => check_main_fn_ty(ccx, id, sp),
|
||||
None => tcx.sess.err(~"main function not found")
|
||||
}
|
||||
|
@ -374,14 +369,13 @@ pub fn check_crate(tcx: ty::ctxt,
|
|||
trait_map: resolve::TraitMap,
|
||||
crate: @ast::crate)
|
||||
-> (method_map, vtable_map) {
|
||||
|
||||
let ccx = @crate_ctxt_(crate_ctxt__ {
|
||||
let ccx = @mut CrateCtxt {
|
||||
trait_map: trait_map,
|
||||
method_map: oldmap::HashMap(),
|
||||
vtable_map: oldmap::HashMap(),
|
||||
coherence_info: @coherence::CoherenceInfo(),
|
||||
tcx: tcx
|
||||
});
|
||||
};
|
||||
collect::collect_item_types(ccx, crate);
|
||||
coherence::check_coherence(ccx, crate);
|
||||
|
||||
|
|
|
@ -19,39 +19,40 @@ use syntax::codemap::span;
|
|||
use syntax::parse::token::special_idents;
|
||||
|
||||
pub trait region_scope {
|
||||
fn anon_region(span: span) -> Result<ty::Region, ~str>;
|
||||
fn self_region(span: span) -> Result<ty::Region, ~str>;
|
||||
fn named_region(span: span, id: ast::ident) -> Result<ty::Region, ~str>;
|
||||
pure fn anon_region(span: span) -> Result<ty::Region, ~str>;
|
||||
pure fn self_region(span: span) -> Result<ty::Region, ~str>;
|
||||
pure fn named_region(span: span, id: ast::ident)
|
||||
-> Result<ty::Region, ~str>;
|
||||
}
|
||||
|
||||
pub enum empty_rscope { empty_rscope }
|
||||
pub impl empty_rscope: region_scope {
|
||||
fn anon_region(_span: span) -> Result<ty::Region, ~str> {
|
||||
pure fn anon_region(_span: span) -> Result<ty::Region, ~str> {
|
||||
result::Ok(ty::re_static)
|
||||
}
|
||||
fn self_region(_span: span) -> Result<ty::Region, ~str> {
|
||||
pure fn self_region(_span: span) -> Result<ty::Region, ~str> {
|
||||
result::Err(~"only the static region is allowed here")
|
||||
}
|
||||
fn named_region(_span: span, _id: ast::ident)
|
||||
-> Result<ty::Region, ~str>
|
||||
{
|
||||
pure fn named_region(_span: span, _id: ast::ident)
|
||||
-> Result<ty::Region, ~str> {
|
||||
result::Err(~"only the static region is allowed here")
|
||||
}
|
||||
}
|
||||
|
||||
pub enum type_rscope = Option<ty::region_variance>;
|
||||
pub impl type_rscope: region_scope {
|
||||
fn anon_region(_span: span) -> Result<ty::Region, ~str> {
|
||||
pure fn anon_region(_span: span) -> Result<ty::Region, ~str> {
|
||||
match *self {
|
||||
Some(_) => result::Ok(ty::re_bound(ty::br_self)),
|
||||
None => result::Err(~"to use region types here, the containing \
|
||||
type must be declared with a region bound")
|
||||
}
|
||||
}
|
||||
fn self_region(span: span) -> Result<ty::Region, ~str> {
|
||||
pure fn self_region(span: span) -> Result<ty::Region, ~str> {
|
||||
self.anon_region(span)
|
||||
}
|
||||
fn named_region(span: span, id: ast::ident) -> Result<ty::Region, ~str> {
|
||||
pure fn named_region(span: span, id: ast::ident)
|
||||
-> Result<ty::Region, ~str> {
|
||||
do empty_rscope.named_region(span, id).chain_err |_e| {
|
||||
result::Err(~"named regions other than `self` are not \
|
||||
allowed as part of a type declaration")
|
||||
|
@ -73,36 +74,42 @@ pub fn in_anon_rscope<RS: region_scope Copy Durable>(self: RS, r: ty::Region)
|
|||
@anon_rscope({anon: r, base: self as region_scope})
|
||||
}
|
||||
pub impl @anon_rscope: region_scope {
|
||||
fn anon_region(_span: span) -> Result<ty::Region, ~str> {
|
||||
pure fn anon_region(_span: span) -> Result<ty::Region, ~str> {
|
||||
result::Ok(self.anon)
|
||||
}
|
||||
fn self_region(span: span) -> Result<ty::Region, ~str> {
|
||||
pure fn self_region(span: span) -> Result<ty::Region, ~str> {
|
||||
self.base.self_region(span)
|
||||
}
|
||||
fn named_region(span: span, id: ast::ident) -> Result<ty::Region, ~str> {
|
||||
pure fn named_region(span: span, id: ast::ident)
|
||||
-> Result<ty::Region, ~str> {
|
||||
self.base.named_region(span, id)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct binding_rscope {
|
||||
base: region_scope,
|
||||
mut anon_bindings: uint,
|
||||
anon_bindings: uint,
|
||||
}
|
||||
|
||||
pub fn in_binding_rscope<RS: region_scope Copy Durable>(self: RS)
|
||||
-> @binding_rscope {
|
||||
-> @mut binding_rscope {
|
||||
let base = self as region_scope;
|
||||
@binding_rscope { base: base, anon_bindings: 0 }
|
||||
@mut binding_rscope { base: base, anon_bindings: 0 }
|
||||
}
|
||||
pub impl @binding_rscope: region_scope {
    fn anon_region(_span: span) -> Result<ty::Region, ~str> {
        let idx = self.anon_bindings;
        self.anon_bindings += 1;
        result::Ok(ty::re_bound(ty::br_anon(idx)))
pub impl @mut binding_rscope: region_scope {
    pure fn anon_region(_span: span) -> Result<ty::Region, ~str> {
        // XXX: Unsafe to work around purity
        unsafe {
            let idx = self.anon_bindings;
            self.anon_bindings += 1;
            result::Ok(ty::re_bound(ty::br_anon(idx)))
        }
    }
    fn self_region(span: span) -> Result<ty::Region, ~str> {
    pure fn self_region(span: span) -> Result<ty::Region, ~str> {
        self.base.self_region(span)
    }
    fn named_region(span: span, id: ast::ident) -> Result<ty::Region, ~str> {
    pure fn named_region(span: span, id: ast::ident)
                      -> Result<ty::Region, ~str> {
        do self.base.named_region(span, id).chain_err |_e| {
            result::Ok(ty::re_bound(ty::br_named(id)))
        }
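
binding_rscope layers a counter for anonymous region bindings over a base scope and forwards everything else to it, so a lookup walks outward through a chain of scopes until one of them has an answer. A simplified version of that delegation chain (the RegionScope trait and its String results are stand-ins for the real Region machinery):

// Each scope answers what it can and defers the rest to its base scope.
trait RegionScope {
    fn anon_region(&mut self) -> Result<String, String>;
    fn named_region(&mut self, name: &str) -> Result<String, String>;
}

struct EmptyScope;

impl RegionScope for EmptyScope {
    fn anon_region(&mut self) -> Result<String, String> {
        Err("only the static region is allowed here".to_string())
    }
    fn named_region(&mut self, _name: &str) -> Result<String, String> {
        Err("only the static region is allowed here".to_string())
    }
}

// Wraps another scope and adds a counter for fresh anonymous regions.
struct BindingScope<B: RegionScope> {
    base: B,
    anon_bindings: usize,
}

impl<B: RegionScope> RegionScope for BindingScope<B> {
    fn anon_region(&mut self) -> Result<String, String> {
        let idx = self.anon_bindings;
        self.anon_bindings += 1;
        Ok(format!("'anon{}", idx))
    }
    // Try the base first; if it has no answer, bind the name here.
    fn named_region(&mut self, name: &str) -> Result<String, String> {
        self.base.named_region(name).or_else(|_| Ok(format!("'{}", name)))
    }
}

fn main() {
    let mut scope = BindingScope { base: EmptyScope, anon_bindings: 0 };
    assert_eq!(scope.anon_region().unwrap(), "'anon0");
    assert_eq!(scope.anon_region().unwrap(), "'anon1");
    assert_eq!(scope.named_region("a").unwrap(), "'a");
}
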
@ -209,7 +209,7 @@ pub fn describe_debug_flags() {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn run_compiler(args: &~[~str], demitter: diagnostic::emitter) {
|
||||
pub fn run_compiler(args: &~[~str], demitter: diagnostic::Emitter) {
|
||||
// Don't display log spew by default. Can override with RUST_LOG.
|
||||
logging::console_off();
|
||||
|
||||
|
@ -320,7 +320,7 @@ diagnostic emitter which records when we hit a fatal error. If the task
|
|||
fails without recording a fatal error then we've encountered a compiler
|
||||
bug and need to present an error.
|
||||
*/
|
||||
pub fn monitor(+f: fn~(diagnostic::emitter)) {
|
||||
pub fn monitor(+f: fn~(diagnostic::Emitter)) {
|
||||
use core::pipes::*;
|
||||
use std::cell::Cell;
|
||||
let (p, ch) = stream();
|
||||
|
|
|
@ -64,7 +64,7 @@ enum CmdAction {
|
|||
|
||||
/// A utility function that hands off a pretty printer to a callback.
|
||||
fn with_pp(intr: @token::ident_interner,
|
||||
cb: fn(pprust::ps, io::Writer)) -> ~str {
|
||||
cb: fn(@pprust::ps, io::Writer)) -> ~str {
|
||||
do io::with_str_writer |writer| {
|
||||
let pp = pprust::rust_printer(writer, intr);
|
||||
|
||||
|
@ -203,7 +203,7 @@ fn compile_crate(src_filename: ~str, binary: ~str) -> Option<bool> {
|
|||
};
|
||||
let input = driver::file_input(src_path);
|
||||
let sess = driver::build_session(options, diagnostic::emit);
|
||||
sess.building_library = true;
|
||||
*sess.building_library = true;
|
||||
let cfg = driver::build_configuration(sess, binary, input);
|
||||
let outputs = driver::build_output_filenames(
|
||||
input, &None, &None, sess);
@ -72,7 +72,7 @@ pub struct uv_handle_fields {
|
|||
loop_handle: *libc::c_void,
|
||||
type_: handle_type,
|
||||
close_cb: *u8,
|
||||
mut data: *libc::c_void,
|
||||
data: *libc::c_void,
|
||||
}
|
||||
|
||||
// unix size: 8
|
||||
|
@ -240,10 +240,10 @@ pub struct uv_timer_t {
|
|||
|
||||
// unix size: 16
|
||||
pub struct sockaddr_in {
|
||||
mut sin_family: u16,
|
||||
mut sin_port: u16,
|
||||
mut sin_addr: u32, // in_addr: this is an opaque, per-platform struct
|
||||
mut sin_zero: (u8, u8, u8, u8, u8, u8, u8, u8),
|
||||
sin_family: u16,
|
||||
sin_port: u16,
|
||||
sin_addr: u32, // in_addr: this is an opaque, per-platform struct
|
||||
sin_zero: (u8, u8, u8, u8, u8, u8, u8, u8),
|
||||
}
|
||||
|
||||
// unix size: 28 .. FIXME #1645
|
||||
|
|
|
@@ -107,15 +107,17 @@ pub enum ast_node {
}

pub type map = std::oldmap::HashMap<node_id, ast_node>;
pub struct ctx {
    map: map,
    mut path: path,
    mut local_id: uint,

pub struct Ctx {
    map: @map,
    path: path,
    local_id: uint,
    diag: span_handler,
}
pub type vt = visit::vt<ctx>;

pub fn extend(cx: ctx, +elt: ident) -> @path {
pub type vt = visit::vt<@mut Ctx>;

pub fn extend(cx: @mut Ctx, +elt: ident) -> @path {
    @(vec::append(cx.path, ~[path_name(elt)]))
}

@@ -133,31 +135,33 @@ pub fn mk_ast_map_visitor() -> vt {
}

pub fn map_crate(diag: span_handler, c: crate) -> map {
    let cx = ctx {
        map: std::oldmap::HashMap(),
        mut path: ~[],
        mut local_id: 0u,
    let cx = @mut Ctx {
        map: @std::oldmap::HashMap(),
        path: ~[],
        local_id: 0u,
        diag: diag,
    };
    visit::visit_crate(c, cx, mk_ast_map_visitor());
    cx.map
    *cx.map
}
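
map_crate makes a single pass over the crate whose only job is to record every node it visits into an id-keyed side table that later passes can query. A toy version of that pass over a miniature AST (every type here is invented for the example):

use std::collections::HashMap;

// A miniature AST: every node carries an id assigned by the parser.
struct Item {
    id: u32,
    name: String,
    body: Vec<Expr>,
}

struct Expr {
    id: u32,
    text: String,
}

enum Node<'a> {
    Item(&'a Item),
    Expr(&'a Expr),
}

// One pass over the AST, filling in an id -> node side table.
fn map_crate<'a>(items: &'a [Item]) -> HashMap<u32, Node<'a>> {
    let mut map = HashMap::new();
    for item in items {
        map.insert(item.id, Node::Item(item));
        for expr in &item.body {
            map.insert(expr.id, Node::Expr(expr));
        }
    }
    map
}

fn main() {
    let items = vec![Item {
        id: 1,
        name: "main".to_string(),
        body: vec![Expr { id: 2, text: "1 + 1".to_string() }],
    }];
    let map = map_crate(&items);
    match map.get(&2) {
        Some(Node::Expr(e)) => assert_eq!(e.text, "1 + 1"),
        _ => panic!("expr 2 should be in the map"),
    }
    let _ = &items[0].name; // the name field is only here to make the AST look realistic
}
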
// Used for items loaded from external crate that are being inlined into this
|
||||
// crate. The `path` should be the path to the item but should not include
|
||||
// the item itself.
|
||||
pub fn map_decoded_item(diag: span_handler,
|
||||
map: map, path: path, ii: inlined_item) {
|
||||
map: map,
|
||||
path: path,
|
||||
ii: inlined_item) {
|
||||
// I believe it is ok for the local IDs of inlined items from other crates
|
||||
// to overlap with the local ids from this crate, so just generate the ids
|
||||
// starting from 0. (In particular, I think these ids are only used in
|
||||
// alias analysis, which we will not be running on the inlined items, and
|
||||
// even if we did I think it only needs an ordering between local
|
||||
// variables that are simultaneously in scope).
|
||||
let cx = ctx {
|
||||
map: map,
|
||||
mut path: path,
|
||||
mut local_id: 0,
|
||||
let cx = @mut Ctx {
|
||||
map: @map,
|
||||
path: path,
|
||||
local_id: 0,
|
||||
diag: diag,
|
||||
};
|
||||
let v = mk_ast_map_visitor();
|
||||
|
@ -181,7 +185,7 @@ pub fn map_decoded_item(diag: span_handler,
|
|||
}
|
||||
|
||||
pub fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
|
||||
sp: codemap::span, id: node_id, cx: ctx, v: vt) {
|
||||
sp: codemap::span, id: node_id, &&cx: @mut Ctx, v: vt) {
|
||||
for decl.inputs.each |a| {
|
||||
cx.map.insert(a.id,
|
||||
node_arg(/* FIXME (#2543) */
|
||||
|
@ -208,12 +212,12 @@ pub fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
|
|||
visit::visit_fn(fk, decl, body, sp, id, cx, v);
|
||||
}
|
||||
|
||||
pub fn map_block(b: blk, cx: ctx, v: vt) {
|
||||
pub fn map_block(b: blk, &&cx: @mut Ctx, v: vt) {
|
||||
cx.map.insert(b.node.id, node_block(/* FIXME (#2543) */ copy b));
|
||||
visit::visit_block(b, cx, v);
|
||||
}
|
||||
|
||||
pub fn number_pat(cx: ctx, pat: @pat) {
|
||||
pub fn number_pat(cx: @mut Ctx, pat: @pat) {
|
||||
do ast_util::walk_pat(pat) |p| {
|
||||
match p.node {
|
||||
pat_ident(*) => {
|
||||
|
@ -225,24 +229,24 @@ pub fn number_pat(cx: ctx, pat: @pat) {
|
|||
};
|
||||
}
|
||||
|
||||
pub fn map_local(loc: @local, cx: ctx, v: vt) {
|
||||
pub fn map_local(loc: @local, &&cx: @mut Ctx, v: vt) {
|
||||
number_pat(cx, loc.node.pat);
|
||||
visit::visit_local(loc, cx, v);
|
||||
}
|
||||
|
||||
pub fn map_arm(arm: arm, cx: ctx, v: vt) {
|
||||
pub fn map_arm(arm: arm, &&cx: @mut Ctx, v: vt) {
|
||||
number_pat(cx, arm.pats[0]);
|
||||
visit::visit_arm(arm, cx, v);
|
||||
}
|
||||
|
||||
pub fn map_method(impl_did: def_id, impl_path: @path,
|
||||
m: @method, cx: ctx) {
|
||||
m: @method, &&cx: @mut Ctx) {
|
||||
cx.map.insert(m.id, node_method(m, impl_did, impl_path));
|
||||
cx.map.insert(m.self_id, node_local(cx.local_id));
|
||||
cx.local_id += 1u;
|
||||
}
|
||||
|
||||
pub fn map_item(i: @item, cx: ctx, v: vt) {
|
||||
pub fn map_item(i: @item, &&cx: @mut Ctx, v: vt) {
|
||||
let item_path = @/* FIXME (#2543) */ copy cx.path;
|
||||
cx.map.insert(i.id, node_item(i, item_path));
|
||||
match i.node {
|
||||
|
@ -305,7 +309,7 @@ pub fn map_item(i: @item, cx: ctx, v: vt) {
|
|||
}
|
||||
|
||||
pub fn map_struct_def(struct_def: @ast::struct_def, parent_node: ast_node,
|
||||
ident: ast::ident, cx: ctx, _v: vt) {
|
||||
ident: ast::ident, cx: @mut Ctx, _v: vt) {
|
||||
let p = extend(cx, ident);
|
||||
// If this is a tuple-like struct, register the constructor.
|
||||
match struct_def.ctor_id {
|
||||
|
@ -322,12 +326,12 @@ pub fn map_struct_def(struct_def: @ast::struct_def, parent_node: ast_node,
|
|||
}
|
||||
}
|
||||
|
||||
pub fn map_expr(ex: @expr, cx: ctx, v: vt) {
|
||||
pub fn map_expr(ex: @expr, &&cx: @mut Ctx, v: vt) {
|
||||
cx.map.insert(ex.id, node_expr(ex));
|
||||
visit::visit_expr(ex, cx, v);
|
||||
}
|
||||
|
||||
pub fn map_stmt(stmt: @stmt, cx: ctx, v: vt) {
|
||||
pub fn map_stmt(stmt: @stmt, &&cx: @mut Ctx, v: vt) {
|
||||
cx.map.insert(stmt_id(*stmt), node_stmt(stmt));
|
||||
visit::visit_stmt(stmt, cx, v);
|
||||
}
|
||||
|
|
|
@ -23,83 +23,86 @@ use core::dvec::DVec;
|
|||
|
||||
use std::term;
|
||||
|
||||
pub type emitter = fn@(cmsp: Option<(@codemap::CodeMap, span)>,
|
||||
pub type Emitter = fn@(cmsp: Option<(@codemap::CodeMap, span)>,
|
||||
msg: &str, lvl: level);
|
||||
|
||||
|
||||
pub trait span_handler {
|
||||
fn span_fatal(sp: span, msg: &str) -> !;
|
||||
fn span_err(sp: span, msg: &str);
|
||||
fn span_warn(sp: span, msg: &str);
|
||||
fn span_note(sp: span, msg: &str);
|
||||
fn span_bug(sp: span, msg: &str) -> !;
|
||||
fn span_unimpl(sp: span, msg: &str) -> !;
|
||||
fn handler() -> handler;
|
||||
fn span_fatal(@mut self, sp: span, msg: &str) -> !;
|
||||
fn span_err(@mut self, sp: span, msg: &str);
|
||||
fn span_warn(@mut self, sp: span, msg: &str);
|
||||
fn span_note(@mut self, sp: span, msg: &str);
|
||||
fn span_bug(@mut self, sp: span, msg: &str) -> !;
|
||||
fn span_unimpl(@mut self, sp: span, msg: &str) -> !;
|
||||
fn handler(@mut self) -> handler;
|
||||
}
|
||||
|
||||
pub trait handler {
|
||||
fn fatal(msg: &str) -> !;
|
||||
fn err(msg: &str);
|
||||
fn bump_err_count();
|
||||
fn has_errors() -> bool;
|
||||
fn abort_if_errors();
|
||||
fn warn(msg: &str);
|
||||
fn note(msg: &str);
|
||||
fn bug(msg: &str) -> !;
|
||||
fn unimpl(msg: &str) -> !;
|
||||
fn emit(cmsp: Option<(@codemap::CodeMap, span)>, msg: &str, lvl: level);
|
||||
fn fatal(@mut self, msg: &str) -> !;
|
||||
fn err(@mut self, msg: &str);
|
||||
fn bump_err_count(@mut self);
|
||||
fn has_errors(@mut self) -> bool;
|
||||
fn abort_if_errors(@mut self);
|
||||
fn warn(@mut self, msg: &str);
|
||||
fn note(@mut self, msg: &str);
|
||||
fn bug(@mut self, msg: &str) -> !;
|
||||
fn unimpl(@mut self, msg: &str) -> !;
|
||||
fn emit(@mut self,
|
||||
cmsp: Option<(@codemap::CodeMap, span)>,
|
||||
msg: &str,
|
||||
lvl: level);
|
||||
}
|
||||
|
||||
struct handler_t {
|
||||
mut err_count: uint,
|
||||
emit: emitter,
|
||||
struct HandlerT {
|
||||
err_count: uint,
|
||||
emit: Emitter,
|
||||
}
|
||||
|
||||
struct codemap_t {
|
||||
struct CodemapT {
|
||||
handler: handler,
|
||||
cm: @codemap::CodeMap,
|
||||
}
|
||||
|
||||
impl codemap_t: span_handler {
|
||||
fn span_fatal(sp: span, msg: &str) -> ! {
|
||||
impl CodemapT: span_handler {
|
||||
fn span_fatal(@mut self, sp: span, msg: &str) -> ! {
|
||||
self.handler.emit(Some((self.cm, sp)), msg, fatal);
|
||||
die!();
|
||||
}
|
||||
fn span_err(sp: span, msg: &str) {
|
||||
fn span_err(@mut self, sp: span, msg: &str) {
|
||||
self.handler.emit(Some((self.cm, sp)), msg, error);
|
||||
self.handler.bump_err_count();
|
||||
}
|
||||
fn span_warn(sp: span, msg: &str) {
|
||||
fn span_warn(@mut self, sp: span, msg: &str) {
|
||||
self.handler.emit(Some((self.cm, sp)), msg, warning);
|
||||
}
|
||||
fn span_note(sp: span, msg: &str) {
|
||||
fn span_note(@mut self, sp: span, msg: &str) {
|
||||
self.handler.emit(Some((self.cm, sp)), msg, note);
|
||||
}
|
||||
fn span_bug(sp: span, msg: &str) -> ! {
|
||||
fn span_bug(@mut self, sp: span, msg: &str) -> ! {
|
||||
self.span_fatal(sp, ice_msg(msg));
|
||||
}
|
||||
fn span_unimpl(sp: span, msg: &str) -> ! {
|
||||
fn span_unimpl(@mut self, sp: span, msg: &str) -> ! {
|
||||
self.span_bug(sp, ~"unimplemented " + msg);
|
||||
}
|
||||
fn handler() -> handler {
|
||||
fn handler(@mut self) -> handler {
|
||||
self.handler
|
||||
}
|
||||
}
|
||||
|
||||
impl handler_t: handler {
|
||||
fn fatal(msg: &str) -> ! {
|
||||
impl HandlerT: handler {
|
||||
fn fatal(@mut self, msg: &str) -> ! {
|
||||
(self.emit)(None, msg, fatal);
|
||||
die!();
|
||||
}
|
||||
fn err(msg: &str) {
|
||||
fn err(@mut self, msg: &str) {
|
||||
(self.emit)(None, msg, error);
|
||||
self.bump_err_count();
|
||||
}
|
||||
fn bump_err_count() {
|
||||
fn bump_err_count(@mut self) {
|
||||
self.err_count += 1u;
|
||||
}
|
||||
fn has_errors() -> bool { self.err_count > 0u }
|
||||
fn abort_if_errors() {
|
||||
fn has_errors(@mut self) -> bool { self.err_count > 0u }
|
||||
fn abort_if_errors(@mut self) {
|
||||
let s;
|
||||
match self.err_count {
|
||||
0u => return,
|
||||
|
@ -111,17 +114,22 @@ impl handler_t: handler {
|
|||
}
|
||||
self.fatal(s);
|
||||
}
|
||||
fn warn(msg: &str) {
|
||||
fn warn(@mut self, msg: &str) {
|
||||
(self.emit)(None, msg, warning);
|
||||
}
|
||||
fn note(msg: &str) {
|
||||
fn note(@mut self, msg: &str) {
|
||||
(self.emit)(None, msg, note);
|
||||
}
|
||||
fn bug(msg: &str) -> ! {
|
||||
fn bug(@mut self, msg: &str) -> ! {
|
||||
self.fatal(ice_msg(msg));
|
||||
}
|
||||
fn unimpl(msg: &str) -> ! { self.bug(~"unimplemented " + msg); }
|
||||
fn emit(cmsp: Option<(@codemap::CodeMap, span)>, msg: &str, lvl: level) {
|
||||
fn unimpl(@mut self, msg: &str) -> ! {
|
||||
self.bug(~"unimplemented " + msg);
|
||||
}
|
||||
fn emit(@mut self,
|
||||
cmsp: Option<(@codemap::CodeMap, span)>,
|
||||
msg: &str,
|
||||
lvl: level) {
|
||||
(self.emit)(cmsp, msg, lvl);
|
||||
}
|
||||
}
|
||||
|
@@ -132,25 +140,22 @@ pub fn ice_msg(msg: &str) -> ~str {

pub fn mk_span_handler(handler: handler, cm: @codemap::CodeMap)
                    -> span_handler {
    @codemap_t { handler: handler, cm: cm } as span_handler
    @mut CodemapT { handler: handler, cm: cm } as @span_handler
}

pub fn mk_handler(emitter: Option<emitter>) -> handler {

    let emit = match emitter {
        Some(e) => e,
        None => {
            let f = fn@(cmsp: Option<(@codemap::CodeMap, span)>,
                        msg: &str, t: level) {
                emit(cmsp, msg, t);
            };
            f
        }
pub fn mk_handler(emitter: Option<Emitter>) -> @handler {
    let emit: Emitter = match emitter {
        Some(e) => e,
        None => {
            let emit: Emitter = |cmsp, msg, t| emit(cmsp, msg, t);
            emit
        }
    };

    @handler_t { mut err_count: 0, emit: emit } as handler
    @mut HandlerT { mut err_count: 0, emit: emit } as @handler
}
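
The handler here is an error counter plus a pluggable Emitter callback, and mk_handler falls back to a default emitter when none is supplied. A present-day sketch of that shape (the Handler and Level items below are illustrative, not the ones defined in this file):

use std::cell::RefCell;
use std::rc::Rc;

#[derive(Clone, Copy, Debug)]
enum Level {
    Error,
    Warning,
}

// The emitter is just a callback, so drivers and tests can plug in their own.
type Emitter = Box<dyn FnMut(&str, Level)>;

struct Handler {
    err_count: usize,
    emit: Emitter,
}

impl Handler {
    fn new(emitter: Option<Emitter>) -> Handler {
        // Fall back to a default emitter that prints to stderr.
        let emit = emitter.unwrap_or_else(|| {
            let default: Emitter =
                Box::new(|msg: &str, lvl: Level| eprintln!("{:?}: {}", lvl, msg));
            default
        });
        Handler { err_count: 0, emit }
    }

    fn err(&mut self, msg: &str) {
        (self.emit)(msg, Level::Error);
        self.err_count += 1;
    }

    fn warn(&mut self, msg: &str) {
        (self.emit)(msg, Level::Warning);
    }

    fn has_errors(&self) -> bool {
        self.err_count > 0
    }
}

fn main() {
    // Capture diagnostics instead of printing them.
    let seen: Rc<RefCell<Vec<String>>> = Rc::new(RefCell::new(Vec::new()));
    let sink = seen.clone();
    let capture: Emitter = Box::new(move |msg: &str, _lvl: Level| {
        sink.borrow_mut().push(msg.to_string());
    });
    let mut h = Handler::new(Some(capture));
    h.warn("just a warning");
    h.err("something went wrong");
    assert!(h.has_errors());
    assert_eq!(seen.borrow().len(), 2);
}
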
#[deriving_eq]
|
||||
pub enum level {
|
||||
fatal,
|
||||
error,
|
||||
|
@ -158,28 +163,21 @@ pub enum level {
|
|||
note,
|
||||
}
|
||||
|
||||
impl level : cmp::Eq {
|
||||
pure fn eq(&self, other: &level) -> bool {
|
||||
((*self) as uint) == ((*other) as uint)
|
||||
}
|
||||
pure fn ne(&self, other: &level) -> bool { !(*self).eq(other) }
|
||||
}
|
||||
|
||||
fn diagnosticstr(lvl: level) -> ~str {
|
||||
match lvl {
|
||||
fatal => ~"error",
|
||||
error => ~"error",
|
||||
warning => ~"warning",
|
||||
note => ~"note"
|
||||
fatal => ~"error",
|
||||
error => ~"error",
|
||||
warning => ~"warning",
|
||||
note => ~"note"
|
||||
}
|
||||
}
|
||||
|
||||
fn diagnosticcolor(lvl: level) -> u8 {
|
||||
match lvl {
|
||||
fatal => term::color_bright_red,
|
||||
error => term::color_bright_red,
|
||||
warning => term::color_bright_yellow,
|
||||
note => term::color_bright_green
|
||||
fatal => term::color_bright_red,
|
||||
error => term::color_bright_red,
|
||||
warning => term::color_bright_yellow,
|
||||
note => term::color_bright_green
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -223,9 +221,9 @@ pub fn emit(cmsp: Option<(@codemap::CodeMap, span)>, msg: &str, lvl: level) {
|
|||
}
|
||||
}
|
||||
|
||||
fn highlight_lines(cm: @codemap::CodeMap, sp: span,
|
||||
fn highlight_lines(cm: @codemap::CodeMap,
|
||||
sp: span,
|
||||
lines: @codemap::FileLines) {
|
||||
|
||||
let fm = lines.file;
|
||||
|
||||
// arbitrarily only print up to six lines of the error
|
||||
|
|
|
@ -161,54 +161,56 @@ pub fn syntax_expander_table() -> HashMap<~str, SyntaxExtension> {
|
|||
// when a macro expansion occurs, the resulting nodes have the backtrace()
|
||||
// -> expn_info of their expansion context stored into their span.
|
||||
pub trait ext_ctxt {
|
||||
fn codemap() -> @CodeMap;
|
||||
fn parse_sess() -> parse::parse_sess;
|
||||
fn cfg() -> ast::crate_cfg;
|
||||
fn call_site() -> span;
|
||||
fn print_backtrace();
|
||||
fn backtrace() -> Option<@ExpnInfo>;
|
||||
fn mod_push(mod_name: ast::ident);
|
||||
fn mod_pop();
|
||||
fn mod_path() -> ~[ast::ident];
|
||||
fn bt_push(ei: codemap::ExpnInfo);
|
||||
fn bt_pop();
|
||||
fn span_fatal(sp: span, msg: &str) -> !;
|
||||
fn span_err(sp: span, msg: &str);
|
||||
fn span_warn(sp: span, msg: &str);
|
||||
fn span_unimpl(sp: span, msg: &str) -> !;
|
||||
fn span_bug(sp: span, msg: &str) -> !;
|
||||
fn bug(msg: &str) -> !;
|
||||
fn next_id() -> ast::node_id;
|
||||
pure fn trace_macros() -> bool;
|
||||
fn set_trace_macros(x: bool);
|
||||
fn codemap(@mut self) -> @CodeMap;
|
||||
fn parse_sess(@mut self) -> parse::parse_sess;
|
||||
fn cfg(@mut self) -> ast::crate_cfg;
|
||||
fn call_site(@mut self) -> span;
|
||||
fn print_backtrace(@mut self);
|
||||
fn backtrace(@mut self) -> Option<@ExpnInfo>;
|
||||
fn mod_push(@mut self, mod_name: ast::ident);
|
||||
fn mod_pop(@mut self);
|
||||
fn mod_path(@mut self) -> ~[ast::ident];
|
||||
fn bt_push(@mut self, ei: codemap::ExpnInfo);
|
||||
fn bt_pop(@mut self);
|
||||
fn span_fatal(@mut self, sp: span, msg: &str) -> !;
|
||||
fn span_err(@mut self, sp: span, msg: &str);
|
||||
fn span_warn(@mut self, sp: span, msg: &str);
|
||||
fn span_unimpl(@mut self, sp: span, msg: &str) -> !;
|
||||
fn span_bug(@mut self, sp: span, msg: &str) -> !;
|
||||
fn bug(@mut self, msg: &str) -> !;
|
||||
fn next_id(@mut self) -> ast::node_id;
|
||||
pure fn trace_macros(@mut self) -> bool;
|
||||
fn set_trace_macros(@mut self, x: bool);
|
||||
/* for unhygienic identifier transformation */
|
||||
fn str_of(id: ast::ident) -> ~str;
|
||||
fn ident_of(st: ~str) -> ast::ident;
|
||||
fn str_of(@mut self, id: ast::ident) -> ~str;
|
||||
fn ident_of(@mut self, st: ~str) -> ast::ident;
|
||||
}
|
||||
|
||||
pub fn mk_ctxt(parse_sess: parse::parse_sess,
|
||||
cfg: ast::crate_cfg) -> ext_ctxt {
|
||||
type ctxt_repr = {parse_sess: parse::parse_sess,
|
||||
cfg: ast::crate_cfg,
|
||||
mut backtrace: Option<@ExpnInfo>,
|
||||
mut mod_path: ~[ast::ident],
|
||||
mut trace_mac: bool};
|
||||
impl ctxt_repr: ext_ctxt {
|
||||
fn codemap() -> @CodeMap { self.parse_sess.cm }
|
||||
fn parse_sess() -> parse::parse_sess { self.parse_sess }
|
||||
fn cfg() -> ast::crate_cfg { self.cfg }
|
||||
fn call_site() -> span {
|
||||
struct CtxtRepr {
|
||||
parse_sess: parse::parse_sess,
|
||||
cfg: ast::crate_cfg,
|
||||
backtrace: Option<@ExpnInfo>,
|
||||
mod_path: ~[ast::ident],
|
||||
trace_mac: bool
|
||||
}
|
||||
impl CtxtRepr: ext_ctxt {
|
||||
fn codemap(@mut self) -> @CodeMap { self.parse_sess.cm }
|
||||
fn parse_sess(@mut self) -> parse::parse_sess { self.parse_sess }
|
||||
fn cfg(@mut self) -> ast::crate_cfg { self.cfg }
|
||||
fn call_site(@mut self) -> span {
|
||||
match self.backtrace {
|
||||
Some(@ExpandedFrom({call_site: cs, _})) => cs,
|
||||
None => self.bug(~"missing top span")
|
||||
}
|
||||
}
|
||||
fn print_backtrace() { }
|
||||
fn backtrace() -> Option<@ExpnInfo> { self.backtrace }
|
||||
fn mod_push(i: ast::ident) { self.mod_path.push(i); }
|
||||
fn mod_pop() { self.mod_path.pop(); }
|
||||
fn mod_path() -> ~[ast::ident] { return self.mod_path; }
|
||||
fn bt_push(ei: codemap::ExpnInfo) {
|
||||
fn print_backtrace(@mut self) { }
|
||||
fn backtrace(@mut self) -> Option<@ExpnInfo> { self.backtrace }
|
||||
fn mod_push(@mut self, i: ast::ident) { self.mod_path.push(i); }
|
||||
fn mod_pop(@mut self) { self.mod_path.pop(); }
|
||||
fn mod_path(@mut self) -> ~[ast::ident] { return self.mod_path; }
|
||||
fn bt_push(@mut self, ei: codemap::ExpnInfo) {
|
||||
match ei {
|
||||
ExpandedFrom({call_site: cs, callie: ref callie}) => {
|
||||
self.backtrace =
|
||||
|
@ -219,7 +221,7 @@ pub fn mk_ctxt(parse_sess: parse::parse_sess,
|
|||
}
|
||||
}
|
||||
}
|
||||
fn bt_pop() {
|
||||
fn bt_pop(@mut self) {
|
||||
match self.backtrace {
|
||||
Some(@ExpandedFrom({
|
||||
call_site: span {expn_info: prev, _}, _
|
||||
|
@ -229,55 +231,55 @@ pub fn mk_ctxt(parse_sess: parse::parse_sess,
|
|||
_ => self.bug(~"tried to pop without a push")
|
||||
}
|
||||
}
|
||||
fn span_fatal(sp: span, msg: &str) -> ! {
|
||||
fn span_fatal(@mut self, sp: span, msg: &str) -> ! {
|
||||
self.print_backtrace();
|
||||
self.parse_sess.span_diagnostic.span_fatal(sp, msg);
|
||||
}
|
||||
fn span_err(sp: span, msg: &str) {
|
||||
fn span_err(@mut self, sp: span, msg: &str) {
|
||||
self.print_backtrace();
|
||||
self.parse_sess.span_diagnostic.span_err(sp, msg);
|
||||
}
|
||||
fn span_warn(sp: span, msg: &str) {
|
||||
fn span_warn(@mut self, sp: span, msg: &str) {
|
||||
self.print_backtrace();
|
||||
self.parse_sess.span_diagnostic.span_warn(sp, msg);
|
||||
}
|
||||
fn span_unimpl(sp: span, msg: &str) -> ! {
|
||||
fn span_unimpl(@mut self, sp: span, msg: &str) -> ! {
|
||||
self.print_backtrace();
|
||||
self.parse_sess.span_diagnostic.span_unimpl(sp, msg);
|
||||
}
|
||||
fn span_bug(sp: span, msg: &str) -> ! {
|
||||
fn span_bug(@mut self, sp: span, msg: &str) -> ! {
|
||||
self.print_backtrace();
|
||||
self.parse_sess.span_diagnostic.span_bug(sp, msg);
|
||||
}
|
||||
fn bug(msg: &str) -> ! {
|
||||
fn bug(@mut self, msg: &str) -> ! {
|
||||
self.print_backtrace();
|
||||
self.parse_sess.span_diagnostic.handler().bug(msg);
|
||||
}
|
||||
fn next_id() -> ast::node_id {
|
||||
fn next_id(@mut self) -> ast::node_id {
|
||||
return parse::next_node_id(self.parse_sess);
|
||||
}
|
||||
pure fn trace_macros() -> bool {
|
||||
pure fn trace_macros(@mut self) -> bool {
|
||||
self.trace_mac
|
||||
}
|
||||
fn set_trace_macros(x: bool) {
|
||||
fn set_trace_macros(@mut self, x: bool) {
|
||||
self.trace_mac = x
|
||||
}
|
||||
|
||||
fn str_of(id: ast::ident) -> ~str {
|
||||
fn str_of(@mut self, id: ast::ident) -> ~str {
|
||||
*self.parse_sess.interner.get(id)
|
||||
}
|
||||
fn ident_of(st: ~str) -> ast::ident {
|
||||
fn ident_of(@mut self, st: ~str) -> ast::ident {
|
||||
self.parse_sess.interner.intern(@st)
|
||||
}
|
||||
}
|
||||
let imp: ctxt_repr = {
|
||||
let imp: @mut CtxtRepr = @mut CtxtRepr {
|
||||
parse_sess: parse_sess,
|
||||
cfg: cfg,
|
||||
mut backtrace: None,
|
||||
mut mod_path: ~[],
|
||||
mut trace_mac: false
|
||||
backtrace: None,
|
||||
mod_path: ~[],
|
||||
trace_mac: false
|
||||
};
|
||||
move ((move imp) as ext_ctxt)
|
||||
move ((move imp) as @ext_ctxt)
|
||||
}
|
||||
|
||||
pub fn expr_to_str(cx: ext_ctxt, expr: @ast::expr, err_msg: ~str) -> ~str {
@@ -24,80 +24,90 @@ use core::vec;
use std;
use std::oldmap::HashMap;

enum tt_frame_up { /* to break a circularity */
    tt_frame_up(Option<tt_frame>)
}

/* FIXME #2811: figure out how to have a uniquely linked stack, and change to
   `~` */
///an unzipping of `token_tree`s
type tt_frame = @{
struct TtFrame {
    readme: ~[ast::token_tree],
    mut idx: uint,
    idx: uint,
    dotdotdoted: bool,
    sep: Option<Token>,
    up: tt_frame_up,
};
    up: Option<@mut TtFrame>,
}

pub type tt_reader = @tt_reader_;
pub type tt_reader_ = {
pub struct TtReader {
    sp_diag: span_handler,
    interner: @ident_interner,
    mut cur: tt_frame,
    cur: @mut TtFrame,
    /* for MBE-style macro transcription */
    interpolations: std::oldmap::HashMap<ident, @named_match>,
    mut repeat_idx: ~[uint],
    mut repeat_len: ~[uint],
    repeat_idx: ~[uint],
    repeat_len: ~[uint],
    /* cached: */
    mut cur_tok: Token,
    mut cur_span: span
};
    cur_tok: Token,
    cur_span: span
}
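
TtFrame and TtReader drive transcription with an explicit stack of frames: each frame is a token-tree sequence plus a cursor, and up points back to the frame that was suspended when the reader descended into a nested delimiter. A stand-alone sketch of walking nested token trees that way, using a Vec of frames in place of the @mut parent links (all names are illustrative):

// A token tree: either a leaf token or a delimited sequence of subtrees.
enum TokenTree {
    Tok(char),
    Delim(Vec<TokenTree>),
}

// One frame per nesting level: the sequence being read and a cursor into it.
struct Frame<'a> {
    trees: &'a [TokenTree],
    idx: usize,
}

// Yield the leaf tokens of `src` in order, keeping an explicit frame stack.
fn flatten(src: &[TokenTree]) -> Vec<char> {
    let mut out = Vec::new();
    let mut stack = vec![Frame { trees: src, idx: 0 }];
    while !stack.is_empty() {
        let top = stack.len() - 1;
        if stack[top].idx >= stack[top].trees.len() {
            stack.pop(); // done with this set; resume the frame above
            continue;
        }
        let i = stack[top].idx;
        let trees = stack[top].trees;
        stack[top].idx += 1;
        match &trees[i] {
            TokenTree::Tok(c) => out.push(*c),
            // Descend into the delimited subtree by pushing a fresh frame.
            TokenTree::Delim(inner) => stack.push(Frame { trees: inner.as_slice(), idx: 0 }),
        }
    }
    out
}

fn main() {
    use TokenTree::*;
    let src = vec![Tok('a'), Delim(vec![Tok('b'), Tok('c')]), Tok('d')];
    assert_eq!(flatten(&src), vec!['a', 'b', 'c', 'd']);
}
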
/** This can do Macro-By-Example transcription. On the other hand, if
|
||||
* `src` contains no `tt_seq`s and `tt_nonterminal`s, `interp` can (and
|
||||
* should) be none. */
|
||||
pub fn new_tt_reader(sp_diag: span_handler, itr: @ident_interner,
|
||||
pub fn new_tt_reader(sp_diag: span_handler,
|
||||
itr: @ident_interner,
|
||||
interp: Option<std::oldmap::HashMap<ident,@named_match>>,
|
||||
src: ~[ast::token_tree])
|
||||
-> tt_reader {
|
||||
let r = @{sp_diag: sp_diag, interner: itr,
|
||||
mut cur: @{readme: src, mut idx: 0u, dotdotdoted: false,
|
||||
sep: None, up: tt_frame_up(option::None)},
|
||||
interpolations: match interp { /* just a convienience */
|
||||
None => std::oldmap::HashMap(),
|
||||
Some(x) => x
|
||||
},
|
||||
mut repeat_idx: ~[],
|
||||
mut repeat_len: ~[],
|
||||
/* dummy values, never read: */
|
||||
mut cur_tok: EOF,
|
||||
mut cur_span: dummy_sp()
|
||||
};
|
||||
-> @mut TtReader {
|
||||
let r = @mut TtReader {
|
||||
sp_diag: sp_diag,
|
||||
interner: itr,
|
||||
mut cur: @mut TtFrame {
|
||||
readme: src,
|
||||
idx: 0u,
|
||||
dotdotdoted: false,
|
||||
sep: None,
|
||||
up: option::None
|
||||
},
|
||||
interpolations: match interp { /* just a convienience */
|
||||
None => std::oldmap::HashMap(),
|
||||
Some(x) => x
|
||||
},
|
||||
repeat_idx: ~[],
|
||||
repeat_len: ~[],
|
||||
/* dummy values, never read: */
|
||||
cur_tok: EOF,
|
||||
cur_span: dummy_sp()
|
||||
};
|
||||
tt_next_token(r); /* get cur_tok and cur_span set up */
|
||||
return r;
|
||||
}
|
||||
|
||||
pure fn dup_tt_frame(&&f: tt_frame) -> tt_frame {
|
||||
@{readme: f.readme, mut idx: f.idx, dotdotdoted: f.dotdotdoted,
|
||||
sep: f.sep, up: match f.up {
|
||||
tt_frame_up(Some(up_frame)) => {
|
||||
tt_frame_up(Some(dup_tt_frame(up_frame)))
|
||||
pure fn dup_tt_frame(f: @mut TtFrame) -> @mut TtFrame {
|
||||
@mut TtFrame {
|
||||
readme: f.readme,
|
||||
idx: f.idx,
|
||||
dotdotdoted: f.dotdotdoted,
|
||||
sep: f.sep,
|
||||
up: match f.up {
|
||||
Some(up_frame) => Some(dup_tt_frame(up_frame)),
|
||||
None => None
|
||||
}
|
||||
tt_frame_up(none) => tt_frame_up(none)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub pure fn dup_tt_reader(r: &tt_reader_) -> tt_reader {
|
||||
@{sp_diag: r.sp_diag, interner: r.interner,
|
||||
mut cur: dup_tt_frame(r.cur),
|
||||
interpolations: r.interpolations,
|
||||
mut repeat_idx: copy r.repeat_idx, mut repeat_len: copy r.repeat_len,
|
||||
mut cur_tok: r.cur_tok, mut cur_span: r.cur_span}
|
||||
pub pure fn dup_tt_reader(r: @mut TtReader) -> @mut TtReader {
|
||||
@mut TtReader {
|
||||
sp_diag: r.sp_diag,
|
||||
interner: r.interner,
|
||||
cur: dup_tt_frame(r.cur),
|
||||
interpolations: r.interpolations,
|
||||
repeat_idx: copy r.repeat_idx,
|
||||
repeat_len: copy r.repeat_len,
|
||||
cur_tok: r.cur_tok,
|
||||
cur_span: r.cur_span
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
pure fn lookup_cur_matched_by_matched(r: &tt_reader_,
|
||||
pure fn lookup_cur_matched_by_matched(r: @mut TtReader,
|
||||
start: @named_match) -> @named_match {
|
||||
pure fn red(+ad: @named_match, idx: &uint) -> @named_match {
|
||||
match *ad {
|
||||
|
@ -111,15 +121,15 @@ pure fn lookup_cur_matched_by_matched(r: &tt_reader_,
|
|||
vec::foldl(start, r.repeat_idx, red)
|
||||
}
|
||||
|
||||
fn lookup_cur_matched(r: &tt_reader_, name: ident) -> @named_match {
|
||||
fn lookup_cur_matched(r: @mut TtReader, name: ident) -> @named_match {
|
||||
lookup_cur_matched_by_matched(r, r.interpolations.get(&name))
|
||||
}
|
||||
enum lis {
|
||||
lis_unconstrained, lis_constraint(uint, ident), lis_contradiction(~str)
|
||||
}
|
||||
|
||||
fn lockstep_iter_size(t: token_tree, r: &tt_reader_) -> lis {
|
||||
fn lis_merge(lhs: lis, rhs: lis, r: &tt_reader_) -> lis {
|
||||
fn lockstep_iter_size(t: token_tree, r: @mut TtReader) -> lis {
|
||||
fn lis_merge(lhs: lis, rhs: lis, r: @mut TtReader) -> lis {
|
||||
match lhs {
|
||||
lis_unconstrained => rhs,
|
||||
lis_contradiction(_) => lhs,
|
||||
|
@ -151,7 +161,7 @@ fn lockstep_iter_size(t: token_tree, r: &tt_reader_) -> lis {
|
|||
}
|
||||
|
||||
|
||||
pub fn tt_next_token(r: &tt_reader_) -> TokenAndSpan {
|
||||
pub fn tt_next_token(r: @mut TtReader) -> TokenAndSpan {
|
||||
let ret_val = TokenAndSpan { tok: r.cur_tok, sp: r.cur_span };
|
||||
while r.cur.idx >= r.cur.readme.len() {
|
||||
/* done with this set; pop or repeat? */
|
||||
|
@ -159,11 +169,11 @@ pub fn tt_next_token(r: &tt_reader_) -> TokenAndSpan {
|
|||
|| r.repeat_idx.last() == r.repeat_len.last() - 1 {
|
||||
|
||||
match r.cur.up {
|
||||
tt_frame_up(None) => {
|
||||
None => {
|
||||
r.cur_tok = EOF;
|
||||
return ret_val;
|
||||
}
|
||||
tt_frame_up(Some(tt_f)) => {
|
||||
Some(tt_f) => {
|
||||
if r.cur.dotdotdoted {
|
||||
r.repeat_idx.pop();
|
||||
r.repeat_len.pop();
|
||||
|
@ -178,8 +188,8 @@ pub fn tt_next_token(r: &tt_reader_) -> TokenAndSpan {
|
|||
r.cur.idx = 0u;
|
||||
r.repeat_idx[r.repeat_idx.len() - 1u] += 1u;
|
||||
match r.cur.sep {
|
||||
Some(ref tk) => {
|
||||
r.cur_tok = (*tk); /* repeat same span, I guess */
|
||||
Some(tk) => {
|
||||
r.cur_tok = tk; /* repeat same span, I guess */
|
||||
return ret_val;
|
||||
}
|
||||
None => ()
|
||||
|
@ -189,20 +199,25 @@ pub fn tt_next_token(r: &tt_reader_) -> TokenAndSpan {
|
|||
loop { /* because it's easiest, this handles `tt_delim` not starting
|
||||
with a `tt_tok`, even though it won't happen */
|
||||
match r.cur.readme[r.cur.idx] {
|
||||
tt_delim(ref tts) => {
|
||||
r.cur = @{readme: (*tts), mut idx: 0u, dotdotdoted: false,
|
||||
sep: None, up: tt_frame_up(option::Some(r.cur)) };
|
||||
tt_delim(copy tts) => {
|
||||
r.cur = @mut TtFrame {
|
||||
readme: tts,
|
||||
idx: 0u,
|
||||
dotdotdoted: false,
|
||||
sep: None,
|
||||
up: option::Some(r.cur)
|
||||
};
|
||||
// if this could be 0-length, we'd need to potentially recur here
|
||||
}
|
||||
tt_tok(sp, ref tok) => {
|
||||
r.cur_span = sp; r.cur_tok = (*tok);
|
||||
tt_tok(sp, copy tok) => {
|
||||
r.cur_span = sp; r.cur_tok = tok;
|
||||
r.cur.idx += 1u;
|
||||
return ret_val;
|
||||
}
|
||||
tt_seq(sp, ref tts, ref sep, zerok) => {
|
||||
match lockstep_iter_size(tt_seq(sp, (*tts), (*sep), zerok), r) {
|
||||
lis_unconstrained => {
|
||||
r.sp_diag.span_fatal(
|
||||
tt_seq(sp, copy tts, copy sep, zerok) => {
|
||||
match lockstep_iter_size(tt_seq(sp, tts, sep, zerok), r) {
|
||||
lis_unconstrained => {
|
||||
r.sp_diag.span_fatal(
|
||||
sp, /* blame macro writer */
|
||||
~"attempted to repeat an expression \
|
||||
containing no syntax \
|
||||
|
@ -226,12 +241,12 @@ pub fn tt_next_token(r: &tt_reader_) -> TokenAndSpan {
|
|||
} else {
|
||||
r.repeat_len.push(len);
|
||||
r.repeat_idx.push(0u);
|
||||
r.cur = @{
|
||||
readme: (*tts),
|
||||
mut idx: 0u,
|
||||
r.cur = @mut TtFrame {
|
||||
readme: tts,
|
||||
idx: 0u,
|
||||
dotdotdoted: true,
|
||||
sep: (*sep),
|
||||
up: tt_frame_up(option::Some(r.cur))
|
||||
sep: sep,
|
||||
up: option::Some(r.cur)
|
||||
};
|
||||
}
|
||||
}
|
||||
|
|
|
@ -14,7 +14,7 @@ use ast;
|
|||
use codemap::{BytePos, CharPos, CodeMap, FileMap, Pos};
|
||||
use diagnostic;
|
||||
use parse::lexer::{is_whitespace, get_str_from, reader};
|
||||
use parse::lexer::{string_reader, bump, is_eof, nextch, TokenAndSpan};
|
||||
use parse::lexer::{StringReader, bump, is_eof, nextch, TokenAndSpan};
|
||||
use parse::lexer;
|
||||
use parse::token;
|
||||
use parse;
|
||||
|
@ -120,7 +120,7 @@ pub fn strip_doc_comment_decoration(comment: ~str) -> ~str {
|
|||
die!(~"not a doc-comment: " + comment);
|
||||
}
|
||||
|
||||
fn read_to_eol(rdr: string_reader) -> ~str {
|
||||
fn read_to_eol(rdr: @mut StringReader) -> ~str {
|
||||
let mut val = ~"";
|
||||
while rdr.curr != '\n' && !is_eof(rdr) {
|
||||
str::push_char(&mut val, rdr.curr);
|
||||
|
@ -130,26 +130,26 @@ fn read_to_eol(rdr: string_reader) -> ~str {
|
|||
return val;
|
||||
}
|
||||
|
||||
fn read_one_line_comment(rdr: string_reader) -> ~str {
|
||||
fn read_one_line_comment(rdr: @mut StringReader) -> ~str {
|
||||
let val = read_to_eol(rdr);
|
||||
assert ((val[0] == '/' as u8 && val[1] == '/' as u8) ||
|
||||
(val[0] == '#' as u8 && val[1] == '!' as u8));
|
||||
return val;
|
||||
}
|
||||
|
||||
fn consume_non_eol_whitespace(rdr: string_reader) {
|
||||
fn consume_non_eol_whitespace(rdr: @mut StringReader) {
|
||||
while is_whitespace(rdr.curr) && rdr.curr != '\n' && !is_eof(rdr) {
|
||||
bump(rdr);
|
||||
}
|
||||
}
|
||||
|
||||
fn push_blank_line_comment(rdr: string_reader, comments: &mut ~[cmnt]) {
|
||||
fn push_blank_line_comment(rdr: @mut StringReader, comments: &mut ~[cmnt]) {
|
||||
debug!(">>> blank-line comment");
|
||||
let v: ~[~str] = ~[];
|
||||
comments.push({style: blank_line, lines: v, pos: rdr.last_pos});
|
||||
}
|
||||
|
||||
fn consume_whitespace_counting_blank_lines(rdr: string_reader,
|
||||
fn consume_whitespace_counting_blank_lines(rdr: @mut StringReader,
|
||||
comments: &mut ~[cmnt]) {
|
||||
while is_whitespace(rdr.curr) && !is_eof(rdr) {
|
||||
if rdr.col == CharPos(0u) && rdr.curr == '\n' {
|
||||
|
@ -160,7 +160,7 @@ fn consume_whitespace_counting_blank_lines(rdr: string_reader,
|
|||
}
|
||||
|
||||
|
||||
fn read_shebang_comment(rdr: string_reader, code_to_the_left: bool,
|
||||
fn read_shebang_comment(rdr: @mut StringReader, code_to_the_left: bool,
|
||||
comments: &mut ~[cmnt]) {
|
||||
debug!(">>> shebang comment");
|
||||
let p = rdr.last_pos;
|
||||
|
@ -172,7 +172,7 @@ fn read_shebang_comment(rdr: string_reader, code_to_the_left: bool,
|
|||
});
|
||||
}
|
||||
|
||||
fn read_line_comments(rdr: string_reader, code_to_the_left: bool,
|
||||
fn read_line_comments(rdr: @mut StringReader, code_to_the_left: bool,
|
||||
comments: &mut ~[cmnt]) {
|
||||
debug!(">>> line comments");
|
||||
let p = rdr.last_pos;
|
||||
|
@ -221,8 +221,9 @@ fn trim_whitespace_prefix_and_push_line(lines: &mut ~[~str],
|
|||
lines.push(s1);
|
||||
}
|
||||
|
||||
fn read_block_comment(rdr: string_reader, code_to_the_left: bool,
|
||||
comments: &mut ~[cmnt]) {
|
||||
fn read_block_comment(rdr: @mut StringReader,
|
||||
code_to_the_left: bool,
|
||||
comments: &mut ~[cmnt]) {
|
||||
debug!(">>> block comment");
|
||||
let p = rdr.last_pos;
|
||||
let mut lines: ~[~str] = ~[];
|
||||
|
@ -280,13 +281,14 @@ fn read_block_comment(rdr: string_reader, code_to_the_left: bool,
|
|||
comments.push({style: style, lines: lines, pos: p});
|
||||
}
|
||||
|
||||
fn peeking_at_comment(rdr: string_reader) -> bool {
|
||||
fn peeking_at_comment(rdr: @mut StringReader) -> bool {
|
||||
return ((rdr.curr == '/' && nextch(rdr) == '/') ||
|
||||
(rdr.curr == '/' && nextch(rdr) == '*')) ||
|
||||
(rdr.curr == '#' && nextch(rdr) == '!');
|
||||
}
|
||||
|
||||
fn consume_comment(rdr: string_reader, code_to_the_left: bool,
|
||||
fn consume_comment(rdr: @mut StringReader,
|
||||
code_to_the_left: bool,
|
||||
comments: &mut ~[cmnt]) {
|
||||
debug!(">>> consume comment");
|
||||
if rdr.curr == '/' && nextch(rdr) == '/' {
|
||||
|
@ -309,8 +311,9 @@ pub fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
|
|||
let itr = parse::token::mk_fake_ident_interner();
|
||||
let cm = CodeMap::new();
|
||||
let filemap = cm.new_filemap(path, src);
|
||||
let rdr = lexer::new_low_level_string_reader(
|
||||
span_diagnostic, filemap, itr);
|
||||
let rdr = lexer::new_low_level_string_reader(span_diagnostic,
|
||||
filemap,
|
||||
itr);
|
||||
|
||||
let mut comments: ~[cmnt] = ~[];
|
||||
let mut literals: ~[lit] = ~[];
|
||||
|
|
|
@ -24,45 +24,45 @@ use core::either;
use core::str;
use core::u64;

pub use ext::tt::transcribe::{tt_reader, tt_reader_, new_tt_reader};
pub use ext::tt::transcribe::{TtReader, new_tt_reader};

//use std;

pub trait reader {
fn is_eof(&self) -> bool;
fn next_token(&self) -> TokenAndSpan;
fn fatal(&self,~str) -> !;
fn span_diag(&self) -> span_handler;
pure fn interner(&self) -> @token::ident_interner;
fn peek(&self) -> TokenAndSpan;
fn dup(&self) -> reader;
fn is_eof(@mut self) -> bool;
fn next_token(@mut self) -> TokenAndSpan;
fn fatal(@mut self, ~str) -> !;
fn span_diag(@mut self) -> span_handler;
pure fn interner(@mut self) -> @token::ident_interner;
fn peek(@mut self) -> TokenAndSpan;
fn dup(@mut self) -> reader;
}

#[deriving_eq]
pub struct TokenAndSpan {tok: token::Token, sp: span}

pub type string_reader = @string_reader_;
pub type string_reader_ = {
pub struct StringReader {
span_diagnostic: span_handler,
src: @~str,
// The absolute offset within the codemap of the next character to read
mut pos: BytePos,
pos: BytePos,
// The absolute offset within the codemap of the last character read(curr)
mut last_pos: BytePos,
last_pos: BytePos,
// The column of the next character to read
mut col: CharPos,
col: CharPos,
// The last character to be read
mut curr: char,
curr: char,
filemap: @codemap::FileMap,
interner: @token::ident_interner,
/* cached: */
mut peek_tok: token::Token,
mut peek_span: span
};
peek_tok: token::Token,
peek_span: span
}

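The reader trait above, with dup handing back an independent copy behind the trait, is what both the string lexer and the token-tree reader implement later in this file. A rough sketch of the same shape in modern Rust, using placeholder names and a toy character "token" rather than the libsyntax API:

// Sketch: a reader whose implementors can clone themselves behind the
// trait object, in the spirit of reader::dup above.
trait Reader {
    fn is_eof(&self) -> bool;
    fn next_char(&mut self) -> char; // toy stand-in for next_token
    fn dup(&self) -> Box<dyn Reader>;
}

#[derive(Clone)]
struct SourceReader {
    src: String,
    pos: usize, // byte offset of the next character to read
}

impl Reader for SourceReader {
    fn is_eof(&self) -> bool { self.pos >= self.src.len() }
    fn next_char(&mut self) -> char {
        if self.is_eof() { return '\0'; }
        let c = self.src[self.pos..].chars().next().unwrap();
        self.pos += c.len_utf8();
        c
    }
    fn dup(&self) -> Box<dyn Reader> { Box::new(self.clone()) }
}

fn main() {
    let mut a = SourceReader { src: "fn".to_string(), pos: 0 };
    let mut b = a.dup(); // the copy advances independently of a
    assert_eq!(a.next_char(), 'f');
    assert_eq!(b.next_char(), 'f');
}
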
pub fn new_string_reader(span_diagnostic: span_handler,
|
||||
filemap: @codemap::FileMap,
|
||||
itr: @token::ident_interner) -> string_reader {
|
||||
itr: @token::ident_interner)
|
||||
-> @mut StringReader {
|
||||
let r = new_low_level_string_reader(span_diagnostic, filemap, itr);
|
||||
string_advance_token(r); /* fill in peek_* */
|
||||
return r;
|
||||
|
@ -72,18 +72,20 @@ pub fn new_string_reader(span_diagnostic: span_handler,
|
|||
pub fn new_low_level_string_reader(span_diagnostic: span_handler,
|
||||
filemap: @codemap::FileMap,
|
||||
itr: @token::ident_interner)
|
||||
-> string_reader {
|
||||
-> @mut StringReader {
|
||||
// Force the initial reader bump to start on a fresh line
|
||||
let initial_char = '\n';
|
||||
let r = @{span_diagnostic: span_diagnostic, src: filemap.src,
|
||||
mut pos: filemap.start_pos,
|
||||
mut last_pos: filemap.start_pos,
|
||||
mut col: CharPos(0),
|
||||
mut curr: initial_char,
|
||||
filemap: filemap, interner: itr,
|
||||
/* dummy values; not read */
|
||||
mut peek_tok: token::EOF,
|
||||
mut peek_span: codemap::dummy_sp()};
|
||||
let r = @mut StringReader {
|
||||
span_diagnostic: span_diagnostic, src: filemap.src,
|
||||
pos: filemap.start_pos,
|
||||
last_pos: filemap.start_pos,
|
||||
col: CharPos(0),
|
||||
curr: initial_char,
|
||||
filemap: filemap, interner: itr,
|
||||
/* dummy values; not read */
|
||||
peek_tok: token::EOF,
|
||||
peek_span: codemap::dummy_sp()
|
||||
};
|
||||
bump(r);
|
||||
return r;
|
||||
}
|
||||
|
@ -91,58 +93,56 @@ pub fn new_low_level_string_reader(span_diagnostic: span_handler,
// duplicating the string reader is probably a bad idea, in
// that using them will cause interleaved pushes of line
// offsets to the underlying filemap...
fn dup_string_reader(r: &string_reader_) -> string_reader {
@{span_diagnostic: r.span_diagnostic, src: r.src,
mut pos: r.pos,
mut last_pos: r.last_pos,
mut col: r.col, mut curr: r.curr,
filemap: r.filemap, interner: r.interner,
mut peek_tok: r.peek_tok, mut peek_span: r.peek_span}
fn dup_string_reader(r: @mut StringReader) -> @mut StringReader {
@mut StringReader {
span_diagnostic: r.span_diagnostic,
src: r.src,
pos: r.pos,
last_pos: r.last_pos,
col: r.col,
curr: r.curr,
filemap: r.filemap,
interner: r.interner,
peek_tok: r.peek_tok,
peek_span: r.peek_span
}
}

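The comment above is about shared mutable state: a duplicated reader still points at the same filemap, so both copies push line-start offsets into one table and the pushes interleave. A small sketch of that hazard in modern Rust, with Rc<RefCell<...>> standing in for the shared @ filemap; the types and names are illustrative only:

use std::cell::RefCell;
use std::rc::Rc;

// Sketch: two readers share one filemap-like line table. Advancing
// either one pushes into the same table, so the offsets interleave.
#[derive(Clone)]
struct MiniReader {
    lines: Rc<RefCell<Vec<usize>>>, // shared line-start offsets
    pos: usize,
}

impl MiniReader {
    fn bump_past_newline(&mut self, newline_at: usize) {
        self.pos = newline_at + 1;
        self.lines.borrow_mut().push(self.pos);
    }
}

fn main() {
    let shared = Rc::new(RefCell::new(vec![0]));
    let mut a = MiniReader { lines: shared.clone(), pos: 0 };
    let mut b = a.clone(); // "dup": copies pos but still shares lines
    a.bump_past_newline(10);
    b.bump_past_newline(4); // out-of-order push into the shared table
    assert_eq!(*shared.borrow(), vec![0, 11, 5]);
}
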
impl string_reader_: reader {
|
||||
fn is_eof(&self) -> bool { is_eof(self) }
|
||||
impl StringReader: reader {
|
||||
fn is_eof(@mut self) -> bool { is_eof(self) }
|
||||
// return the next token. EFFECT: advances the string_reader.
|
||||
fn next_token(&self) -> TokenAndSpan {
|
||||
fn next_token(@mut self) -> TokenAndSpan {
|
||||
let ret_val = TokenAndSpan {tok: self.peek_tok, sp: self.peek_span};
|
||||
string_advance_token(self);
|
||||
return ret_val;
|
||||
}
|
||||
fn fatal(&self, m: ~str) -> ! {
|
||||
fn fatal(@mut self, m: ~str) -> ! {
|
||||
self.span_diagnostic.span_fatal(copy self.peek_span, m)
|
||||
}
|
||||
fn span_diag(&self) -> span_handler { self.span_diagnostic }
|
||||
pure fn interner(&self) -> @token::ident_interner { self.interner }
|
||||
fn peek(&self) -> TokenAndSpan {
|
||||
fn span_diag(@mut self) -> span_handler { self.span_diagnostic }
|
||||
pure fn interner(@mut self) -> @token::ident_interner { self.interner }
|
||||
fn peek(@mut self) -> TokenAndSpan {
|
||||
TokenAndSpan {tok: self.peek_tok, sp: self.peek_span}
|
||||
}
|
||||
fn dup(&self) -> reader { dup_string_reader(self) as reader }
|
||||
fn dup(@mut self) -> reader { dup_string_reader(self) as reader }
|
||||
}
|
||||
|
||||
pub impl tt_reader_: reader {
|
||||
fn is_eof(&self) -> bool { self.cur_tok == token::EOF }
|
||||
fn next_token(&self) -> TokenAndSpan {
|
||||
/* weird resolve bug: if the following `if`, or any of its
|
||||
statements are removed, we get resolution errors */
|
||||
if false {
|
||||
let _ignore_me = 0;
|
||||
let _me_too = self.cur.readme[self.cur.idx];
|
||||
}
|
||||
tt_next_token(self)
|
||||
}
|
||||
fn fatal(&self, m: ~str) -> ! {
|
||||
pub impl TtReader: reader {
|
||||
fn is_eof(@mut self) -> bool { self.cur_tok == token::EOF }
|
||||
fn next_token(@mut self) -> TokenAndSpan { tt_next_token(self) }
|
||||
fn fatal(@mut self, m: ~str) -> ! {
|
||||
self.sp_diag.span_fatal(copy self.cur_span, m);
|
||||
}
|
||||
fn span_diag(&self) -> span_handler { self.sp_diag }
|
||||
pure fn interner(&self) -> @token::ident_interner { self.interner }
|
||||
fn peek(&self) -> TokenAndSpan {
|
||||
fn span_diag(@mut self) -> span_handler { self.sp_diag }
|
||||
pure fn interner(@mut self) -> @token::ident_interner { self.interner }
|
||||
fn peek(@mut self) -> TokenAndSpan {
|
||||
TokenAndSpan { tok: self.cur_tok, sp: self.cur_span }
|
||||
}
|
||||
fn dup(&self) -> reader { dup_tt_reader(self) as reader }
|
||||
fn dup(@mut self) -> reader { dup_tt_reader(self) as reader }
|
||||
}
|
||||
|
||||
// EFFECT: advance peek_tok and peek_span to refer to the next token.
|
||||
fn string_advance_token(r: &string_reader_) {
|
||||
fn string_advance_token(r: @mut StringReader) {
|
||||
match (consume_whitespace_and_comments(r)) {
|
||||
Some(comment) => {
|
||||
r.peek_tok = comment.tok;
|
||||
|
@ -160,11 +160,11 @@ fn string_advance_token(r: &string_reader_) {
|
|||
}
|
||||
}
|
||||
|
||||
fn byte_offset(rdr: &string_reader_) -> BytePos {
|
||||
fn byte_offset(rdr: @mut StringReader) -> BytePos {
|
||||
(rdr.pos - rdr.filemap.start_pos)
|
||||
}
|
||||
|
||||
pub fn get_str_from(rdr: &string_reader_, start: BytePos) -> ~str {
|
||||
pub fn get_str_from(rdr: @mut StringReader, start: BytePos) -> ~str {
|
||||
unsafe {
|
||||
// I'm pretty skeptical about this subtraction. What if there's a
|
||||
// multi-byte character before the mark?
|
||||
|
@ -175,7 +175,7 @@ pub fn get_str_from(rdr: &string_reader_, start: BytePos) -> ~str {
|
|||
|
||||
// EFFECT: advance the StringReader by one character. If a newline is
|
||||
// discovered, add it to the FileMap's list of line start offsets.
|
||||
pub fn bump(rdr: &string_reader_) {
|
||||
pub fn bump(rdr: @mut StringReader) {
|
||||
rdr.last_pos = rdr.pos;
|
||||
let current_byte_offset = byte_offset(rdr).to_uint();;
|
||||
if current_byte_offset < (*rdr.src).len() {
|
||||
|
@ -199,10 +199,10 @@ pub fn bump(rdr: &string_reader_) {
|
|||
rdr.curr = -1 as char;
|
||||
}
|
||||
}
|
||||
pub fn is_eof(rdr: &string_reader_) -> bool {
|
||||
pub fn is_eof(rdr: @mut StringReader) -> bool {
|
||||
rdr.curr == -1 as char
|
||||
}
|
||||
pub fn nextch(rdr: &string_reader_) -> char {
|
||||
pub fn nextch(rdr: @mut StringReader) -> char {
|
||||
let offset = byte_offset(rdr).to_uint();
|
||||
if offset < (*rdr.src).len() {
|
||||
return str::char_at(*rdr.src, offset);
|
||||
|
@ -247,8 +247,8 @@ fn is_bin_digit(c: char) -> bool { return c == '0' || c == '1'; }
|
|||
|
||||
// EFFECT: eats whitespace and comments.
|
||||
// returns a Some(sugared-doc-attr) if one exists, None otherwise.
|
||||
fn consume_whitespace_and_comments(rdr: &string_reader_)
|
||||
-> Option<TokenAndSpan> {
|
||||
fn consume_whitespace_and_comments(rdr: @mut StringReader)
|
||||
-> Option<TokenAndSpan> {
|
||||
while is_whitespace(rdr.curr) { bump(rdr); }
|
||||
return consume_any_line_comment(rdr);
|
||||
}
|
||||
|
@ -256,8 +256,8 @@ fn consume_whitespace_and_comments(rdr: &string_reader_)
|
|||
// PRECONDITION: rdr.curr is not whitespace
|
||||
// EFFECT: eats any kind of comment.
|
||||
// returns a Some(sugared-doc-attr) if one exists, None otherwise
|
||||
fn consume_any_line_comment(rdr: &string_reader_)
|
||||
-> Option<TokenAndSpan> {
|
||||
fn consume_any_line_comment(rdr: @mut StringReader)
|
||||
-> Option<TokenAndSpan> {
|
||||
if rdr.curr == '/' {
|
||||
match nextch(rdr) {
|
||||
'/' => {
|
||||
|
@ -299,9 +299,8 @@ fn consume_any_line_comment(rdr: &string_reader_)
|
|||
}
|
||||
|
||||
// might return a sugared-doc-attr
|
||||
fn consume_block_comment(rdr: &string_reader_)
|
||||
-> Option<TokenAndSpan> {
|
||||
|
||||
fn consume_block_comment(rdr: @mut StringReader)
|
||||
-> Option<TokenAndSpan> {
|
||||
// block comments starting with "/**" or "/*!" are doc-comments
|
||||
if rdr.curr == '*' || rdr.curr == '!' {
|
||||
let start_bpos = rdr.pos - BytePos(2u);
|
||||
|
@ -338,7 +337,7 @@ fn consume_block_comment(rdr: &string_reader_)
|
|||
return consume_whitespace_and_comments(rdr);
|
||||
}
|
||||
|
||||
fn scan_exponent(rdr: &string_reader_) -> Option<~str> {
|
||||
fn scan_exponent(rdr: @mut StringReader) -> Option<~str> {
|
||||
let mut c = rdr.curr;
|
||||
let mut rslt = ~"";
|
||||
if c == 'e' || c == 'E' {
|
||||
|
@ -356,7 +355,7 @@ fn scan_exponent(rdr: &string_reader_) -> Option<~str> {
|
|||
} else { return None::<~str>; }
|
||||
}
|
||||
|
||||
fn scan_digits(rdr: &string_reader_, radix: uint) -> ~str {
|
||||
fn scan_digits(rdr: @mut StringReader, radix: uint) -> ~str {
|
||||
let mut rslt = ~"";
|
||||
loop {
|
||||
let c = rdr.curr;
|
||||
|
@ -371,7 +370,7 @@ fn scan_digits(rdr: &string_reader_, radix: uint) -> ~str {
|
|||
};
|
||||
}
|
||||
|
||||
fn scan_number(c: char, rdr: &string_reader_) -> token::Token {
|
||||
fn scan_number(c: char, rdr: @mut StringReader) -> token::Token {
|
||||
let mut num_str, base = 10u, c = c, n = nextch(rdr);
|
||||
if c == '0' && n == 'x' {
|
||||
bump(rdr);
|
||||
|
@ -487,7 +486,7 @@ fn scan_number(c: char, rdr: &string_reader_) -> token::Token {
|
|||
}
|
||||
}
|
||||
|
||||
fn scan_numeric_escape(rdr: &string_reader_, n_hex_digits: uint) -> char {
|
||||
fn scan_numeric_escape(rdr: @mut StringReader, n_hex_digits: uint) -> char {
|
||||
let mut accum_int = 0, i = n_hex_digits;
|
||||
while i != 0u {
|
||||
let n = rdr.curr;
|
||||
|
@ -502,7 +501,7 @@ fn scan_numeric_escape(rdr: &string_reader_, n_hex_digits: uint) -> char {
|
|||
return accum_int as char;
|
||||
}
|
||||
|
||||
fn next_token_inner(rdr: &string_reader_) -> token::Token {
|
||||
fn next_token_inner(rdr: @mut StringReader) -> token::Token {
|
||||
let mut accum_str = ~"";
|
||||
let mut c = rdr.curr;
|
||||
if (c >= 'a' && c <= 'z')
|
||||
|
@ -527,7 +526,7 @@ fn next_token_inner(rdr: &string_reader_) -> token::Token {
|
|||
if is_dec_digit(c) {
|
||||
return scan_number(c, rdr);
|
||||
}
|
||||
fn binop(rdr: &string_reader_, op: token::binop) -> token::Token {
|
||||
fn binop(rdr: @mut StringReader, op: token::binop) -> token::Token {
|
||||
bump(rdr);
|
||||
if rdr.curr == '=' {
|
||||
bump(rdr);
|
||||
|
@ -720,7 +719,7 @@ fn next_token_inner(rdr: &string_reader_) -> token::Token {
|
|||
}
|
||||
}
|
||||
|
||||
fn consume_whitespace(rdr: &string_reader_) {
|
||||
fn consume_whitespace(rdr: @mut StringReader) {
|
||||
while is_whitespace(rdr.curr) && !is_eof(rdr) { bump(rdr); }
|
||||
}
|
||||
|
||||
|
|
|
@ -15,9 +15,9 @@ use ast::node_id;
use ast;
use codemap::{span, CodeMap, FileMap, CharPos, BytePos};
use codemap;
use diagnostic::{span_handler, mk_span_handler, mk_handler, emitter};
use diagnostic::{span_handler, mk_span_handler, mk_handler, Emitter};
use parse::attr::parser_attr;
use parse::lexer::{reader, string_reader};
use parse::lexer::{reader, StringReader};
use parse::parser::Parser;
use parse::token::{ident_interner, mk_ident_interner};
use util::interner;

@ -54,7 +54,7 @@ pub type parse_sess = @{
interner: @ident_interner,
};

pub fn new_parse_sess(demitter: Option<emitter>) -> parse_sess {
pub fn new_parse_sess(demitter: Option<Emitter>) -> parse_sess {
let cm = @CodeMap::new();
return @{cm: cm,
mut next_id: 1,

@ -166,18 +166,22 @@ pub fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
|
|||
+name: ~str, +ss: codemap::FileSubstr,
|
||||
source: @~str) -> Parser {
|
||||
let filemap = sess.cm.new_filemap_w_substr(name, ss, source);
|
||||
let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap,
|
||||
let srdr = lexer::new_string_reader(sess.span_diagnostic,
|
||||
filemap,
|
||||
sess.interner);
|
||||
return Parser(sess, cfg, srdr as reader);
|
||||
}
|
||||
|
||||
pub fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg,
|
||||
path: &Path) -> Result<Parser, ~str> {
|
||||
pub fn new_parser_from_file(sess: parse_sess,
|
||||
cfg: ast::crate_cfg,
|
||||
path: &Path)
|
||||
-> Result<Parser, ~str> {
|
||||
match io::read_whole_file_str(path) {
|
||||
result::Ok(move src) => {
|
||||
|
||||
let filemap = sess.cm.new_filemap(path.to_str(), @move src);
|
||||
let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap,
|
||||
let srdr = lexer::new_string_reader(sess.span_diagnostic,
|
||||
filemap,
|
||||
sess.interner);
|
||||
|
||||
Ok(Parser(sess, cfg, srdr as reader))
|
||||
|
|
|
@ -32,7 +32,7 @@ use core::vec;
|
|||
* I am implementing this algorithm because it comes with 20 pages of
|
||||
* documentation explaining its theory, and because it addresses the set of
|
||||
* concerns I've seen other pretty-printers fall down on. Weirdly. Even though
|
||||
* it's 32 years old and not written in Haskell. What can I say?
|
||||
* it's 32 years old. What can I say?
|
||||
*
|
||||
* Despite some redundancies and quirks in the way it's implemented in that
|
||||
* paper, I've opted to keep the implementation here as similar as I can,
|
||||
|
@ -69,20 +69,9 @@ use core::vec;
|
|||
* line (which it can't) and so naturally place the content on its own line to
|
||||
* avoid combining it with other lines and making matters even worse.
|
||||
*/
|
||||
#[deriving_eq]
pub enum breaks { consistent, inconsistent, }

pub impl breaks : cmp::Eq {
pure fn eq(&self, other: &breaks) -> bool {
match ((*self), (*other)) {
(consistent, consistent) => true,
(inconsistent, inconsistent) => true,
(consistent, _) => false,
(inconsistent, _) => false,
}
}
pure fn ne(&self, other: &breaks) -> bool { !(*self).eq(other) }
}

pub type break_t = {offset: int, blank_space: int};

pub type begin_t = {offset: int, breaks: breaks};

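This hunk trades a hand-written cmp::Eq impl for the #[deriving_eq] attribute. The same move in today's Rust would look roughly like the following; a minimal sketch, not the libsyntax source:

// Sketch: derive the comparison instead of writing it by hand.
#[derive(PartialEq, Eq, Clone, Copy)]
pub enum Breaks {
    Consistent,
    Inconsistent,
}

fn main() {
    assert!(Breaks::Consistent == Breaks::Consistent);
    assert!(Breaks::Consistent != Breaks::Inconsistent);
}
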
@ -96,11 +85,11 @@ pub enum token {
|
|||
}
|
||||
|
||||
pub impl token {
|
||||
fn is_eof() -> bool {
|
||||
match self { EOF => true, _ => false }
|
||||
fn is_eof(&self) -> bool {
|
||||
match *self { EOF => true, _ => false }
|
||||
}
|
||||
fn is_hardbreak_tok() -> bool {
|
||||
match self {
|
||||
fn is_hardbreak_tok(&self) -> bool {
|
||||
match *self {
|
||||
BREAK({offset: 0, blank_space: bs }) if bs == size_infinity =>
|
||||
true,
|
||||
_ =>
|
||||
|
@ -111,11 +100,11 @@ pub impl token {
|
|||
|
||||
pub fn tok_str(++t: token) -> ~str {
|
||||
match t {
|
||||
STRING(s, len) => return fmt!("STR(%s,%d)", *s, len),
|
||||
BREAK(_) => return ~"BREAK",
|
||||
BEGIN(_) => return ~"BEGIN",
|
||||
END => return ~"END",
|
||||
EOF => return ~"EOF"
|
||||
STRING(s, len) => return fmt!("STR(%s,%d)", *s, len),
|
||||
BREAK(_) => return ~"BREAK",
|
||||
BEGIN(_) => return ~"BEGIN",
|
||||
END => return ~"END",
|
||||
EOF => return ~"EOF"
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -143,7 +132,7 @@ pub type print_stack_elt = {offset: int, pbreak: print_stack_break};
|
|||
|
||||
pub const size_infinity: int = 0xffff;
|
||||
|
||||
pub fn mk_printer(out: io::Writer, linewidth: uint) -> printer {
|
||||
pub fn mk_printer(out: @io::Writer, linewidth: uint) -> @mut Printer {
|
||||
// Yes 3, it makes the ring buffers big enough to never
|
||||
// fall behind.
|
||||
let n: uint = 3 * linewidth;
|
||||
|
@ -151,22 +140,24 @@ pub fn mk_printer(out: io::Writer, linewidth: uint) -> printer {
|
|||
let mut token: ~[token] = vec::from_elem(n, EOF);
|
||||
let mut size: ~[int] = vec::from_elem(n, 0);
|
||||
let mut scan_stack: ~[uint] = vec::from_elem(n, 0u);
|
||||
printer_(@{out: out,
|
||||
buf_len: n,
|
||||
mut margin: linewidth as int,
|
||||
mut space: linewidth as int,
|
||||
mut left: 0,
|
||||
mut right: 0,
|
||||
mut token: move token,
|
||||
mut size: move size,
|
||||
mut left_total: 0,
|
||||
mut right_total: 0,
|
||||
mut scan_stack: move scan_stack,
|
||||
mut scan_stack_empty: true,
|
||||
mut top: 0,
|
||||
mut bottom: 0,
|
||||
print_stack: DVec(),
|
||||
mut pending_indentation: 0 })
|
||||
@mut Printer {
|
||||
out: @out,
|
||||
buf_len: n,
|
||||
margin: linewidth as int,
|
||||
space: linewidth as int,
|
||||
left: 0,
|
||||
right: 0,
|
||||
token: move token,
|
||||
size: move size,
|
||||
left_total: 0,
|
||||
right_total: 0,
|
||||
scan_stack: move scan_stack,
|
||||
scan_stack_empty: true,
|
||||
top: 0,
|
||||
bottom: 0,
|
||||
print_stack: @mut ~[],
|
||||
pending_indentation: 0
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
@ -247,42 +238,38 @@ pub fn mk_printer(out: io::Writer, linewidth: uint) -> printer {
|
|||
* the method called 'pretty_print', and the 'PRINT' process is the method
|
||||
* called 'print'.
|
||||
*/
|
||||
pub type printer_ = {
out: io::Writer,
pub struct Printer {
out: @@io::Writer,
buf_len: uint,
mut margin: int, // width of lines we're constrained to
mut space: int, // number of spaces left on line
mut left: uint, // index of left side of input stream
mut right: uint, // index of right side of input stream
mut token: ~[token], // ring-buffr stream goes through
mut size: ~[int], // ring-buffer of calculated sizes
mut left_total: int, // running size of stream "...left"
mut right_total: int, // running size of stream "...right"
margin: int, // width of lines we're constrained to
space: int, // number of spaces left on line
left: uint, // index of left side of input stream
right: uint, // index of right side of input stream
token: ~[token], // ring-buffr stream goes through
size: ~[int], // ring-buffer of calculated sizes
left_total: int, // running size of stream "...left"
right_total: int, // running size of stream "...right"
// pseudo-stack, really a ring too. Holds the
// primary-ring-buffers index of the BEGIN that started the
// current block, possibly with the most recent BREAK after that
// BEGIN (if there is any) on top of it. Stuff is flushed off the
// bottom as it becomes irrelevant due to the primary ring-buffer
// advancing.
mut scan_stack: ~[uint],
mut scan_stack_empty: bool, // top==bottom disambiguator
mut top: uint, // index of top of scan_stack
mut bottom: uint, // index of bottom of scan_stack
scan_stack: ~[uint],
scan_stack_empty: bool, // top==bottom disambiguator
top: uint, // index of top of scan_stack
bottom: uint, // index of bottom of scan_stack
// stack of blocks-in-progress being flushed by print
print_stack: DVec<print_stack_elt>,
print_stack: @mut ~[print_stack_elt],
// buffered indentation to avoid writing trailing whitespace
mut pending_indentation: int,
};

pub enum printer {
printer_(@printer_)
pending_indentation: int,
}

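The comments above describe scan_stack as a stack carved out of a fixed-size ring: top and bottom are indices into the buffer, and scan_stack_empty disambiguates the top == bottom case, which is why the scan_pop / scan_pop_bottom code below wraps its indices modulo buf_len. A compact sketch of that ring-backed stack in modern Rust; the capacity, names and usize payload are illustrative, not the pp.rs code:

// Sketch: a bounded stack stored in a ring buffer, in the style of the
// scan_push / scan_pop / scan_pop_bottom methods on Printer below.
struct ScanStack {
    buf: Vec<usize>,
    top: usize,
    bottom: usize,
    empty: bool, // disambiguates top == bottom
}

impl ScanStack {
    fn new(cap: usize) -> ScanStack {
        ScanStack { buf: vec![0; cap], top: 0, bottom: 0, empty: true }
    }
    fn push(&mut self, x: usize) {
        if self.empty {
            self.empty = false;
        } else {
            self.top = (self.top + 1) % self.buf.len();
            assert!(self.top != self.bottom, "ring buffer overflow");
        }
        self.buf[self.top] = x;
    }
    fn pop(&mut self) -> usize {
        assert!(!self.empty);
        let x = self.buf[self.top];
        if self.top == self.bottom {
            self.empty = true;
        } else {
            // step top back one slot, wrapping around the ring
            self.top = (self.top + self.buf.len() - 1) % self.buf.len();
        }
        x
    }
    fn pop_bottom(&mut self) -> usize {
        assert!(!self.empty);
        let x = self.buf[self.bottom];
        if self.top == self.bottom {
            self.empty = true;
        } else {
            self.bottom = (self.bottom + 1) % self.buf.len();
        }
        x
    }
}

fn main() {
    let mut s = ScanStack::new(8);
    s.push(1);
    s.push(2);
    s.push(3);
    assert_eq!(s.pop(), 3);        // newest entry comes off the top
    assert_eq!(s.pop_bottom(), 1); // oldest entry is flushed off the bottom
    assert_eq!(s.pop(), 2);
    assert!(s.empty);
}
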
pub impl printer {
|
||||
fn last_token() -> token { self.token[self.right] }
|
||||
pub impl Printer {
|
||||
fn last_token(&mut self) -> token { self.token[self.right] }
|
||||
// be very careful with this!
|
||||
fn replace_last_token(t: token) { self.token[self.right] = t; }
|
||||
fn pretty_print(t: token) {
|
||||
fn replace_last_token(&mut self, t: token) { self.token[self.right] = t; }
|
||||
fn pretty_print(&mut self, t: token) {
|
||||
debug!("pp ~[%u,%u]", self.left, self.right);
|
||||
match t {
|
||||
EOF => {
|
||||
|
@ -350,7 +337,7 @@ pub impl printer {
|
|||
}
|
||||
}
|
||||
}
|
||||
fn check_stream() {
|
||||
fn check_stream(&mut self) {
|
||||
debug!("check_stream ~[%u, %u] with left_total=%d, right_total=%d",
|
||||
self.left, self.right, self.left_total, self.right_total);
|
||||
if self.right_total - self.left_total > self.space {
|
||||
|
@ -366,7 +353,7 @@ pub impl printer {
|
|||
if self.left != self.right { self.check_stream(); }
|
||||
}
|
||||
}
|
||||
fn scan_push(x: uint) {
|
||||
fn scan_push(&mut self, x: uint) {
|
||||
debug!("scan_push %u", x);
|
||||
if self.scan_stack_empty {
|
||||
self.scan_stack_empty = false;
|
||||
|
@ -377,7 +364,7 @@ pub impl printer {
|
|||
}
|
||||
self.scan_stack[self.top] = x;
|
||||
}
|
||||
fn scan_pop() -> uint {
|
||||
fn scan_pop(&mut self) -> uint {
|
||||
assert (!self.scan_stack_empty);
|
||||
let x = self.scan_stack[self.top];
|
||||
if self.top == self.bottom {
|
||||
|
@ -385,11 +372,11 @@ pub impl printer {
|
|||
} else { self.top += self.buf_len - 1u; self.top %= self.buf_len; }
|
||||
return x;
|
||||
}
|
||||
fn scan_top() -> uint {
|
||||
fn scan_top(&mut self) -> uint {
|
||||
assert (!self.scan_stack_empty);
|
||||
return self.scan_stack[self.top];
|
||||
}
|
||||
fn scan_pop_bottom() -> uint {
|
||||
fn scan_pop_bottom(&mut self) -> uint {
|
||||
assert (!self.scan_stack_empty);
|
||||
let x = self.scan_stack[self.bottom];
|
||||
if self.top == self.bottom {
|
||||
|
@ -397,12 +384,12 @@ pub impl printer {
|
|||
} else { self.bottom += 1u; self.bottom %= self.buf_len; }
|
||||
return x;
|
||||
}
|
||||
fn advance_right() {
|
||||
fn advance_right(&mut self) {
|
||||
self.right += 1u;
|
||||
self.right %= self.buf_len;
|
||||
assert (self.right != self.left);
|
||||
}
|
||||
fn advance_left(++x: token, L: int) {
|
||||
fn advance_left(&mut self, ++x: token, L: int) {
|
||||
debug!("advnce_left ~[%u,%u], sizeof(%u)=%d", self.left, self.right,
|
||||
self.left, L);
|
||||
if L >= 0 {
|
||||
|
@ -420,7 +407,7 @@ pub impl printer {
|
|||
}
|
||||
}
|
||||
}
|
||||
fn check_stack(k: int) {
|
||||
fn check_stack(&mut self, k: int) {
|
||||
if !self.scan_stack_empty {
|
||||
let x = self.scan_top();
|
||||
match copy self.token[x] {
|
||||
|
@ -443,17 +430,17 @@ pub impl printer {
|
|||
}
|
||||
}
|
||||
}
|
||||
fn print_newline(amount: int) {
|
||||
fn print_newline(&mut self, amount: int) {
|
||||
debug!("NEWLINE %d", amount);
|
||||
self.out.write_str(~"\n");
|
||||
(*self.out).write_str(~"\n");
|
||||
self.pending_indentation = 0;
|
||||
self.indent(amount);
|
||||
}
|
||||
fn indent(amount: int) {
|
||||
fn indent(&mut self, amount: int) {
|
||||
debug!("INDENT %d", amount);
|
||||
self.pending_indentation += amount;
|
||||
}
|
||||
fn get_top() -> print_stack_elt {
|
||||
fn get_top(&mut self) -> print_stack_elt {
|
||||
let n = self.print_stack.len();
|
||||
if n != 0u {
|
||||
self.print_stack[n - 1u]
|
||||
|
@ -461,14 +448,14 @@ pub impl printer {
|
|||
{offset: 0, pbreak: broken(inconsistent)}
|
||||
}
|
||||
}
|
||||
fn print_str(s: ~str) {
|
||||
fn print_str(&mut self, s: ~str) {
|
||||
while self.pending_indentation > 0 {
|
||||
self.out.write_str(~" ");
|
||||
(*self.out).write_str(~" ");
|
||||
self.pending_indentation -= 1;
|
||||
}
|
||||
self.out.write_str(s);
|
||||
(*self.out).write_str(s);
|
||||
}
|
||||
fn print(x: token, L: int) {
|
||||
fn print(&mut self, x: token, L: int) {
|
||||
debug!("print %s %d (remaining line space=%d)", tok_str(x), L,
|
||||
self.space);
|
||||
log(debug, buf_str(copy self.token,
|
||||
|
@ -539,39 +526,41 @@ pub impl printer {
|
|||
}
|
||||
|
||||
// Convenience functions to talk to the printer.
|
||||
pub fn box(p: printer, indent: uint, b: breaks) {
|
||||
pub fn box(p: @mut Printer, indent: uint, b: breaks) {
|
||||
p.pretty_print(BEGIN({offset: indent as int, breaks: b}));
|
||||
}
|
||||
|
||||
pub fn ibox(p: printer, indent: uint) { box(p, indent, inconsistent); }
|
||||
pub fn ibox(p: @mut Printer, indent: uint) { box(p, indent, inconsistent); }
|
||||
|
||||
pub fn cbox(p: printer, indent: uint) { box(p, indent, consistent); }
|
||||
pub fn cbox(p: @mut Printer, indent: uint) { box(p, indent, consistent); }
|
||||
|
||||
pub fn break_offset(p: printer, n: uint, off: int) {
|
||||
pub fn break_offset(p: @mut Printer, n: uint, off: int) {
|
||||
p.pretty_print(BREAK({offset: off, blank_space: n as int}));
|
||||
}
|
||||
|
||||
pub fn end(p: printer) { p.pretty_print(END); }
|
||||
pub fn end(p: @mut Printer) { p.pretty_print(END); }
|
||||
|
||||
pub fn eof(p: printer) { p.pretty_print(EOF); }
|
||||
pub fn eof(p: @mut Printer) { p.pretty_print(EOF); }
|
||||
|
||||
pub fn word(p: printer, wrd: ~str) {
|
||||
pub fn word(p: @mut Printer, wrd: ~str) {
|
||||
p.pretty_print(STRING(@wrd, str::len(wrd) as int));
|
||||
}
|
||||
|
||||
pub fn huge_word(p: printer, wrd: ~str) {
|
||||
pub fn huge_word(p: @mut Printer, wrd: ~str) {
|
||||
p.pretty_print(STRING(@wrd, size_infinity));
|
||||
}
|
||||
|
||||
pub fn zero_word(p: printer, wrd: ~str) { p.pretty_print(STRING(@wrd, 0)); }
|
||||
pub fn zero_word(p: @mut Printer, wrd: ~str) {
|
||||
p.pretty_print(STRING(@wrd, 0));
|
||||
}
|
||||
|
||||
pub fn spaces(p: printer, n: uint) { break_offset(p, n, 0); }
|
||||
pub fn spaces(p: @mut Printer, n: uint) { break_offset(p, n, 0); }
|
||||
|
||||
pub fn zerobreak(p: printer) { spaces(p, 0u); }
|
||||
pub fn zerobreak(p: @mut Printer) { spaces(p, 0u); }
|
||||
|
||||
pub fn space(p: printer) { spaces(p, 1u); }
|
||||
pub fn space(p: @mut Printer) { spaces(p, 1u); }
|
||||
|
||||
pub fn hardbreak(p: printer) { spaces(p, size_infinity as uint); }
|
||||
pub fn hardbreak(p: @mut Printer) { spaces(p, size_infinity as uint); }
|
||||
|
||||
pub fn hardbreak_tok_offset(off: int) -> token {
|
||||
return BREAK({offset: off, blank_space: size_infinity});
|
||||
|
|
|
@ -23,7 +23,7 @@ use parse::classify::{stmt_ends_with_semi};
|
|||
use parse::token::ident_interner;
|
||||
use parse::{comments, lexer, token};
|
||||
use parse;
|
||||
use print::pp::{break_offset, word, printer, space, zerobreak, hardbreak};
|
||||
use print::pp::{break_offset, word, Printer, space, zerobreak, hardbreak};
|
||||
use print::pp::{breaks, consistent, inconsistent, eof};
|
||||
use print::pp;
|
||||
use print::pprust;
|
||||
|
@ -37,12 +37,12 @@ use core::str;
|
|||
use core::u64;
|
||||
use core::vec;
|
||||
|
||||
// The ps is stored here to prevent recursive type.
|
||||
// The @ps is stored here to prevent recursive type.
|
||||
pub enum ann_node {
|
||||
node_block(ps, ast::blk),
|
||||
node_item(ps, @ast::item),
|
||||
node_expr(ps, @ast::expr),
|
||||
node_pat(ps, @ast::pat),
|
||||
node_block(@ps, ast::blk),
|
||||
node_item(@ps, @ast::item),
|
||||
node_expr(@ps, @ast::expr),
|
||||
node_pat(@ps, @ast::pat),
|
||||
}
|
||||
pub struct pp_ann {
|
||||
pre: fn@(ann_node),
|
||||
|
@ -54,37 +54,46 @@ pub fn no_ann() -> pp_ann {
|
|||
return pp_ann {pre: ignore, post: ignore};
|
||||
}
|
||||
|
||||
pub type ps =
@{s: pp::printer,
cm: Option<@CodeMap>,
intr: @token::ident_interner,
comments: Option<~[comments::cmnt]>,
literals: Option<~[comments::lit]>,
mut cur_cmnt: uint,
mut cur_lit: uint,
boxes: DVec<pp::breaks>,
ann: pp_ann};
pub struct CurrentCommentAndLiteral {
cur_cmnt: uint,
cur_lit: uint,
}

pub fn ibox(s: ps, u: uint) {
pub struct ps {
s: @mut pp::Printer,
cm: Option<@CodeMap>,
intr: @token::ident_interner,
comments: Option<~[comments::cmnt]>,
literals: Option<~[comments::lit]>,
cur_cmnt_and_lit: @mut CurrentCommentAndLiteral,
boxes: DVec<pp::breaks>,
ann: pp_ann
}

pub fn ibox(s: @ps, u: uint) {
s.boxes.push(pp::inconsistent);
pp::ibox(s.s, u);
}

pub fn end(s: ps) {
pub fn end(s: @ps) {
s.boxes.pop();
pp::end(s.s);
}

pub fn rust_printer(writer: io::Writer, intr: @ident_interner) -> ps {
return @{s: pp::mk_printer(writer, default_columns),
cm: None::<@CodeMap>,
intr: intr,
comments: None::<~[comments::cmnt]>,
literals: None::<~[comments::lit]>,
mut cur_cmnt: 0u,
mut cur_lit: 0u,
boxes: DVec(),
ann: no_ann()};
pub fn rust_printer(writer: io::Writer, intr: @ident_interner) -> @ps {
return @ps {
s: pp::mk_printer(writer, default_columns),
cm: None::<@CodeMap>,
intr: intr,
comments: None::<~[comments::cmnt]>,
literals: None::<~[comments::lit]>,
cur_cmnt_and_lit: @mut CurrentCommentAndLiteral {
cur_cmnt: 0,
cur_lit: 0
},
boxes: DVec(),
ann: no_ann()
};
}

pub const indent_unit: uint = 4u;
|
||||
|
@ -101,23 +110,26 @@ pub fn print_crate(cm: @CodeMap, intr: @ident_interner,
|
|||
out: io::Writer, ann: pp_ann, is_expanded: bool) {
|
||||
let r = comments::gather_comments_and_literals(span_diagnostic,
|
||||
filename, in);
|
||||
let s =
|
||||
@{s: pp::mk_printer(out, default_columns),
|
||||
cm: Some(cm),
|
||||
intr: intr,
|
||||
comments: Some(r.cmnts),
|
||||
// If the code is post expansion, don't use the table of
|
||||
// literals, since it doesn't correspond with the literals
|
||||
// in the AST anymore.
|
||||
literals: if is_expanded { None } else { Some(r.lits) },
|
||||
mut cur_cmnt: 0u,
|
||||
mut cur_lit: 0u,
|
||||
boxes: DVec(),
|
||||
ann: ann};
|
||||
let s = @ps {
|
||||
s: pp::mk_printer(out, default_columns),
|
||||
cm: Some(cm),
|
||||
intr: intr,
|
||||
comments: Some(r.cmnts),
|
||||
// If the code is post expansion, don't use the table of
|
||||
// literals, since it doesn't correspond with the literals
|
||||
// in the AST anymore.
|
||||
literals: if is_expanded { None } else { Some(r.lits) },
|
||||
cur_cmnt_and_lit: @mut CurrentCommentAndLiteral {
|
||||
cur_cmnt: 0,
|
||||
cur_lit: 0
|
||||
},
|
||||
boxes: DVec(),
|
||||
ann: ann
|
||||
};
|
||||
print_crate_(s, crate);
|
||||
}
|
||||
|
||||
pub fn print_crate_(s: ps, &&crate: @ast::crate) {
|
||||
pub fn print_crate_(s: @ps, &&crate: @ast::crate) {
|
||||
print_mod(s, crate.node.module, crate.node.attrs);
|
||||
print_remaining_comments(s);
|
||||
eof(s.s);
|
||||
|
@ -194,27 +206,27 @@ pub fn variant_to_str(var: ast::variant, intr: @ident_interner) -> ~str {
|
|||
to_str(var, print_variant, intr)
|
||||
}
|
||||
|
||||
pub fn cbox(s: ps, u: uint) {
|
||||
pub fn cbox(s: @ps, u: uint) {
|
||||
s.boxes.push(pp::consistent);
|
||||
pp::cbox(s.s, u);
|
||||
}
|
||||
|
||||
pub fn box(s: ps, u: uint, b: pp::breaks) {
|
||||
pub fn box(s: @ps, u: uint, b: pp::breaks) {
|
||||
s.boxes.push(b);
|
||||
pp::box(s.s, u, b);
|
||||
}
|
||||
|
||||
pub fn nbsp(s: ps) { word(s.s, ~" "); }
|
||||
pub fn nbsp(s: @ps) { word(s.s, ~" "); }
|
||||
|
||||
pub fn word_nbsp(s: ps, w: ~str) { word(s.s, w); nbsp(s); }
|
||||
pub fn word_nbsp(s: @ps, w: ~str) { word(s.s, w); nbsp(s); }
|
||||
|
||||
pub fn word_space(s: ps, w: ~str) { word(s.s, w); space(s.s); }
|
||||
pub fn word_space(s: @ps, w: ~str) { word(s.s, w); space(s.s); }
|
||||
|
||||
pub fn popen(s: ps) { word(s.s, ~"("); }
|
||||
pub fn popen(s: @ps) { word(s.s, ~"("); }
|
||||
|
||||
pub fn pclose(s: ps) { word(s.s, ~")"); }
|
||||
pub fn pclose(s: @ps) { word(s.s, ~")"); }
|
||||
|
||||
pub fn head(s: ps, w: ~str) {
|
||||
pub fn head(s: @ps, w: ~str) {
|
||||
// outer-box is consistent
|
||||
cbox(s, indent_unit);
|
||||
// head-box is inconsistent
|
||||
|
@ -225,15 +237,15 @@ pub fn head(s: ps, w: ~str) {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn bopen(s: ps) {
|
||||
pub fn bopen(s: @ps) {
|
||||
word(s.s, ~"{");
|
||||
end(s); // close the head-box
|
||||
}
|
||||
|
||||
pub fn bclose_(s: ps, span: codemap::span, indented: uint) {
|
||||
pub fn bclose_(s: @ps, span: codemap::span, indented: uint) {
|
||||
bclose_maybe_open(s, span, indented, true);
|
||||
}
|
||||
pub fn bclose_maybe_open (s: ps, span: codemap::span, indented: uint,
|
||||
pub fn bclose_maybe_open (s: @ps, span: codemap::span, indented: uint,
|
||||
close_box: bool) {
|
||||
maybe_print_comment(s, span.hi);
|
||||
break_offset_if_not_bol(s, 1u, -(indented as int));
|
||||
|
@ -242,29 +254,29 @@ pub fn bclose_maybe_open (s: ps, span: codemap::span, indented: uint,
|
|||
end(s); // close the outer-box
|
||||
}
|
||||
}
|
||||
pub fn bclose(s: ps, span: codemap::span) { bclose_(s, span, indent_unit); }
|
||||
pub fn bclose(s: @ps, span: codemap::span) { bclose_(s, span, indent_unit); }
|
||||
|
||||
pub fn is_begin(s: ps) -> bool {
|
||||
pub fn is_begin(s: @ps) -> bool {
|
||||
match s.s.last_token() { pp::BEGIN(_) => true, _ => false }
|
||||
}
|
||||
|
||||
pub fn is_end(s: ps) -> bool {
|
||||
pub fn is_end(s: @ps) -> bool {
|
||||
match s.s.last_token() { pp::END => true, _ => false }
|
||||
}
|
||||
|
||||
pub fn is_bol(s: ps) -> bool {
|
||||
pub fn is_bol(s: @ps) -> bool {
|
||||
return s.s.last_token().is_eof() || s.s.last_token().is_hardbreak_tok();
|
||||
}
|
||||
|
||||
pub fn in_cbox(s: ps) -> bool {
|
||||
pub fn in_cbox(s: @ps) -> bool {
|
||||
let len = s.boxes.len();
|
||||
if len == 0u { return false; }
|
||||
return s.boxes[len - 1u] == pp::consistent;
|
||||
}
|
||||
|
||||
pub fn hardbreak_if_not_bol(s: ps) { if !is_bol(s) { hardbreak(s.s); } }
|
||||
pub fn space_if_not_bol(s: ps) { if !is_bol(s) { space(s.s); } }
|
||||
pub fn break_offset_if_not_bol(s: ps, n: uint, off: int) {
|
||||
pub fn hardbreak_if_not_bol(s: @ps) { if !is_bol(s) { hardbreak(s.s); } }
|
||||
pub fn space_if_not_bol(s: @ps) { if !is_bol(s) { space(s.s); } }
|
||||
pub fn break_offset_if_not_bol(s: @ps, n: uint, off: int) {
|
||||
if !is_bol(s) {
|
||||
break_offset(s.s, n, off);
|
||||
} else {
|
||||
|
@ -279,7 +291,7 @@ pub fn break_offset_if_not_bol(s: ps, n: uint, off: int) {
|
|||
|
||||
// Synthesizes a comment that was not textually present in the original source
|
||||
// file.
|
||||
pub fn synth_comment(s: ps, text: ~str) {
|
||||
pub fn synth_comment(s: @ps, text: ~str) {
|
||||
word(s.s, ~"/*");
|
||||
space(s.s);
|
||||
word(s.s, text);
|
||||
|
@ -287,7 +299,7 @@ pub fn synth_comment(s: ps, text: ~str) {
|
|||
word(s.s, ~"*/");
|
||||
}
|
||||
|
||||
pub fn commasep<IN>(s: ps, b: breaks, elts: ~[IN], op: fn(ps, IN)) {
|
||||
pub fn commasep<IN>(s: @ps, b: breaks, elts: ~[IN], op: fn(@ps, IN)) {
|
||||
box(s, 0u, b);
|
||||
let mut first = true;
|
||||
for elts.each |elt| {
|
||||
|
@ -298,7 +310,7 @@ pub fn commasep<IN>(s: ps, b: breaks, elts: ~[IN], op: fn(ps, IN)) {
|
|||
}
|
||||
|
||||
|
||||
pub fn commasep_cmnt<IN>(s: ps, b: breaks, elts: ~[IN], op: fn(ps, IN),
|
||||
pub fn commasep_cmnt<IN>(s: @ps, b: breaks, elts: ~[IN], op: fn(@ps, IN),
|
||||
get_span: fn(IN) -> codemap::span) {
|
||||
box(s, 0u, b);
|
||||
let len = vec::len::<IN>(elts);
|
||||
|
@ -317,12 +329,12 @@ pub fn commasep_cmnt<IN>(s: ps, b: breaks, elts: ~[IN], op: fn(ps, IN),
|
|||
end(s);
|
||||
}
|
||||
|
||||
pub fn commasep_exprs(s: ps, b: breaks, exprs: ~[@ast::expr]) {
|
||||
pub fn commasep_exprs(s: @ps, b: breaks, exprs: ~[@ast::expr]) {
|
||||
fn expr_span(&&expr: @ast::expr) -> codemap::span { return expr.span; }
|
||||
commasep_cmnt(s, b, exprs, print_expr, expr_span);
|
||||
}
|
||||
|
||||
pub fn print_mod(s: ps, _mod: ast::_mod, attrs: ~[ast::attribute]) {
|
||||
pub fn print_mod(s: @ps, _mod: ast::_mod, attrs: ~[ast::attribute]) {
|
||||
print_inner_attributes(s, attrs);
|
||||
for _mod.view_items.each |vitem| {
|
||||
print_view_item(s, *vitem);
|
||||
|
@ -330,7 +342,7 @@ pub fn print_mod(s: ps, _mod: ast::_mod, attrs: ~[ast::attribute]) {
|
|||
for _mod.items.each |item| { print_item(s, *item); }
|
||||
}
|
||||
|
||||
pub fn print_foreign_mod(s: ps, nmod: ast::foreign_mod,
|
||||
pub fn print_foreign_mod(s: @ps, nmod: ast::foreign_mod,
|
||||
attrs: ~[ast::attribute]) {
|
||||
print_inner_attributes(s, attrs);
|
||||
for nmod.view_items.each |vitem| {
|
||||
|
@ -339,7 +351,7 @@ pub fn print_foreign_mod(s: ps, nmod: ast::foreign_mod,
|
|||
for nmod.items.each |item| { print_foreign_item(s, *item); }
|
||||
}
|
||||
|
||||
pub fn print_region(s: ps, prefix: ~str, region: @ast::region, sep: ~str) {
|
||||
pub fn print_region(s: @ps, prefix: ~str, region: @ast::region, sep: ~str) {
|
||||
word(s.s, prefix);
|
||||
match region.node {
|
||||
ast::re_anon => {
|
||||
|
@ -358,11 +370,11 @@ pub fn print_region(s: ps, prefix: ~str, region: @ast::region, sep: ~str) {
|
|||
word(s.s, sep);
|
||||
}
|
||||
|
||||
pub fn print_type(s: ps, &&ty: @ast::Ty) {
|
||||
pub fn print_type(s: @ps, &&ty: @ast::Ty) {
|
||||
print_type_ex(s, ty, false);
|
||||
}
|
||||
|
||||
pub fn print_type_ex(s: ps, &&ty: @ast::Ty, print_colons: bool) {
|
||||
pub fn print_type_ex(s: @ps, &&ty: @ast::Ty, print_colons: bool) {
|
||||
maybe_print_comment(s, ty.span.lo);
|
||||
ibox(s, 0u);
|
||||
match ty.node {
|
||||
|
@ -387,7 +399,7 @@ pub fn print_type_ex(s: ps, &&ty: @ast::Ty, print_colons: bool) {
|
|||
}
|
||||
ast::ty_rec(ref fields) => {
|
||||
word(s.s, ~"{");
|
||||
fn print_field(s: ps, f: ast::ty_field) {
|
||||
fn print_field(s: @ps, f: ast::ty_field) {
|
||||
cbox(s, indent_unit);
|
||||
print_mutability(s, f.node.mt.mutbl);
|
||||
print_ident(s, f.node.ident);
|
||||
|
@ -438,7 +450,7 @@ pub fn print_type_ex(s: ps, &&ty: @ast::Ty, print_colons: bool) {
|
|||
end(s);
|
||||
}
|
||||
|
||||
pub fn print_foreign_item(s: ps, item: @ast::foreign_item) {
|
||||
pub fn print_foreign_item(s: @ps, item: @ast::foreign_item) {
|
||||
hardbreak_if_not_bol(s);
|
||||
maybe_print_comment(s, item.span.lo);
|
||||
print_outer_attributes(s, item.attrs);
|
||||
|
@ -462,7 +474,7 @@ pub fn print_foreign_item(s: ps, item: @ast::foreign_item) {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn print_item(s: ps, &&item: @ast::item) {
|
||||
pub fn print_item(s: @ps, &&item: @ast::item) {
|
||||
hardbreak_if_not_bol(s);
|
||||
maybe_print_comment(s, item.span.lo);
|
||||
print_outer_attributes(s, item.attrs);
|
||||
|
@ -597,7 +609,7 @@ pub fn print_item(s: ps, &&item: @ast::item) {
|
|||
(s.ann.post)(ann_node);
|
||||
}
|
||||
|
||||
pub fn print_enum_def(s: ps, enum_definition: ast::enum_def,
|
||||
pub fn print_enum_def(s: @ps, enum_definition: ast::enum_def,
|
||||
params: ~[ast::ty_param], ident: ast::ident,
|
||||
span: codemap::span, visibility: ast::visibility) {
|
||||
let mut newtype =
|
||||
|
@ -632,7 +644,7 @@ pub fn print_enum_def(s: ps, enum_definition: ast::enum_def,
|
|||
}
|
||||
}
|
||||
|
||||
pub fn print_variants(s: ps,
|
||||
pub fn print_variants(s: @ps,
|
||||
variants: ~[ast::variant],
|
||||
span: codemap::span) {
|
||||
bopen(s);
|
||||
|
@ -665,7 +677,7 @@ pub fn visibility_qualified(vis: ast::visibility, s: ~str) -> ~str {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn print_visibility(s: ps, vis: ast::visibility) {
|
||||
pub fn print_visibility(s: @ps, vis: ast::visibility) {
|
||||
match vis {
|
||||
ast::private | ast::public =>
|
||||
word_nbsp(s, visibility_to_str(vis)),
|
||||
|
@ -673,7 +685,7 @@ pub fn print_visibility(s: ps, vis: ast::visibility) {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn print_struct(s: ps,
|
||||
pub fn print_struct(s: @ps,
|
||||
struct_def: @ast::struct_def,
|
||||
tps: ~[ast::ty_param],
|
||||
ident: ast::ident,
|
||||
|
@ -742,7 +754,7 @@ pub fn print_struct(s: ps,
|
|||
/// appropriate macro, transcribe back into the grammar we just parsed from,
|
||||
/// and then pretty-print the resulting AST nodes (so, e.g., we print
|
||||
/// expression arguments as expressions). It can be done! I think.
|
||||
pub fn print_tt(s: ps, tt: ast::token_tree) {
|
||||
pub fn print_tt(s: @ps, tt: ast::token_tree) {
|
||||
match tt {
|
||||
ast::tt_delim(ref tts) => print_tts(s, *tts),
|
||||
ast::tt_tok(_, ref tk) => {
|
||||
|
@ -765,7 +777,7 @@ pub fn print_tt(s: ps, tt: ast::token_tree) {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn print_tts(s: ps, &&tts: &[ast::token_tree]) {
|
||||
pub fn print_tts(s: @ps, &&tts: &[ast::token_tree]) {
|
||||
ibox(s, 0);
|
||||
for tts.eachi |i, tt| {
|
||||
if i != 0 {
|
||||
|
@ -776,14 +788,14 @@ pub fn print_tts(s: ps, &&tts: &[ast::token_tree]) {
|
|||
end(s);
|
||||
}
|
||||
|
||||
pub fn print_variant(s: ps, v: ast::variant) {
|
||||
pub fn print_variant(s: @ps, v: ast::variant) {
|
||||
print_visibility(s, v.node.vis);
|
||||
match v.node.kind {
|
||||
ast::tuple_variant_kind(args) => {
|
||||
print_ident(s, v.node.name);
|
||||
if !args.is_empty() {
|
||||
popen(s);
|
||||
fn print_variant_arg(s: ps, arg: ast::variant_arg) {
|
||||
fn print_variant_arg(s: @ps, arg: ast::variant_arg) {
|
||||
print_type(s, arg.ty);
|
||||
}
|
||||
commasep(s, consistent, args, print_variant_arg);
|
||||
|
@ -808,7 +820,7 @@ pub fn print_variant(s: ps, v: ast::variant) {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn print_ty_method(s: ps, m: ast::ty_method) {
|
||||
pub fn print_ty_method(s: @ps, m: ast::ty_method) {
|
||||
hardbreak_if_not_bol(s);
|
||||
maybe_print_comment(s, m.span.lo);
|
||||
print_outer_attributes(s, m.attrs);
|
||||
|
@ -818,14 +830,14 @@ pub fn print_ty_method(s: ps, m: ast::ty_method) {
|
|||
word(s.s, ~";");
|
||||
}
|
||||
|
||||
pub fn print_trait_method(s: ps, m: ast::trait_method) {
|
||||
pub fn print_trait_method(s: @ps, m: ast::trait_method) {
|
||||
match m {
|
||||
required(ref ty_m) => print_ty_method(s, (*ty_m)),
|
||||
provided(m) => print_method(s, m)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn print_method(s: ps, meth: @ast::method) {
|
||||
pub fn print_method(s: @ps, meth: @ast::method) {
|
||||
hardbreak_if_not_bol(s);
|
||||
maybe_print_comment(s, meth.span.lo);
|
||||
print_outer_attributes(s, meth.attrs);
|
||||
|
@ -836,7 +848,7 @@ pub fn print_method(s: ps, meth: @ast::method) {
|
|||
print_block_with_attrs(s, meth.body, meth.attrs);
|
||||
}
|
||||
|
||||
pub fn print_outer_attributes(s: ps, attrs: ~[ast::attribute]) {
|
||||
pub fn print_outer_attributes(s: @ps, attrs: ~[ast::attribute]) {
|
||||
let mut count = 0;
|
||||
for attrs.each |attr| {
|
||||
match attr.node.style {
|
||||
|
@ -847,7 +859,7 @@ pub fn print_outer_attributes(s: ps, attrs: ~[ast::attribute]) {
|
|||
if count > 0 { hardbreak_if_not_bol(s); }
|
||||
}
|
||||
|
||||
pub fn print_inner_attributes(s: ps, attrs: ~[ast::attribute]) {
|
||||
pub fn print_inner_attributes(s: @ps, attrs: ~[ast::attribute]) {
|
||||
let mut count = 0;
|
||||
for attrs.each |attr| {
|
||||
match attr.node.style {
|
||||
|
@ -864,7 +876,7 @@ pub fn print_inner_attributes(s: ps, attrs: ~[ast::attribute]) {
|
|||
if count > 0 { hardbreak_if_not_bol(s); }
|
||||
}
|
||||
|
||||
pub fn print_attribute(s: ps, attr: ast::attribute) {
|
||||
pub fn print_attribute(s: @ps, attr: ast::attribute) {
|
||||
hardbreak_if_not_bol(s);
|
||||
maybe_print_comment(s, attr.span.lo);
|
||||
if attr.node.is_sugared_doc {
|
||||
|
@ -879,7 +891,7 @@ pub fn print_attribute(s: ps, attr: ast::attribute) {
|
|||
}
|
||||
|
||||
|
||||
pub fn print_stmt(s: ps, st: ast::stmt) {
|
||||
pub fn print_stmt(s: @ps, st: ast::stmt) {
|
||||
maybe_print_comment(s, st.span.lo);
|
||||
match st.node {
|
||||
ast::stmt_decl(decl, _) => {
|
||||
|
@ -904,21 +916,21 @@ pub fn print_stmt(s: ps, st: ast::stmt) {
|
|||
maybe_print_trailing_comment(s, st.span, None);
|
||||
}
|
||||
|
||||
pub fn print_block(s: ps, blk: ast::blk) {
|
||||
pub fn print_block(s: @ps, blk: ast::blk) {
|
||||
print_possibly_embedded_block(s, blk, block_normal, indent_unit);
|
||||
}
|
||||
|
||||
pub fn print_block_unclosed(s: ps, blk: ast::blk) {
|
||||
pub fn print_block_unclosed(s: @ps, blk: ast::blk) {
|
||||
print_possibly_embedded_block_(s, blk, block_normal, indent_unit, ~[],
|
||||
false);
|
||||
}
|
||||
|
||||
pub fn print_block_unclosed_indent(s: ps, blk: ast::blk, indented: uint) {
|
||||
pub fn print_block_unclosed_indent(s: @ps, blk: ast::blk, indented: uint) {
|
||||
print_possibly_embedded_block_(s, blk, block_normal, indented, ~[],
|
||||
false);
|
||||
}
|
||||
|
||||
pub fn print_block_with_attrs(s: ps,
|
||||
pub fn print_block_with_attrs(s: @ps,
|
||||
blk: ast::blk,
|
||||
attrs: ~[ast::attribute]) {
|
||||
print_possibly_embedded_block_(s, blk, block_normal, indent_unit, attrs,
|
||||
|
@ -927,7 +939,7 @@ pub fn print_block_with_attrs(s: ps,
|
|||
|
||||
pub enum embed_type { block_block_fn, block_normal, }
|
||||
|
||||
pub fn print_possibly_embedded_block(s: ps,
|
||||
pub fn print_possibly_embedded_block(s: @ps,
|
||||
blk: ast::blk,
|
||||
embedded: embed_type,
|
||||
indented: uint) {
|
||||
|
@ -935,7 +947,7 @@ pub fn print_possibly_embedded_block(s: ps,
|
|||
s, blk, embedded, indented, ~[], true);
|
||||
}
|
||||
|
||||
pub fn print_possibly_embedded_block_(s: ps,
|
||||
pub fn print_possibly_embedded_block_(s: @ps,
|
||||
blk: ast::blk,
|
||||
embedded: embed_type,
|
||||
indented: uint,
|
||||
|
@ -971,14 +983,14 @@ pub fn print_possibly_embedded_block_(s: ps,
|
|||
(s.ann.post)(ann_node);
|
||||
}
|
||||
|
||||
pub fn print_if(s: ps, test: @ast::expr, blk: ast::blk,
|
||||
pub fn print_if(s: @ps, test: @ast::expr, blk: ast::blk,
|
||||
elseopt: Option<@ast::expr>, chk: bool) {
|
||||
head(s, ~"if");
|
||||
if chk { word_nbsp(s, ~"check"); }
|
||||
print_expr(s, test);
|
||||
space(s.s);
|
||||
print_block(s, blk);
|
||||
fn do_else(s: ps, els: Option<@ast::expr>) {
|
||||
fn do_else(s: @ps, els: Option<@ast::expr>) {
|
||||
match els {
|
||||
Some(_else) => {
|
||||
match _else.node {
|
||||
|
@ -1011,7 +1023,7 @@ pub fn print_if(s: ps, test: @ast::expr, blk: ast::blk,
|
|||
do_else(s, elseopt);
|
||||
}
|
||||
|
||||
pub fn print_mac(s: ps, m: ast::mac) {
|
||||
pub fn print_mac(s: @ps, m: ast::mac) {
|
||||
match m.node {
|
||||
ast::mac_invoc_tt(pth, ref tts) => {
|
||||
print_path(s, pth, false);
|
||||
|
@ -1023,7 +1035,7 @@ pub fn print_mac(s: ps, m: ast::mac) {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn print_vstore(s: ps, t: ast::vstore) {
|
||||
pub fn print_vstore(s: @ps, t: ast::vstore) {
|
||||
match t {
|
||||
ast::vstore_fixed(Some(i)) => word(s.s, fmt!("%u", i)),
|
||||
ast::vstore_fixed(None) => word(s.s, ~"_"),
|
||||
|
@ -1033,7 +1045,7 @@ pub fn print_vstore(s: ps, t: ast::vstore) {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn print_expr_vstore(s: ps, t: ast::expr_vstore) {
|
||||
pub fn print_expr_vstore(s: @ps, t: ast::expr_vstore) {
|
||||
match t {
|
||||
ast::expr_vstore_fixed(Some(i)) => word(s.s, fmt!("%u", i)),
|
||||
ast::expr_vstore_fixed(None) => word(s.s, ~"_"),
|
||||
|
@ -1051,7 +1063,7 @@ pub fn print_expr_vstore(s: ps, t: ast::expr_vstore) {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn print_call_pre(s: ps,
|
||||
pub fn print_call_pre(s: @ps,
|
||||
sugar: ast::CallSugar,
|
||||
base_args: &mut ~[@ast::expr])
|
||||
-> Option<@ast::expr> {
|
||||
|
@ -1068,7 +1080,7 @@ pub fn print_call_pre(s: ps,
|
|||
}
|
||||
}
|
||||
|
||||
pub fn print_call_post(s: ps,
|
||||
pub fn print_call_post(s: @ps,
|
||||
sugar: ast::CallSugar,
|
||||
blk: &Option<@ast::expr>,
|
||||
base_args: &mut ~[@ast::expr]) {
|
||||
|
@ -1095,8 +1107,8 @@ pub fn print_call_post(s: ps,
|
|||
}
|
||||
}
|
||||
|
||||
pub fn print_expr(s: ps, &&expr: @ast::expr) {
|
||||
fn print_field(s: ps, field: ast::field) {
|
||||
pub fn print_expr(s: @ps, &&expr: @ast::expr) {
|
||||
fn print_field(s: @ps, field: ast::field) {
|
||||
ibox(s, indent_unit);
|
||||
if field.node.mutbl == ast::m_mutbl { word_nbsp(s, ~"mut"); }
|
||||
print_ident(s, field.node.ident);
|
||||
|
@ -1446,7 +1458,7 @@ pub fn print_expr(s: ps, &&expr: @ast::expr) {
|
|||
end(s);
|
||||
}
|
||||
|
||||
pub fn print_local_decl(s: ps, loc: @ast::local) {
|
||||
pub fn print_local_decl(s: @ps, loc: @ast::local) {
|
||||
print_irrefutable_pat(s, loc.node.pat);
|
||||
match loc.node.ty.node {
|
||||
ast::ty_infer => (),
|
||||
|
@ -1454,7 +1466,7 @@ pub fn print_local_decl(s: ps, loc: @ast::local) {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn print_decl(s: ps, decl: @ast::decl) {
|
||||
pub fn print_decl(s: @ps, decl: @ast::decl) {
|
||||
maybe_print_comment(s, decl.span.lo);
|
||||
match decl.node {
|
||||
ast::decl_local(locs) => {
|
||||
|
@ -1468,7 +1480,7 @@ pub fn print_decl(s: ps, decl: @ast::decl) {
|
|||
word_nbsp(s, ~"mut");
|
||||
}
|
||||
|
||||
fn print_local(s: ps, &&loc: @ast::local) {
|
||||
fn print_local(s: @ps, &&loc: @ast::local) {
|
||||
ibox(s, indent_unit);
|
||||
print_local_decl(s, loc);
|
||||
end(s);
|
||||
|
@@ -1488,18 +1500,18 @@ pub fn print_decl(s: ps, decl: @ast::decl) {
 }
 }

-pub fn print_ident(s: ps, ident: ast::ident) {
+pub fn print_ident(s: @ps, ident: ast::ident) {
 word(s.s, *s.intr.get(ident));
 }

-pub fn print_for_decl(s: ps, loc: @ast::local, coll: @ast::expr) {
+pub fn print_for_decl(s: @ps, loc: @ast::local, coll: @ast::expr) {
 print_local_decl(s, loc);
 space(s.s);
 word_space(s, ~"in");
 print_expr(s, coll);
 }

-pub fn print_path(s: ps, &&path: @ast::path, colons_before_params: bool) {
+pub fn print_path(s: @ps, &&path: @ast::path, colons_before_params: bool) {
 maybe_print_comment(s, path.span.lo);
 if path.global { word(s.s, ~"::"); }
 let mut first = true;
@@ -1526,15 +1538,15 @@ pub fn print_path(s: ps, &&path: @ast::path, colons_before_params: bool) {
 }
 }

-pub fn print_irrefutable_pat(s: ps, &&pat: @ast::pat) {
+pub fn print_irrefutable_pat(s: @ps, &&pat: @ast::pat) {
 print_pat(s, pat, false)
 }

-pub fn print_refutable_pat(s: ps, &&pat: @ast::pat) {
+pub fn print_refutable_pat(s: @ps, &&pat: @ast::pat) {
 print_pat(s, pat, true)
 }

-pub fn print_pat(s: ps, &&pat: @ast::pat, refutable: bool) {
+pub fn print_pat(s: @ps, &&pat: @ast::pat, refutable: bool) {
 maybe_print_comment(s, pat.span.lo);
 let ann_node = node_pat(s, pat);
 (s.ann.pre)(ann_node);
@@ -1580,7 +1592,7 @@ pub fn print_pat(s: ps, &&pat: @ast::pat, refutable: bool) {
 }
 ast::pat_rec(fields, etc) => {
 word(s.s, ~"{");
-fn print_field(s: ps, f: ast::field_pat, refutable: bool) {
+fn print_field(s: @ps, f: ast::field_pat, refutable: bool) {
 cbox(s, indent_unit);
 print_ident(s, f.ident);
 word_space(s, ~":");
@@ -1600,7 +1612,7 @@ pub fn print_pat(s: ps, &&pat: @ast::pat, refutable: bool) {
 ast::pat_struct(path, fields, etc) => {
 print_path(s, path, true);
 word(s.s, ~"{");
-fn print_field(s: ps, f: ast::field_pat, refutable: bool) {
+fn print_field(s: @ps, f: ast::field_pat, refutable: bool) {
 cbox(s, indent_unit);
 print_ident(s, f.ident);
 word_space(s, ~":");
@@ -1656,7 +1668,7 @@ pub fn print_pat(s: ps, &&pat: @ast::pat, refutable: bool) {
 }

 // Returns whether it printed anything
-pub fn print_self_ty(s: ps, self_ty: ast::self_ty_) -> bool {
+pub fn print_self_ty(s: @ps, self_ty: ast::self_ty_) -> bool {
 match self_ty {
 ast::sty_static | ast::sty_by_ref => { return false; }
 ast::sty_value => { word(s.s, ~"self"); }
@@ -1673,7 +1685,7 @@ pub fn print_self_ty(s: ps, self_ty: ast::self_ty_) -> bool {
 return true;
 }

-pub fn print_fn(s: ps,
+pub fn print_fn(s: @ps,
 decl: ast::fn_decl,
 purity: Option<ast::purity>,
 name: ast::ident,
@@ -1688,7 +1700,7 @@ pub fn print_fn(s: ps,
 print_fn_args_and_ret(s, decl, opt_self_ty);
 }

-pub fn print_fn_args(s: ps, decl: ast::fn_decl,
+pub fn print_fn_args(s: @ps, decl: ast::fn_decl,
 opt_self_ty: Option<ast::self_ty_>) {
 // It is unfortunate to duplicate the commasep logic, but we we want the
 // self type and the args all in the same box.
@@ -1706,7 +1718,7 @@ pub fn print_fn_args(s: ps, decl: ast::fn_decl,
 end(s);
 }

-pub fn print_fn_args_and_ret(s: ps, decl: ast::fn_decl,
+pub fn print_fn_args_and_ret(s: @ps, decl: ast::fn_decl,
 opt_self_ty: Option<ast::self_ty_>) {
 popen(s);
 print_fn_args(s, decl, opt_self_ty);
@@ -1723,7 +1735,7 @@ pub fn print_fn_args_and_ret(s: ps, decl: ast::fn_decl,
 }
 }

-pub fn print_fn_block_args(s: ps, decl: ast::fn_decl) {
+pub fn print_fn_block_args(s: @ps, decl: ast::fn_decl) {
 word(s.s, ~"|");
 print_fn_args(s, decl, None);
 word(s.s, ~"|");
@@ -1749,12 +1761,12 @@ pub fn mode_to_str(m: ast::mode) -> ~str {
 }
 }

-pub fn print_arg_mode(s: ps, m: ast::mode) {
+pub fn print_arg_mode(s: @ps, m: ast::mode) {
 let ms = mode_to_str(m);
 if ms != ~"" { word(s.s, ms); }
 }

-pub fn print_bounds(s: ps, bounds: @~[ast::ty_param_bound]) {
+pub fn print_bounds(s: @ps, bounds: @~[ast::ty_param_bound]) {
 if !bounds.is_empty() {
 word(s.s, ~":");
 let mut first = true;
@@ -1774,10 +1786,10 @@ pub fn print_bounds(s: ps, bounds: @~[ast::ty_param_bound]) {
 }
 }

-pub fn print_type_params(s: ps, &&params: ~[ast::ty_param]) {
+pub fn print_type_params(s: @ps, &&params: ~[ast::ty_param]) {
 if vec::len(params) > 0u {
 word(s.s, ~"<");
-fn printParam(s: ps, param: ast::ty_param) {
+fn printParam(s: @ps, param: ast::ty_param) {
 print_ident(s, param.ident);
 print_bounds(s, param.bounds);
 }
@@ -1786,7 +1798,7 @@ pub fn print_type_params(s: ps, &&params: ~[ast::ty_param]) {
 }
 }

-pub fn print_meta_item(s: ps, &&item: @ast::meta_item) {
+pub fn print_meta_item(s: @ps, &&item: @ast::meta_item) {
 ibox(s, indent_unit);
 match item.node {
 ast::meta_word(ref name) => word(s.s, (*name)),
@@ -1805,7 +1817,7 @@ pub fn print_meta_item(s: ps, &&item: @ast::meta_item) {
 end(s);
 }

-pub fn print_view_path(s: ps, &&vp: @ast::view_path) {
+pub fn print_view_path(s: @ps, &&vp: @ast::view_path) {
 match vp.node {
 ast::view_path_simple(ident, path, namespace, _) => {
 if namespace == ast::module_ns {
@@ -1835,11 +1847,11 @@ pub fn print_view_path(s: ps, &&vp: @ast::view_path) {
 }
 }

-pub fn print_view_paths(s: ps, vps: ~[@ast::view_path]) {
+pub fn print_view_paths(s: @ps, vps: ~[@ast::view_path]) {
 commasep(s, inconsistent, vps, print_view_path);
 }

-pub fn print_view_item(s: ps, item: @ast::view_item) {
+pub fn print_view_item(s: @ps, item: @ast::view_item) {
 hardbreak_if_not_bol(s);
 maybe_print_comment(s, item.span.lo);
 print_outer_attributes(s, item.attrs);
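One reason the printers keep the uniform `(s: @ps, item)` shape is that they are passed as callbacks, as in the `commasep(s, inconsistent, vps, print_view_path)` call above; changing the state parameter therefore has to ripple into every helper, nested ones like `printParam` and `print_field` included. A rough sketch of how such a combinator is driven (the `commasep` signature in the comment is an inference from the call site, not copied from the source):

    // Assumed shape, inferred from the call above -- not the actual declaration:
    // fn commasep<IN>(s: @ps, b: breaks, elts: ~[IN], op: fn(@ps, IN));
    fn print_paths_comma_separated(s: @ps, vps: ~[@ast::view_path]) {
        // Each element is printed by the callback; commasep supplies the separators.
        commasep(s, inconsistent, vps, print_view_path);
    }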
@@ -1865,7 +1877,7 @@ pub fn print_view_item(s: ps, item: @ast::view_item) {
 end(s); // end outer head-block
 }

-pub fn print_mutability(s: ps, mutbl: ast::mutability) {
+pub fn print_mutability(s: @ps, mutbl: ast::mutability) {
 match mutbl {
 ast::m_mutbl => word_nbsp(s, ~"mut"),
 ast::m_const => word_nbsp(s, ~"const"),
@@ -1873,12 +1885,12 @@ pub fn print_mutability(s: ps, mutbl: ast::mutability) {
 }
 }

-pub fn print_mt(s: ps, mt: ast::mt) {
+pub fn print_mt(s: @ps, mt: ast::mt) {
 print_mutability(s, mt.mutbl);
 print_type(s, mt.ty);
 }

-pub fn print_arg(s: ps, input: ast::arg) {
+pub fn print_arg(s: @ps, input: ast::arg) {
 ibox(s, indent_unit);
 print_arg_mode(s, input.mode);
 if input.is_mutbl {
@@ -1905,7 +1917,7 @@ pub fn print_arg(s: ps, input: ast::arg) {
 end(s);
 }

-pub fn print_ty_fn(s: ps,
+pub fn print_ty_fn(s: @ps,
 opt_abi: Option<ast::Abi>,
 opt_sigil: Option<ast::Sigil>,
 opt_region: Option<@ast::region>,
@@ -1961,7 +1973,7 @@ pub fn print_ty_fn(s: ps,
 end(s);
 }

-pub fn maybe_print_trailing_comment(s: ps, span: codemap::span,
+pub fn maybe_print_trailing_comment(s: @ps, span: codemap::span,
 next_pos: Option<BytePos>) {
 let mut cm;
 match s.cm { Some(ccm) => cm = ccm, _ => return }
@@ -1975,26 +1987,29 @@ pub fn maybe_print_trailing_comment(s: ps, span: codemap::span,
 if span.hi < (*cmnt).pos && (*cmnt).pos < next &&
 span_line.line == comment_line.line {
 print_comment(s, (*cmnt));
-s.cur_cmnt += 1u;
+s.cur_cmnt_and_lit.cur_cmnt += 1u;
 }
 }
 _ => ()
 }
 }

-pub fn print_remaining_comments(s: ps) {
+pub fn print_remaining_comments(s: @ps) {
 // If there aren't any remaining comments, then we need to manually
 // make sure there is a line break at the end.
 if next_comment(s).is_none() { hardbreak(s.s); }
 loop {
 match next_comment(s) {
-Some(ref cmnt) => { print_comment(s, (*cmnt)); s.cur_cmnt += 1u; }
+Some(ref cmnt) => {
+print_comment(s, (*cmnt));
+s.cur_cmnt_and_lit.cur_cmnt += 1u;
+}
 _ => break
 }
 }
 }

-pub fn print_literal(s: ps, &&lit: @ast::lit) {
+pub fn print_literal(s: @ps, &&lit: @ast::lit) {
 maybe_print_comment(s, lit.span.lo);
 match next_lit(s, lit.span.lo) {
 Some(ref ltrl) => {
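The second recurring edit, here and in the hunks below: the comment and literal cursors are no longer bare `mut` fields read directly off the printer state, but are reached through a `cur_cmnt_and_lit` sub-object. Its declaration is outside this diff; a plausible shape consistent with the accesses above (the struct name and the `@mut` wrapping are assumptions, not quoted from the source) would be:

    // Assumed grouping -- not shown in this diff.
    pub struct CurCmntAndLit {
        cur_cmnt: uint,
        cur_lit: uint,
    }
    // In `ps`, roughly: cur_cmnt_and_lit: @mut CurCmntAndLit,
    // so the counters stay mutable without `mut` fields on `ps` itself:
    fn bump_cmnt(s: @ps) {
        s.cur_cmnt_and_lit.cur_cmnt += 1u;
    }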
@@ -2046,13 +2061,13 @@ pub fn lit_to_str(l: @ast::lit) -> ~str {
 return to_str(l, print_literal, parse::token::mk_fake_ident_interner());
 }

-pub fn next_lit(s: ps, pos: BytePos) -> Option<comments::lit> {
+pub fn next_lit(s: @ps, pos: BytePos) -> Option<comments::lit> {
 match s.literals {
 Some(ref lits) => {
-while s.cur_lit < vec::len((*lits)) {
-let ltrl = (*lits)[s.cur_lit];
+while s.cur_cmnt_and_lit.cur_lit < vec::len((*lits)) {
+let ltrl = (*lits)[s.cur_cmnt_and_lit.cur_lit];
 if ltrl.pos > pos { return None; }
-s.cur_lit += 1u;
+s.cur_cmnt_and_lit.cur_lit += 1u;
 if ltrl.pos == pos { return Some(ltrl); }
 }
 return None;
@@ -2061,13 +2076,13 @@ pub fn next_lit(s: ps, pos: BytePos) -> Option<comments::lit> {
 }
 }

-pub fn maybe_print_comment(s: ps, pos: BytePos) {
+pub fn maybe_print_comment(s: @ps, pos: BytePos) {
 loop {
 match next_comment(s) {
 Some(ref cmnt) => {
 if (*cmnt).pos < pos {
 print_comment(s, (*cmnt));
-s.cur_cmnt += 1u;
+s.cur_cmnt_and_lit.cur_cmnt += 1u;
 } else { break; }
 }
 _ => break
@@ -2075,7 +2090,7 @@ pub fn maybe_print_comment(s: ps, pos: BytePos) {
 }
 }

-pub fn print_comment(s: ps, cmnt: comments::cmnt) {
+pub fn print_comment(s: @ps, cmnt: comments::cmnt) {
 match cmnt.style {
 comments::mixed => {
 assert (vec::len(cmnt.lines) == 1u);
@@ -2119,13 +2134,13 @@ pub fn print_comment(s: ps, cmnt: comments::cmnt) {
 }
 }

-pub fn print_string(s: ps, st: ~str) {
+pub fn print_string(s: @ps, st: ~str) {
 word(s.s, ~"\"");
 word(s.s, str::escape_default(st));
 word(s.s, ~"\"");
 }

-pub fn to_str<T>(t: T, f: fn@(ps, T), intr: @ident_interner) -> ~str {
+pub fn to_str<T>(t: T, f: fn@(@ps, T), intr: @ident_interner) -> ~str {
 do io::with_str_writer |wr| {
 let s = rust_printer(wr, intr);
 f(s, t);
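With `to_str` now expecting a `fn@(@ps, T)` callback, the per-node printers above can be handed to it directly. A hedged usage sketch in the style of the `to_str(l, print_literal, ...)` call in the lit_to_str hunk above (`expr_to_str` as written here is illustrative, not quoted from the file):

    // Illustrative wrapper, mirroring lit_to_str above.
    pub fn expr_to_str(e: @ast::expr, intr: @ident_interner) -> ~str {
        to_str(e, print_expr, intr)
    }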
@@ -2133,18 +2148,18 @@ pub fn to_str<T>(t: T, f: fn@(ps, T), intr: @ident_interner) -> ~str {
 }
 }

-pub fn next_comment(s: ps) -> Option<comments::cmnt> {
+pub fn next_comment(s: @ps) -> Option<comments::cmnt> {
 match s.comments {
 Some(ref cmnts) => {
-if s.cur_cmnt < vec::len((*cmnts)) {
-return Some((*cmnts)[s.cur_cmnt]);
+if s.cur_cmnt_and_lit.cur_cmnt < vec::len((*cmnts)) {
+return Some((*cmnts)[s.cur_cmnt_and_lit.cur_cmnt]);
 } else { return None::<comments::cmnt>; }
 }
 _ => return None::<comments::cmnt>
 }
 }

-pub fn print_self_ty_if_static(s: ps,
+pub fn print_self_ty_if_static(s: @ps,
 opt_self_ty: Option<ast::self_ty_>) {
 match opt_self_ty {
 Some(ast::sty_static) => { word(s.s, ~"static "); }
@@ -2152,7 +2167,7 @@ pub fn print_self_ty_if_static(s: ps,
 }
 }

-pub fn print_opt_purity(s: ps, opt_purity: Option<ast::purity>) {
+pub fn print_opt_purity(s: @ps, opt_purity: Option<ast::purity>) {
 match opt_purity {
 Some(ast::impure_fn) => { }
 Some(purity) => {
@@ -2162,14 +2177,14 @@ pub fn print_opt_purity(s: ps, opt_purity: Option<ast::purity>) {
 }
 }

-pub fn print_opt_abi(s: ps, opt_abi: Option<ast::Abi>) {
+pub fn print_opt_abi(s: @ps, opt_abi: Option<ast::Abi>) {
 match opt_abi {
 Some(ast::RustAbi) => { word_nbsp(s, ~"extern"); }
 None => {}
 };
 }

-pub fn print_opt_sigil(s: ps, opt_sigil: Option<ast::Sigil>) {
+pub fn print_opt_sigil(s: @ps, opt_sigil: Option<ast::Sigil>) {
 match opt_sigil {
 Some(ast::BorrowedSigil) => { word(s.s, ~"&"); }
 Some(ast::OwnedSigil) => { word(s.s, ~"~"); }
@@ -2178,7 +2193,7 @@ pub fn print_opt_sigil(s: ps, opt_sigil: Option<ast::Sigil>) {
 };
 }

-pub fn print_fn_header_info(s: ps,
+pub fn print_fn_header_info(s: @ps,
 opt_sty: Option<ast::self_ty_>,
 opt_purity: Option<ast::purity>,
 onceness: ast::Onceness,
@@ -2215,14 +2230,14 @@ pub pure fn onceness_to_str(o: ast::Onceness) -> ~str {
 }
 }

-pub fn print_purity(s: ps, p: ast::purity) {
+pub fn print_purity(s: @ps, p: ast::purity) {
 match p {
 ast::impure_fn => (),
 _ => word_nbsp(s, purity_to_str(p))
 }
 }

-pub fn print_onceness(s: ps, o: ast::Onceness) {
+pub fn print_onceness(s: @ps, o: ast::Onceness) {
 match o {
 ast::Once => { word_nbsp(s, ~"once"); }
 ast::Many => {}