
librustc: Lots of de-muting. rs=demuting

Patrick Walton, 2013-02-04 14:02:01 -08:00
commit 472797b04a (parent 2bc9655bc1)
63 changed files with 2434 additions and 2086 deletions
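
A note on what "de-muting" means in this commit: per-field `mut` declarations and `@T` boxes with mutable fields are replaced by values that are immutable by default and reached through `@mut` boxes or `&mut self` receivers when mutation is actually needed. Below is a minimal sketch of that pattern in modern Rust terms, since `@mut` no longer exists; `Rc<RefCell<...>>` stands in for the mutable shared box, and the `Counter` type is purely illustrative.

    use std::cell::RefCell;
    use std::rc::Rc;

    struct Counter {
        count: usize, // plain field; the old style would have written `mut count`
    }

    impl Counter {
        // Mutation now has to come in through the receiver, the analogue of
        // the `@self` -> `@mut self` and `fn f()` -> `fn f(&mut self)` changes below.
        fn bump(&mut self) {
            self.count += 1;
        }
    }

    fn main() {
        // A shared, mutable handle: the closest modern stand-in for `@mut Counter`.
        let shared: Rc<RefCell<Counter>> = Rc::new(RefCell::new(Counter { count: 0 }));
        shared.borrow_mut().bump();
        assert_eq!(shared.borrow().count, 1);
    }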


@ -81,7 +81,7 @@ pub fn init(root: &Path) {
let p = root.push("gpg"); let p = root.push("gpg");
if !os::path_is_dir(&p) { if !os::path_is_dir(&p) {
os::make_dir(&p, 0x1c0i32); os::make_dir(&p, 0x1c0i32);
let p = run::start_program(~"gpg", ~[~"--homedir", let mut p = run::start_program(~"gpg", ~[~"--homedir",
p.to_str(), p.to_str(),
~"--import"]); ~"--import"]);
p.input().write_str(signing_key()); p.input().write_str(signing_key());
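
This first hunk is a downstream consequence of the run library changes further down: `Program` methods now take `&mut self`, so the binding holding the child-process handle must itself be `mut`. A tiny modern-Rust sketch of that rule, with an illustrative `Child` type standing in for the real handle:

    struct Child;

    impl Child {
        // Hypothetical stand-in for p.input().write_str(...): a method that
        // takes `&mut self` forces the caller's binding to be `let mut p`.
        fn write_stdin(&mut self, _s: &str) {}
    }

    fn main() {
        let mut p = Child; // a plain `let p` would not compile here
        p.write_stdin("key data");
    }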


@ -24,26 +24,26 @@ use option::{None, Option, Some};
use option; use option;
use vec; use vec;
pub type DListLink<T> = Option<@DListNode<T>>; pub type DListLink<T> = Option<@mut DListNode<T>>;
pub struct DListNode<T> { pub struct DListNode<T> {
data: T, data: T,
mut linked: bool, // for assertions linked: bool, // for assertions
mut prev: DListLink<T>, prev: DListLink<T>,
mut next: DListLink<T>, next: DListLink<T>,
} }
pub struct DList<T> { pub struct DList<T> {
mut size: uint, size: uint,
mut hd: DListLink<T>, hd: DListLink<T>,
mut tl: DListLink<T>, tl: DListLink<T>,
} }
priv impl<T> DListNode<T> { priv impl<T> DListNode<T> {
pure fn assert_links(@self) { pure fn assert_links(@mut self) {
match self.next { match self.next {
Some(neighbour) => match neighbour.prev { Some(neighbour) => match neighbour.prev {
Some(me) => if !managed::ptr_eq(self, me) { Some(me) => if !managed::mut_ptr_eq(self, me) {
die!(~"Asymmetric next-link in dlist node.") die!(~"Asymmetric next-link in dlist node.")
}, },
None => die!(~"One-way next-link in dlist node.") None => die!(~"One-way next-link in dlist node.")
@ -52,7 +52,7 @@ priv impl<T> DListNode<T> {
} }
match self.prev { match self.prev {
Some(neighbour) => match neighbour.next { Some(neighbour) => match neighbour.next {
Some(me) => if !managed::ptr_eq(me, self) { Some(me) => if !managed::mut_ptr_eq(me, self) {
die!(~"Asymmetric prev-link in dlist node.") die!(~"Asymmetric prev-link in dlist node.")
}, },
None => die!(~"One-way prev-link in dlist node.") None => die!(~"One-way prev-link in dlist node.")
@ -64,24 +64,24 @@ priv impl<T> DListNode<T> {
impl<T> DListNode<T> { impl<T> DListNode<T> {
/// Get the next node in the list, if there is one. /// Get the next node in the list, if there is one.
pure fn next_link(@self) -> DListLink<T> { pure fn next_link(@mut self) -> DListLink<T> {
self.assert_links(); self.assert_links();
self.next self.next
} }
/// Get the next node in the list, failing if there isn't one. /// Get the next node in the list, failing if there isn't one.
pure fn next_node(@self) -> @DListNode<T> { pure fn next_node(@mut self) -> @mut DListNode<T> {
match self.next_link() { match self.next_link() {
Some(nobe) => nobe, Some(nobe) => nobe,
None => die!(~"This dlist node has no next neighbour.") None => die!(~"This dlist node has no next neighbour.")
} }
} }
/// Get the previous node in the list, if there is one. /// Get the previous node in the list, if there is one.
pure fn prev_link(@self) -> DListLink<T> { pure fn prev_link(@mut self) -> DListLink<T> {
self.assert_links(); self.assert_links();
self.prev self.prev
} }
/// Get the previous node in the list, failing if there isn't one. /// Get the previous node in the list, failing if there isn't one.
pure fn prev_node(@self) -> @DListNode<T> { pure fn prev_node(@mut self) -> @mut DListNode<T> {
match self.prev_link() { match self.prev_link() {
Some(nobe) => nobe, Some(nobe) => nobe,
None => die!(~"This dlist node has no previous neighbour.") None => die!(~"This dlist node has no previous neighbour.")
@ -90,23 +90,23 @@ impl<T> DListNode<T> {
} }
/// Creates a new dlist node with the given data. /// Creates a new dlist node with the given data.
pub pure fn new_dlist_node<T>(data: T) -> @DListNode<T> { pub pure fn new_dlist_node<T>(data: T) -> @mut DListNode<T> {
@DListNode { data: data, linked: false, prev: None, next: None } @mut DListNode { data: data, linked: false, prev: None, next: None }
} }
/// Creates a new, empty dlist. /// Creates a new, empty dlist.
pub pure fn DList<T>() -> @DList<T> { pub pure fn DList<T>() -> @mut DList<T> {
@DList { size: 0, hd: None, tl: None } @mut DList { size: 0, hd: None, tl: None }
} }
/// Creates a new dlist with a single element /// Creates a new dlist with a single element
pub pure fn from_elem<T>(data: T) -> @DList<T> { pub pure fn from_elem<T>(data: T) -> @mut DList<T> {
let list = DList(); let list = DList();
unsafe { list.push(data); } unsafe { list.push(data); }
list list
} }
pub fn from_vec<T: Copy>(vec: &[T]) -> @DList<T> { pub fn from_vec<T: Copy>(vec: &[T]) -> @mut DList<T> {
do vec::foldl(DList(), vec) |list,data| { do vec::foldl(DList(), vec) |list,data| {
list.push(*data); // Iterating left-to-right -- add newly to the tail. list.push(*data); // Iterating left-to-right -- add newly to the tail.
list list
@ -115,7 +115,7 @@ pub fn from_vec<T: Copy>(vec: &[T]) -> @DList<T> {
/// Produce a list from a list of lists, leaving no elements behind in the /// Produce a list from a list of lists, leaving no elements behind in the
/// input. O(number of sub-lists). /// input. O(number of sub-lists).
pub fn concat<T>(lists: @DList<@DList<T>>) -> @DList<T> { pub fn concat<T>(lists: @mut DList<@mut DList<T>>) -> @mut DList<T> {
let result = DList(); let result = DList();
while !lists.is_empty() { while !lists.is_empty() {
result.append(lists.pop().get()); result.append(lists.pop().get());
@ -125,9 +125,14 @@ pub fn concat<T>(lists: @DList<@DList<T>>) -> @DList<T> {
priv impl<T> DList<T> { priv impl<T> DList<T> {
static pure fn new_link(data: T) -> DListLink<T> { static pure fn new_link(data: T) -> DListLink<T> {
Some(@DListNode { data: data, linked: true, prev: None, next: None }) Some(@mut DListNode {
data: data,
linked: true,
prev: None,
next: None
})
} }
pure fn assert_mine(@self, nobe: @DListNode<T>) { pure fn assert_mine(@mut self, nobe: @mut DListNode<T>) {
// These asserts could be stronger if we had node-root back-pointers, // These asserts could be stronger if we had node-root back-pointers,
// but those wouldn't allow for O(1) append. // but those wouldn't allow for O(1) append.
if self.size == 0 { if self.size == 0 {
@ -135,15 +140,15 @@ priv impl<T> DList<T> {
} }
if !nobe.linked { die!(~"That node isn't linked to any dlist.") } if !nobe.linked { die!(~"That node isn't linked to any dlist.") }
if !((nobe.prev.is_some() if !((nobe.prev.is_some()
|| managed::ptr_eq(self.hd.expect(~"headless dlist?"), || managed::mut_ptr_eq(self.hd.expect(~"headless dlist?"),
nobe)) && nobe)) &&
(nobe.next.is_some() (nobe.next.is_some()
|| managed::ptr_eq(self.tl.expect(~"tailless dlist?"), || managed::mut_ptr_eq(self.tl.expect(~"tailless dlist?"),
nobe))) { nobe))) {
die!(~"That node isn't on this dlist.") die!(~"That node isn't on this dlist.")
} }
} }
fn make_mine(nobe: @DListNode<T>) { fn make_mine(nobe: @mut DListNode<T>) {
if nobe.prev.is_some() || nobe.next.is_some() || nobe.linked { if nobe.prev.is_some() || nobe.next.is_some() || nobe.linked {
die!(~"Cannot insert node that's already on a dlist!") die!(~"Cannot insert node that's already on a dlist!")
} }
@ -152,7 +157,7 @@ priv impl<T> DList<T> {
// Link two nodes together. If either of them are 'none', also sets // Link two nodes together. If either of them are 'none', also sets
// the head and/or tail pointers appropriately. // the head and/or tail pointers appropriately.
#[inline(always)] #[inline(always)]
fn link(before: DListLink<T>, after: DListLink<T>) { fn link(&mut self, before: DListLink<T>, after: DListLink<T>) {
match before { match before {
Some(neighbour) => neighbour.next = after, Some(neighbour) => neighbour.next = after,
None => self.hd = after None => self.hd = after
@ -163,7 +168,7 @@ priv impl<T> DList<T> {
} }
} }
// Remove a node from the list. // Remove a node from the list.
fn unlink(@self, nobe: @DListNode<T>) { fn unlink(@mut self, nobe: @mut DListNode<T>) {
self.assert_mine(nobe); self.assert_mine(nobe);
assert self.size > 0; assert self.size > 0;
self.link(nobe.prev, nobe.next); self.link(nobe.prev, nobe.next);
@ -173,24 +178,28 @@ priv impl<T> DList<T> {
self.size -= 1; self.size -= 1;
} }
fn add_head(@self, nobe: DListLink<T>) { fn add_head(@mut self, nobe: DListLink<T>) {
self.link(nobe, self.hd); // Might set tail too. self.link(nobe, self.hd); // Might set tail too.
self.hd = nobe; self.hd = nobe;
self.size += 1; self.size += 1;
} }
fn add_tail(@self, nobe: DListLink<T>) { fn add_tail(@mut self, nobe: DListLink<T>) {
self.link(self.tl, nobe); // Might set head too. self.link(self.tl, nobe); // Might set head too.
self.tl = nobe; self.tl = nobe;
self.size += 1; self.size += 1;
} }
fn insert_left(@self, nobe: DListLink<T>, neighbour: @DListNode<T>) { fn insert_left(@mut self,
nobe: DListLink<T>,
neighbour: @mut DListNode<T>) {
self.assert_mine(neighbour); self.assert_mine(neighbour);
assert self.size > 0; assert self.size > 0;
self.link(neighbour.prev, nobe); self.link(neighbour.prev, nobe);
self.link(nobe, Some(neighbour)); self.link(nobe, Some(neighbour));
self.size += 1; self.size += 1;
} }
fn insert_right(@self, neighbour: @DListNode<T>, nobe: DListLink<T>) { fn insert_right(@mut self,
neighbour: @mut DListNode<T>,
nobe: DListLink<T>) {
self.assert_mine(neighbour); self.assert_mine(neighbour);
assert self.size > 0; assert self.size > 0;
self.link(nobe, neighbour.next); self.link(nobe, neighbour.next);
@ -201,32 +210,32 @@ priv impl<T> DList<T> {
impl<T> DList<T> { impl<T> DList<T> {
/// Get the size of the list. O(1). /// Get the size of the list. O(1).
pure fn len(@self) -> uint { self.size } pure fn len(@mut self) -> uint { self.size }
/// Returns true if the list is empty. O(1). /// Returns true if the list is empty. O(1).
pure fn is_empty(@self) -> bool { self.len() == 0 } pure fn is_empty(@mut self) -> bool { self.len() == 0 }
/// Add data to the head of the list. O(1). /// Add data to the head of the list. O(1).
fn push_head(@self, data: T) { fn push_head(@mut self, data: T) {
self.add_head(DList::new_link(data)); self.add_head(DList::new_link(data));
} }
/** /**
* Add data to the head of the list, and get the new containing * Add data to the head of the list, and get the new containing
* node. O(1). * node. O(1).
*/ */
fn push_head_n(@self, data: T) -> @DListNode<T> { fn push_head_n(@mut self, data: T) -> @mut DListNode<T> {
let mut nobe = DList::new_link(data); let mut nobe = DList::new_link(data);
self.add_head(nobe); self.add_head(nobe);
nobe.get() nobe.get()
} }
/// Add data to the tail of the list. O(1). /// Add data to the tail of the list. O(1).
fn push(@self, data: T) { fn push(@mut self, data: T) {
self.add_tail(DList::new_link(data)); self.add_tail(DList::new_link(data));
} }
/** /**
* Add data to the tail of the list, and get the new containing * Add data to the tail of the list, and get the new containing
* node. O(1). * node. O(1).
*/ */
fn push_n(@self, data: T) -> @DListNode<T> { fn push_n(@mut self, data: T) -> @mut DListNode<T> {
let mut nobe = DList::new_link(data); let mut nobe = DList::new_link(data);
self.add_tail(nobe); self.add_tail(nobe);
nobe.get() nobe.get()
@ -235,14 +244,16 @@ impl<T> DList<T> {
* Insert data into the middle of the list, left of the given node. * Insert data into the middle of the list, left of the given node.
* O(1). * O(1).
*/ */
fn insert_before(@self, data: T, neighbour: @DListNode<T>) { fn insert_before(@mut self, data: T, neighbour: @mut DListNode<T>) {
self.insert_left(DList::new_link(data), neighbour); self.insert_left(DList::new_link(data), neighbour);
} }
/** /**
* Insert an existing node in the middle of the list, left of the * Insert an existing node in the middle of the list, left of the
* given node. O(1). * given node. O(1).
*/ */
fn insert_n_before(@self, nobe: @DListNode<T>, neighbour: @DListNode<T>) { fn insert_n_before(@mut self,
nobe: @mut DListNode<T>,
neighbour: @mut DListNode<T>) {
self.make_mine(nobe); self.make_mine(nobe);
self.insert_left(Some(nobe), neighbour); self.insert_left(Some(nobe), neighbour);
} }
@ -251,10 +262,10 @@ impl<T> DList<T> {
* and get its containing node. O(1). * and get its containing node. O(1).
*/ */
fn insert_before_n( fn insert_before_n(
@self, @mut self,
data: T, data: T,
neighbour: @DListNode<T> neighbour: @mut DListNode<T>
) -> @DListNode<T> { ) -> @mut DListNode<T> {
let mut nobe = DList::new_link(data); let mut nobe = DList::new_link(data);
self.insert_left(nobe, neighbour); self.insert_left(nobe, neighbour);
nobe.get() nobe.get()
@ -263,14 +274,16 @@ impl<T> DList<T> {
* Insert data into the middle of the list, right of the given node. * Insert data into the middle of the list, right of the given node.
* O(1). * O(1).
*/ */
fn insert_after(@self, data: T, neighbour: @DListNode<T>) { fn insert_after(@mut self, data: T, neighbour: @mut DListNode<T>) {
self.insert_right(neighbour, DList::new_link(data)); self.insert_right(neighbour, DList::new_link(data));
} }
/** /**
* Insert an existing node in the middle of the list, right of the * Insert an existing node in the middle of the list, right of the
* given node. O(1). * given node. O(1).
*/ */
fn insert_n_after(@self, nobe: @DListNode<T>, neighbour: @DListNode<T>) { fn insert_n_after(@mut self,
nobe: @mut DListNode<T>,
neighbour: @mut DListNode<T>) {
self.make_mine(nobe); self.make_mine(nobe);
self.insert_right(neighbour, Some(nobe)); self.insert_right(neighbour, Some(nobe));
} }
@ -279,34 +292,34 @@ impl<T> DList<T> {
* and get its containing node. O(1). * and get its containing node. O(1).
*/ */
fn insert_after_n( fn insert_after_n(
@self, @mut self,
data: T, data: T,
neighbour: @DListNode<T> neighbour: @mut DListNode<T>
) -> @DListNode<T> { ) -> @mut DListNode<T> {
let mut nobe = DList::new_link(data); let mut nobe = DList::new_link(data);
self.insert_right(neighbour, nobe); self.insert_right(neighbour, nobe);
nobe.get() nobe.get()
} }
/// Remove a node from the head of the list. O(1). /// Remove a node from the head of the list. O(1).
fn pop_n(@self) -> DListLink<T> { fn pop_n(@mut self) -> DListLink<T> {
let hd = self.peek_n(); let hd = self.peek_n();
hd.map(|nobe| self.unlink(*nobe)); hd.map(|nobe| self.unlink(*nobe));
hd hd
} }
/// Remove a node from the tail of the list. O(1). /// Remove a node from the tail of the list. O(1).
fn pop_tail_n(@self) -> DListLink<T> { fn pop_tail_n(@mut self) -> DListLink<T> {
let tl = self.peek_tail_n(); let tl = self.peek_tail_n();
tl.map(|nobe| self.unlink(*nobe)); tl.map(|nobe| self.unlink(*nobe));
tl tl
} }
/// Get the node at the list's head. O(1). /// Get the node at the list's head. O(1).
pure fn peek_n(@self) -> DListLink<T> { self.hd } pure fn peek_n(@mut self) -> DListLink<T> { self.hd }
/// Get the node at the list's tail. O(1). /// Get the node at the list's tail. O(1).
pure fn peek_tail_n(@self) -> DListLink<T> { self.tl } pure fn peek_tail_n(@mut self) -> DListLink<T> { self.tl }
/// Get the node at the list's head, failing if empty. O(1). /// Get the node at the list's head, failing if empty. O(1).
pure fn head_n(@self) -> @DListNode<T> { pure fn head_n(@mut self) -> @mut DListNode<T> {
match self.hd { match self.hd {
Some(nobe) => nobe, Some(nobe) => nobe,
None => die!( None => die!(
@ -314,7 +327,7 @@ impl<T> DList<T> {
} }
} }
/// Get the node at the list's tail, failing if empty. O(1). /// Get the node at the list's tail, failing if empty. O(1).
pure fn tail_n(@self) -> @DListNode<T> { pure fn tail_n(@mut self) -> @mut DListNode<T> {
match self.tl { match self.tl {
Some(nobe) => nobe, Some(nobe) => nobe,
None => die!( None => die!(
@ -323,14 +336,14 @@ impl<T> DList<T> {
} }
/// Remove a node from anywhere in the list. O(1). /// Remove a node from anywhere in the list. O(1).
fn remove(@self, nobe: @DListNode<T>) { self.unlink(nobe); } fn remove(@mut self, nobe: @mut DListNode<T>) { self.unlink(nobe); }
/** /**
* Empty another list onto the end of this list, joining this list's tail * Empty another list onto the end of this list, joining this list's tail
* to the other list's head. O(1). * to the other list's head. O(1).
*/ */
fn append(@self, them: @DList<T>) { fn append(@mut self, them: @mut DList<T>) {
if managed::ptr_eq(self, them) { if managed::mut_ptr_eq(self, them) {
die!(~"Cannot append a dlist to itself!") die!(~"Cannot append a dlist to itself!")
} }
if them.len() > 0 { if them.len() > 0 {
@ -346,8 +359,8 @@ impl<T> DList<T> {
* Empty another list onto the start of this list, joining the other * Empty another list onto the start of this list, joining the other
* list's tail to this list's head. O(1). * list's tail to this list's head. O(1).
*/ */
fn prepend(@self, them: @DList<T>) { fn prepend(@mut self, them: @mut DList<T>) {
if managed::ptr_eq(self, them) { if managed::mut_ptr_eq(self, them) {
die!(~"Cannot prepend a dlist to itself!") die!(~"Cannot prepend a dlist to itself!")
} }
if them.len() > 0 { if them.len() > 0 {
@ -361,7 +374,7 @@ impl<T> DList<T> {
} }
/// Reverse the list's elements in place. O(n). /// Reverse the list's elements in place. O(n).
fn reverse(@self) { fn reverse(@mut self) {
do option::while_some(self.hd) |nobe| { do option::while_some(self.hd) |nobe| {
let next_nobe = nobe.next; let next_nobe = nobe.next;
self.remove(nobe); self.remove(nobe);
@ -375,7 +388,7 @@ impl<T> DList<T> {
* Remove everything from the list. This is important because the cyclic * Remove everything from the list. This is important because the cyclic
* links won't otherwise be automatically refcounted-collected. O(n). * links won't otherwise be automatically refcounted-collected. O(n).
*/ */
fn clear(@self) { fn clear(@mut self) {
// Cute as it would be to simply detach the list and proclaim "O(1)!", // Cute as it would be to simply detach the list and proclaim "O(1)!",
// the GC would still be a hidden O(n). Better to be honest about it. // the GC would still be a hidden O(n). Better to be honest about it.
while !self.is_empty() { while !self.is_empty() {
@ -384,7 +397,7 @@ impl<T> DList<T> {
} }
/// Iterate over nodes. /// Iterate over nodes.
pure fn each_node(@self, f: fn(@DListNode<T>) -> bool) { pure fn each_node(@mut self, f: fn(@mut DListNode<T>) -> bool) {
let mut link = self.peek_n(); let mut link = self.peek_n();
while link.is_some() { while link.is_some() {
let nobe = link.get(); let nobe = link.get();
@ -394,7 +407,7 @@ impl<T> DList<T> {
} }
/// Check data structure integrity. O(n). /// Check data structure integrity. O(n).
fn assert_consistent(@self) { fn assert_consistent(@mut self) {
if self.hd.is_none() || self.tl.is_none() { if self.hd.is_none() || self.tl.is_none() {
assert self.hd.is_none() && self.tl.is_none(); assert self.hd.is_none() && self.tl.is_none();
} }
@ -413,7 +426,7 @@ impl<T> DList<T> {
rabbit = rabbit.get().next; rabbit = rabbit.get().next;
} }
if rabbit.is_some() { if rabbit.is_some() {
assert !managed::ptr_eq(rabbit.get(), nobe); assert !managed::mut_ptr_eq(rabbit.get(), nobe);
} }
// advance // advance
link = nobe.next_link(); link = nobe.next_link();
@ -434,7 +447,7 @@ impl<T> DList<T> {
rabbit = rabbit.get().prev; rabbit = rabbit.get().prev;
} }
if rabbit.is_some() { if rabbit.is_some() {
assert !managed::ptr_eq(rabbit.get(), nobe); assert !managed::mut_ptr_eq(rabbit.get(), nobe);
} }
// advance // advance
link = nobe.prev_link(); link = nobe.prev_link();
@ -446,33 +459,33 @@ impl<T> DList<T> {
impl<T: Copy> DList<T> { impl<T: Copy> DList<T> {
/// Remove data from the head of the list. O(1). /// Remove data from the head of the list. O(1).
fn pop(@self) -> Option<T> { fn pop(@mut self) -> Option<T> {
self.pop_n().map(|nobe| nobe.data) self.pop_n().map(|nobe| nobe.data)
} }
/// Remove data from the tail of the list. O(1). /// Remove data from the tail of the list. O(1).
fn pop_tail(@self) -> Option<T> { fn pop_tail(@mut self) -> Option<T> {
self.pop_tail_n().map(|nobe| nobe.data) self.pop_tail_n().map(|nobe| nobe.data)
} }
/// Get data at the list's head. O(1). /// Get data at the list's head. O(1).
pure fn peek(@self) -> Option<T> { pure fn peek(@mut self) -> Option<T> {
self.peek_n().map(|nobe| nobe.data) self.peek_n().map(|nobe| nobe.data)
} }
/// Get data at the list's tail. O(1). /// Get data at the list's tail. O(1).
pure fn peek_tail(@self) -> Option<T> { pure fn peek_tail(@mut self) -> Option<T> {
self.peek_tail_n().map (|nobe| nobe.data) self.peek_tail_n().map (|nobe| nobe.data)
} }
/// Get data at the list's head, failing if empty. O(1). /// Get data at the list's head, failing if empty. O(1).
pure fn head(@self) -> T { self.head_n().data } pure fn head(@mut self) -> T { self.head_n().data }
/// Get data at the list's tail, failing if empty. O(1). /// Get data at the list's tail, failing if empty. O(1).
pure fn tail(@self) -> T { self.tail_n().data } pure fn tail(@mut self) -> T { self.tail_n().data }
/// Get the elements of the list as a vector. O(n). /// Get the elements of the list as a vector. O(n).
pure fn to_vec(@self) -> ~[T] { pure fn to_vec(@mut self) -> ~[T] {
let mut v = vec::with_capacity(self.size); let mut v = vec::with_capacity(self.size);
unsafe { unsafe {
// Take this out of the unchecked when iter's functions are pure // Take this out of the unchecked when iter's functions are pure
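
In the dlist changes above, `Option<@DListNode<T>>` with `mut prev`/`mut next` fields becomes `Option<@mut DListNode<T>>` with plain fields: mutability moves from the individual fields onto the shared box, and the receivers become `@mut self`. A rough modern-Rust analogue, using `Rc<RefCell<...>>` as a stand-in for `@mut` (the cycle this creates is exactly why the original DList needs an explicit `clear()`):

    use std::cell::RefCell;
    use std::rc::Rc;

    // `Rc<RefCell<Node<T>>>` plays the role of `@mut DListNode<T>`.
    type Link<T> = Option<Rc<RefCell<Node<T>>>>;

    struct Node<T> {
        data: T,
        prev: Link<T>, // plain fields: the cell around the whole node supplies mutability
        next: Link<T>,
    }

    fn main() {
        let a = Rc::new(RefCell::new(Node { data: 1, prev: None, next: None }));
        let b = Rc::new(RefCell::new(Node { data: 2, prev: None, next: None }));
        // Linking mutates the boxed nodes, not `mut` fields.
        a.borrow_mut().next = Some(b.clone());
        b.borrow_mut().prev = Some(a.clone());
        assert_eq!(a.borrow().next.as_ref().unwrap().borrow().data, 2);
        assert!(b.borrow().prev.is_some());
        // Note: a <-> b now form a reference cycle and would leak unless broken,
        // which is why the original list documents clear() as important.
    }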


@ -9,6 +9,7 @@
// except according to those terms. // except according to those terms.
mod inst { mod inst {
use cast;
use dlist; use dlist;
use dlist::DList; use dlist::DList;
use managed; use managed;
@ -16,7 +17,7 @@ mod inst {
use option; use option;
#[allow(non_camel_case_types)] #[allow(non_camel_case_types)]
pub type IMPL_T<A> = @DList<A>; pub type IMPL_T<A> = @mut DList<A>;
/** /**
* Iterates through the current contents. * Iterates through the current contents.
@ -30,17 +31,22 @@ mod inst {
while option::is_some(&link) { while option::is_some(&link) {
let nobe = option::get(link); let nobe = option::get(link);
assert nobe.linked; assert nobe.linked;
if !f(&nobe.data) { break; }
{
let frozen_nobe = &*nobe;
if !f(&frozen_nobe.data) { break; }
}
// Check (weakly) that the user didn't do a remove. // Check (weakly) that the user didn't do a remove.
if self.size == 0 { if self.size == 0 {
die!(~"The dlist became empty during iteration??") die!(~"The dlist became empty during iteration??")
} }
if !nobe.linked || if !nobe.linked ||
(!((nobe.prev.is_some() (!((nobe.prev.is_some()
|| managed::ptr_eq(self.hd.expect(~"headless dlist?"), || managed::mut_ptr_eq(self.hd.expect(~"headless dlist?"),
nobe)) nobe))
&& (nobe.next.is_some() && (nobe.next.is_some()
|| managed::ptr_eq(self.tl.expect(~"tailless dlist?"), || managed::mut_ptr_eq(self.tl.expect(~"tailless dlist?"),
nobe)))) { nobe)))) {
die!(~"Removing a dlist node during iteration is forbidden!") die!(~"Removing a dlist node during iteration is forbidden!")
} }


@ -39,6 +39,12 @@ pub pure fn ptr_eq<T>(a: @T, b: @T) -> bool {
unsafe { ptr::addr_of(&(*a)) == ptr::addr_of(&(*b)) } unsafe { ptr::addr_of(&(*a)) == ptr::addr_of(&(*b)) }
} }
#[inline(always)]
pub pure fn mut_ptr_eq<T>(a: @mut T, b: @mut T) -> bool {
//! Determine if two mutable shared boxes point to the same object
unsafe { ptr::addr_of(&(*a)) == ptr::addr_of(&(*b)) }
}
#[cfg(notest)] #[cfg(notest)]
impl<T:Eq> @const T : Eq { impl<T:Eq> @const T : Eq {
#[inline(always)] #[inline(always)]
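
The newly added `managed::mut_ptr_eq` compares two `@mut` boxes by address, mirroring the existing `ptr_eq` for `@T`. The modern equivalent of that identity check is `Rc::ptr_eq`; a small sketch:

    use std::cell::RefCell;
    use std::rc::Rc;

    fn main() {
        let a = Rc::new(RefCell::new(1));
        let b = a.clone();                // same box as `a`
        let c = Rc::new(RefCell::new(1)); // equal contents, different box
        assert!(Rc::ptr_eq(&a, &b));
        assert!(!Rc::ptr_eq(&a, &c));
    }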


@ -321,8 +321,7 @@ pub fn waitpid(pid: pid_t) -> c_int {
#[cfg(unix)] #[cfg(unix)]
pub fn pipe() -> {in: c_int, out: c_int} { pub fn pipe() -> {in: c_int, out: c_int} {
unsafe { unsafe {
let fds = {mut in: 0 as c_int, let mut fds = {in: 0 as c_int, out: 0 as c_int};
mut out: 0 as c_int };
assert (libc::pipe(ptr::mut_addr_of(&(fds.in))) == (0 as c_int)); assert (libc::pipe(ptr::mut_addr_of(&(fds.in))) == (0 as c_int));
return {in: fds.in, out: fds.out}; return {in: fds.in, out: fds.out};
} }
@ -338,8 +337,7 @@ pub fn pipe() -> {in: c_int, out: c_int} {
// fully understand. Here we explicitly make the pipe non-inheritable, // fully understand. Here we explicitly make the pipe non-inheritable,
// which means to pass it to a subprocess they need to be duplicated // which means to pass it to a subprocess they need to be duplicated
// first, as in rust_run_program. // first, as in rust_run_program.
let fds = { mut in: 0 as c_int, let mut fds = { in: 0 as c_int, out: 0 as c_int };
mut out: 0 as c_int };
let res = libc::pipe(ptr::mut_addr_of(&(fds.in)), let res = libc::pipe(ptr::mut_addr_of(&(fds.in)),
1024 as c_uint, 1024 as c_uint,
(libc::O_BINARY | libc::O_NOINHERIT) as c_int); (libc::O_BINARY | libc::O_NOINHERIT) as c_int);
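
In the `pipe` hunks above, mutability moves off the record fields (`{mut in, mut out}`) and onto the local binding (`let mut fds`). The same shape in modern Rust, with illustrative field names since `in` is a keyword:

    struct Fds {
        input: i32,  // stand-ins for the old `in`/`out` record fields
        output: i32,
    }

    fn main() {
        // The whole value is mutable through the binding, not per field.
        let mut fds = Fds { input: 0, output: 0 };
        fds.input = 3;
        fds.output = 4;
        assert_eq!((fds.input, fds.output), (3, 4));
    }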


@ -11,6 +11,7 @@
#[allow(structural_records)]; #[allow(structural_records)];
//! Process spawning //! Process spawning
use cast;
use io; use io;
use io::ReaderUtil; use io::ReaderUtil;
use libc; use libc;
@ -36,28 +37,28 @@ extern mod rustrt {
/// A value representing a child process /// A value representing a child process
pub trait Program { pub trait Program {
/// Returns the process id of the program /// Returns the process id of the program
fn get_id() -> pid_t; fn get_id(&mut self) -> pid_t;
/// Returns an io::writer that can be used to write to stdin /// Returns an io::writer that can be used to write to stdin
fn input() -> io::Writer; fn input(&mut self) -> io::Writer;
/// Returns an io::reader that can be used to read from stdout /// Returns an io::reader that can be used to read from stdout
fn output() -> io::Reader; fn output(&mut self) -> io::Reader;
/// Returns an io::reader that can be used to read from stderr /// Returns an io::reader that can be used to read from stderr
fn err() -> io::Reader; fn err(&mut self) -> io::Reader;
/// Closes the handle to the child processes standard input /// Closes the handle to the child processes standard input
fn close_input(); fn close_input(&mut self);
/** /**
* Waits for the child process to terminate. Closes the handle * Waits for the child process to terminate. Closes the handle
* to stdin if necessary. * to stdin if necessary.
*/ */
fn finish() -> int; fn finish(&mut self) -> int;
/// Closes open handles /// Closes open handles
fn destroy(); fn destroy(&mut self);
} }
@ -219,13 +220,13 @@ pub fn start_program(prog: &str, args: &[~str]) -> Program {
struct ProgRepr { struct ProgRepr {
pid: pid_t, pid: pid_t,
mut in_fd: c_int, in_fd: c_int,
out_file: *libc::FILE, out_file: *libc::FILE,
err_file: *libc::FILE, err_file: *libc::FILE,
mut finished: bool, finished: bool,
} }
fn close_repr_input(r: &ProgRepr) { fn close_repr_input(r: &mut ProgRepr) {
let invalid_fd = -1i32; let invalid_fd = -1i32;
if r.in_fd != invalid_fd { if r.in_fd != invalid_fd {
unsafe { unsafe {
@ -234,22 +235,27 @@ pub fn start_program(prog: &str, args: &[~str]) -> Program {
r.in_fd = invalid_fd; r.in_fd = invalid_fd;
} }
} }
fn finish_repr(r: &ProgRepr) -> int { fn finish_repr(r: &mut ProgRepr) -> int {
if r.finished { return 0; } if r.finished { return 0; }
r.finished = true; r.finished = true;
close_repr_input(r); close_repr_input(&mut *r);
return waitpid(r.pid); return waitpid(r.pid);
} }
fn destroy_repr(r: &ProgRepr) { fn destroy_repr(r: &mut ProgRepr) {
unsafe { unsafe {
finish_repr(r); finish_repr(&mut *r);
libc::fclose(r.out_file); libc::fclose(r.out_file);
libc::fclose(r.err_file); libc::fclose(r.err_file);
} }
} }
struct ProgRes { struct ProgRes {
r: ProgRepr, r: ProgRepr,
drop { destroy_repr(&self.r); } drop {
unsafe {
// XXX: This is bad.
destroy_repr(cast::transmute(&self.r));
}
}
} }
fn ProgRes(r: ProgRepr) -> ProgRes { fn ProgRes(r: ProgRepr) -> ProgRes {
@ -259,21 +265,21 @@ pub fn start_program(prog: &str, args: &[~str]) -> Program {
} }
impl ProgRes: Program { impl ProgRes: Program {
fn get_id() -> pid_t { return self.r.pid; } fn get_id(&mut self) -> pid_t { return self.r.pid; }
fn input() -> io::Writer { fn input(&mut self) -> io::Writer {
io::fd_writer(self.r.in_fd, false) io::fd_writer(self.r.in_fd, false)
} }
fn output() -> io::Reader { fn output(&mut self) -> io::Reader {
io::FILE_reader(self.r.out_file, false) io::FILE_reader(self.r.out_file, false)
} }
fn err() -> io::Reader { fn err(&mut self) -> io::Reader {
io::FILE_reader(self.r.err_file, false) io::FILE_reader(self.r.err_file, false)
} }
fn close_input() { close_repr_input(&self.r); } fn close_input(&mut self) { close_repr_input(&mut self.r); }
fn finish() -> int { finish_repr(&self.r) } fn finish(&mut self) -> int { finish_repr(&mut self.r) }
fn destroy() { destroy_repr(&self.r); } fn destroy(&mut self) { destroy_repr(&mut self.r); }
} }
let repr = ProgRepr { let mut repr = ProgRepr {
pid: pid, pid: pid,
in_fd: pipe_input.out, in_fd: pipe_input.out,
out_file: os::fdopen(pipe_output.in), out_file: os::fdopen(pipe_output.in),
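
The run library changes above thread mutability through the `Program` trait's receivers (`&mut self`) and through `&mut ProgRepr` helpers instead of relying on `mut` fields. A condensed modern-Rust sketch of that shape; the trait and struct here are illustrative stand-ins, not the real `run` API:

    // Illustrative stand-ins, not the real `run` API.
    trait Program {
        fn close_input(&mut self);
        fn finish(&mut self) -> i32;
    }

    struct ProgRepr {
        in_fd: i32,     // was `mut in_fd`
        finished: bool, // was `mut finished`
    }

    impl Program for ProgRepr {
        fn close_input(&mut self) {
            if self.in_fd != -1 {
                self.in_fd = -1; // allowed: we hold `&mut self`
            }
        }

        fn finish(&mut self) -> i32 {
            if self.finished {
                return 0;
            }
            self.finished = true;
            self.close_input();
            0
        }
    }

    fn main() {
        let mut p = ProgRepr { in_fd: 3, finished: false };
        assert_eq!(p.finish(), 0);
        assert!(p.finished);
    }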


@ -523,7 +523,7 @@ pub fn build_link_meta(sess: Session, c: &ast::crate, output: &Path,
} }
fn warn_missing(sess: Session, name: &str, default: &str) { fn warn_missing(sess: Session, name: &str, default: &str) {
if !sess.building_library { return; } if !*sess.building_library { return; }
sess.warn(fmt!("missing crate link meta `%s`, using `%s` as default", sess.warn(fmt!("missing crate link meta `%s`, using `%s` as default",
name, default)); name, default));
} }
@ -730,7 +730,7 @@ pub fn link_binary(sess: Session,
} }
} }
let output = if sess.building_library { let output = if *sess.building_library {
let long_libname = output_dll_filename(sess.targ_cfg.os, lm); let long_libname = output_dll_filename(sess.targ_cfg.os, lm);
debug!("link_meta.name: %s", lm.name); debug!("link_meta.name: %s", lm.name);
debug!("long_libname: %s", long_libname); debug!("long_libname: %s", long_libname);
@ -806,7 +806,7 @@ pub fn link_binary(sess: Session,
let used_libs = cstore::get_used_libraries(cstore); let used_libs = cstore::get_used_libraries(cstore);
for used_libs.each |l| { cc_args.push(~"-l" + *l); } for used_libs.each |l| { cc_args.push(~"-l" + *l); }
if sess.building_library { if *sess.building_library {
cc_args.push(lib_cmd); cc_args.push(lib_cmd);
// On mac we need to tell the linker to let this library // On mac we need to tell the linker to let this library


@ -199,7 +199,7 @@ pub fn compile_upto(sess: Session, cfg: ast::crate_cfg,
|| parse_input(sess, copy cfg, input) ); || parse_input(sess, copy cfg, input) );
if upto == cu_parse { return {crate: crate, tcx: None}; } if upto == cu_parse { return {crate: crate, tcx: None}; }
sess.building_library = session::building_library( *sess.building_library = session::building_library(
sess.opts.crate_type, crate, sess.opts.test); sess.opts.crate_type, crate, sess.opts.test);
crate = time(time_passes, ~"configuration", || crate = time(time_passes, ~"configuration", ||
@ -335,7 +335,7 @@ pub fn compile_upto(sess: Session, cfg: ast::crate_cfg,
let stop_after_codegen = let stop_after_codegen =
sess.opts.output_type != link::output_type_exe || sess.opts.output_type != link::output_type_exe ||
(sess.opts.static && sess.building_library) || (sess.opts.static && *sess.building_library) ||
sess.opts.jit; sess.opts.jit;
if stop_after_codegen { return {crate: crate, tcx: None}; } if stop_after_codegen { return {crate: crate, tcx: None}; }
@ -466,7 +466,7 @@ pub fn get_arch(triple: ~str) -> Option<session::arch> {
} }
pub fn build_target_config(sopts: @session::options, pub fn build_target_config(sopts: @session::options,
demitter: diagnostic::emitter) demitter: diagnostic::Emitter)
-> @session::config { -> @session::config {
let os = match get_os(sopts.target_triple) { let os = match get_os(sopts.target_triple) {
Some(os) => os, Some(os) => os,
@ -512,7 +512,7 @@ pub fn host_triple() -> ~str {
pub fn build_session_options(+binary: ~str, pub fn build_session_options(+binary: ~str,
matches: &getopts::Matches, matches: &getopts::Matches,
demitter: diagnostic::emitter) demitter: diagnostic::Emitter)
-> @session::options { -> @session::options {
let crate_type = if opt_present(matches, ~"lib") { let crate_type = if opt_present(matches, ~"lib") {
session::lib_crate session::lib_crate
@ -651,7 +651,7 @@ pub fn build_session_options(+binary: ~str,
} }
pub fn build_session(sopts: @session::options, pub fn build_session(sopts: @session::options,
demitter: diagnostic::emitter) -> Session { demitter: diagnostic::Emitter) -> Session {
let codemap = @codemap::CodeMap::new(); let codemap = @codemap::CodeMap::new();
let diagnostic_handler = let diagnostic_handler =
diagnostic::mk_handler(Some(demitter)); diagnostic::mk_handler(Some(demitter));
@ -662,30 +662,32 @@ pub fn build_session(sopts: @session::options,
pub fn build_session_(sopts: @session::options, pub fn build_session_(sopts: @session::options,
cm: @codemap::CodeMap, cm: @codemap::CodeMap,
demitter: diagnostic::emitter, demitter: diagnostic::Emitter,
span_diagnostic_handler: diagnostic::span_handler) span_diagnostic_handler: diagnostic::span_handler)
-> Session { -> Session {
let target_cfg = build_target_config(sopts, demitter); let target_cfg = build_target_config(sopts, demitter);
let p_s = parse::new_parse_sess_special_handler(span_diagnostic_handler, let p_s = parse::new_parse_sess_special_handler(span_diagnostic_handler,
cm); cm);
let cstore = cstore::mk_cstore(p_s.interner); let cstore = @mut cstore::mk_cstore(p_s.interner);
let filesearch = filesearch::mk_filesearch( let filesearch = filesearch::mk_filesearch(
sopts.maybe_sysroot, sopts.maybe_sysroot,
sopts.target_triple, sopts.target_triple,
/*bad*/copy sopts.addl_lib_search_paths); /*bad*/copy sopts.addl_lib_search_paths);
let lint_settings = lint::mk_lint_settings(); let lint_settings = lint::mk_lint_settings();
Session_(@{targ_cfg: target_cfg, @Session_ {
targ_cfg: target_cfg,
opts: sopts, opts: sopts,
cstore: cstore, cstore: cstore,
parse_sess: p_s, parse_sess: p_s,
codemap: cm, codemap: cm,
// For a library crate, this is always none // For a library crate, this is always none
mut main_fn: None, main_fn: @mut None,
span_diagnostic: span_diagnostic_handler, span_diagnostic: span_diagnostic_handler,
filesearch: filesearch, filesearch: filesearch,
mut building_library: false, building_library: @mut false,
working_dir: os::getcwd(), working_dir: os::getcwd(),
lint_settings: lint_settings}) lint_settings: lint_settings
}
} }
pub fn parse_pretty(sess: Session, &&name: ~str) -> pp_mode { pub fn parse_pretty(sess: Session, &&name: ~str) -> pp_mode {
@ -780,7 +782,7 @@ pub fn build_output_filenames(input: input,
let sopts = sess.opts; let sopts = sess.opts;
let stop_after_codegen = let stop_after_codegen =
sopts.output_type != link::output_type_exe || sopts.output_type != link::output_type_exe ||
sopts.static && sess.building_library; sopts.static && *sess.building_library;
let obj_suffix = let obj_suffix =
@ -811,7 +813,7 @@ pub fn build_output_filenames(input: input,
str_input(_) => ~"rust_out" str_input(_) => ~"rust_out"
}; };
if sess.building_library { if *sess.building_library {
out_path = dirpath.push(os::dll_filename(stem)); out_path = dirpath.push(os::dll_filename(stem));
obj_path = dirpath.push(stem).with_filetype(obj_suffix); obj_path = dirpath.push(stem).with_filetype(obj_suffix);
} else { } else {
@ -828,7 +830,7 @@ pub fn build_output_filenames(input: input,
(*out_file).with_filetype(obj_suffix) (*out_file).with_filetype(obj_suffix)
}; };
if sess.building_library { if *sess.building_library {
// FIXME (#2401): We might want to warn here; we're actually not // FIXME (#2401): We might want to warn here; we're actually not
// going to respect the user's choice of library name when it // going to respect the user's choice of library name when it
// comes time to link, we'll be linking to // comes time to link, we'll be linking to
@ -844,7 +846,7 @@ pub fn build_output_filenames(input: input,
obj_filename: obj_path}; obj_filename: obj_path};
} }
pub fn early_error(emitter: diagnostic::emitter, msg: ~str) -> ! { pub fn early_error(emitter: diagnostic::Emitter, msg: ~str) -> ! {
emitter(None, msg, diagnostic::fatal); emitter(None, msg, diagnostic::fatal);
die!(); die!();
} }


@ -149,23 +149,23 @@ pub type options =
pub type crate_metadata = {name: ~str, data: ~[u8]}; pub type crate_metadata = {name: ~str, data: ~[u8]};
pub type Session_ = {targ_cfg: @config, pub struct Session_ {
targ_cfg: @config,
opts: @options, opts: @options,
cstore: metadata::cstore::CStore, cstore: @mut metadata::cstore::CStore,
parse_sess: parse_sess, parse_sess: parse_sess,
codemap: @codemap::CodeMap, codemap: @codemap::CodeMap,
// For a library crate, this is always none // For a library crate, this is always none
mut main_fn: Option<(node_id, codemap::span)>, main_fn: @mut Option<(node_id, codemap::span)>,
span_diagnostic: diagnostic::span_handler, span_diagnostic: diagnostic::span_handler,
filesearch: filesearch::FileSearch, filesearch: filesearch::FileSearch,
mut building_library: bool, building_library: @mut bool,
working_dir: Path, working_dir: Path,
lint_settings: lint::lint_settings}; lint_settings: lint::lint_settings
pub enum Session {
Session_(@Session_)
} }
pub type Session = @Session_;
pub impl Session { pub impl Session {
fn span_fatal(sp: span, msg: ~str) -> ! { fn span_fatal(sp: span, msg: ~str) -> ! {
self.span_diagnostic.span_fatal(sp, msg) self.span_diagnostic.span_fatal(sp, msg)
@ -303,7 +303,8 @@ pub fn expect<T: Copy>(sess: Session,
diagnostic::expect(sess.diagnostic(), opt, msg) diagnostic::expect(sess.diagnostic(), opt, msg)
} }
pub fn building_library(req_crate_type: crate_type, crate: @ast::crate, pub fn building_library(req_crate_type: crate_type,
crate: @ast::crate,
testing: bool) -> bool { testing: bool) -> bool {
match req_crate_type { match req_crate_type {
bin_crate => false, bin_crate => false,
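
In the driver and session hunks above, `Session_` loses its `mut` fields: the session as a whole stays immutable, and the two fields that must change after construction (`building_library`, `main_fn`) become `@mut` boxes that are read and written through a dereference (`*sess.building_library`). A modern-Rust analogue using `Cell`/`RefCell` for those two fields (the types are stand-ins):

    use std::cell::{Cell, RefCell};
    use std::rc::Rc;

    struct Session {
        building_library: Cell<bool>,                // was `building_library: @mut bool`
        main_fn: RefCell<Option<(u32, (u32, u32))>>, // was `main_fn: @mut Option<(node_id, span)>`
    }

    fn main() {
        let sess = Rc::new(Session {
            building_library: Cell::new(false),
            main_fn: RefCell::new(None),
        });
        sess.building_library.set(true); // analogue of `*sess.building_library = ...`
        if sess.building_library.get() { // analogue of `if *sess.building_library`
            *sess.main_fn.borrow_mut() = Some((1, (0, 0)));
        }
        assert!(sess.main_fn.borrow().is_some());
    }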


@ -30,20 +30,25 @@ use syntax::attr::attrs_contains_name;
type node_id_gen = fn@() -> ast::node_id; type node_id_gen = fn@() -> ast::node_id;
type test = {span: span, path: ~[ast::ident], type test = {
ignore: bool, should_fail: bool}; span: span,
path: ~[ast::ident],
ignore: bool,
should_fail: bool
};
type test_ctxt = struct TestCtxt {
@{sess: session::Session, sess: session::Session,
crate: @ast::crate, crate: @ast::crate,
mut path: ~[ast::ident], path: ~[ast::ident],
testfns: DVec<test>}; testfns: ~[test]
}
// Traverse the crate, collecting all the test functions, eliding any // Traverse the crate, collecting all the test functions, eliding any
// existing main functions, and synthesizing a main test harness // existing main functions, and synthesizing a main test harness
pub fn modify_for_testing(sess: session::Session, pub fn modify_for_testing(sess: session::Session,
crate: @ast::crate) -> @ast::crate { crate: @ast::crate)
-> @ast::crate {
// We generate the test harness when building in the 'test' // We generate the test harness when building in the 'test'
// configuration, either with the '--test' or '--cfg test' // configuration, either with the '--test' or '--cfg test'
// command line options. // command line options.
@ -58,12 +63,14 @@ pub fn modify_for_testing(sess: session::Session,
} }
fn generate_test_harness(sess: session::Session, fn generate_test_harness(sess: session::Session,
crate: @ast::crate) -> @ast::crate { crate: @ast::crate)
let cx: test_ctxt = -> @ast::crate {
@{sess: sess, let cx: @mut TestCtxt = @mut TestCtxt {
sess: sess,
crate: crate, crate: crate,
mut path: ~[], path: ~[],
testfns: DVec()}; testfns: ~[]
};
let precursor = @fold::AstFoldFns { let precursor = @fold::AstFoldFns {
fold_crate: fold::wrap(|a,b| fold_crate(cx, a, b) ), fold_crate: fold::wrap(|a,b| fold_crate(cx, a, b) ),
@ -83,13 +90,15 @@ fn strip_test_functions(crate: @ast::crate) -> @ast::crate {
} }
} }
fn fold_mod(cx: test_ctxt, m: ast::_mod, fld: fold::ast_fold) -> ast::_mod { fn fold_mod(cx: @mut TestCtxt,
m: ast::_mod,
fld: fold::ast_fold)
-> ast::_mod {
// Remove any #[main] from the AST so it doesn't clash with // Remove any #[main] from the AST so it doesn't clash with
// the one we're going to add. Only if compiling an executable. // the one we're going to add. Only if compiling an executable.
fn nomain(cx: test_ctxt, item: @ast::item) -> @ast::item { fn nomain(cx: @mut TestCtxt, item: @ast::item) -> @ast::item {
if !cx.sess.building_library { if !*cx.sess.building_library {
@ast::item{attrs: item.attrs.filtered(|attr| { @ast::item{attrs: item.attrs.filtered(|attr| {
attr::get_attr_name(*attr) != ~"main" attr::get_attr_name(*attr) != ~"main"
}),.. copy *item} }),.. copy *item}
@ -104,8 +113,10 @@ fn fold_mod(cx: test_ctxt, m: ast::_mod, fld: fold::ast_fold) -> ast::_mod {
fold::noop_fold_mod(mod_nomain, fld) fold::noop_fold_mod(mod_nomain, fld)
} }
fn fold_crate(cx: test_ctxt, c: ast::crate_, fld: fold::ast_fold) -> fn fold_crate(cx: @mut TestCtxt,
ast::crate_ { c: ast::crate_,
fld: fold::ast_fold)
-> ast::crate_ {
let folded = fold::noop_fold_crate(c, fld); let folded = fold::noop_fold_crate(c, fld);
// Add a special __test module to the crate that will contain code // Add a special __test module to the crate that will contain code
@ -115,9 +126,8 @@ fn fold_crate(cx: test_ctxt, c: ast::crate_, fld: fold::ast_fold) ->
} }
fn fold_item(cx: test_ctxt, &&i: @ast::item, fld: fold::ast_fold) -> fn fold_item(cx: @mut TestCtxt, &&i: @ast::item, fld: fold::ast_fold)
Option<@ast::item> { -> Option<@ast::item> {
cx.path.push(i.ident); cx.path.push(i.ident);
debug!("current path: %s", debug!("current path: %s",
ast_util::path_name_i(cx.path, cx.sess.parse_sess.interner)); ast_util::path_name_i(cx.path, cx.sess.parse_sess.interner));
@ -125,7 +135,8 @@ fn fold_item(cx: test_ctxt, &&i: @ast::item, fld: fold::ast_fold) ->
if is_test_fn(i) { if is_test_fn(i) {
match i.node { match i.node {
ast::item_fn(_, purity, _, _) if purity == ast::unsafe_fn => { ast::item_fn(_, purity, _, _) if purity == ast::unsafe_fn => {
cx.sess.span_fatal( let sess = cx.sess;
sess.span_fatal(
i.span, i.span,
~"unsafe functions cannot be used for tests"); ~"unsafe functions cannot be used for tests");
} }
@ -165,7 +176,7 @@ fn is_test_fn(i: @ast::item) -> bool {
return has_test_attr && has_test_signature(i); return has_test_attr && has_test_signature(i);
} }
fn is_ignored(cx: test_ctxt, i: @ast::item) -> bool { fn is_ignored(cx: @mut TestCtxt, i: @ast::item) -> bool {
let ignoreattrs = attr::find_attrs_by_name(i.attrs, "ignore"); let ignoreattrs = attr::find_attrs_by_name(i.attrs, "ignore");
let ignoreitems = attr::attr_metas(ignoreattrs); let ignoreitems = attr::attr_metas(ignoreattrs);
return if !ignoreitems.is_empty() { return if !ignoreitems.is_empty() {
@ -183,7 +194,7 @@ fn should_fail(i: @ast::item) -> bool {
vec::len(attr::find_attrs_by_name(i.attrs, ~"should_fail")) > 0u vec::len(attr::find_attrs_by_name(i.attrs, ~"should_fail")) > 0u
} }
fn add_test_module(cx: test_ctxt, +m: ast::_mod) -> ast::_mod { fn add_test_module(cx: @mut TestCtxt, +m: ast::_mod) -> ast::_mod {
let testmod = mk_test_module(cx); let testmod = mk_test_module(cx);
ast::_mod { ast::_mod {
items: vec::append_one(/*bad*/copy m.items, testmod), items: vec::append_one(/*bad*/copy m.items, testmod),
@ -207,7 +218,7 @@ mod __test {
*/ */
fn mk_test_module(cx: test_ctxt) -> @ast::item { fn mk_test_module(cx: @mut TestCtxt) -> @ast::item {
// Link to std // Link to std
let std = mk_std(cx); let std = mk_std(cx);
let view_items = if is_std(cx) { ~[] } else { ~[std] }; let view_items = if is_std(cx) { ~[] } else { ~[std] };
@ -225,17 +236,18 @@ fn mk_test_module(cx: test_ctxt) -> @ast::item {
// This attribute tells resolve to let us call unexported functions // This attribute tells resolve to let us call unexported functions
let resolve_unexported_attr = let resolve_unexported_attr =
attr::mk_attr(attr::mk_word_item(~"!resolve_unexported")); attr::mk_attr(attr::mk_word_item(~"!resolve_unexported"));
let sess = cx.sess;
let item = ast::item { let item = ast::item {
ident: cx.sess.ident_of(~"__test"), ident: sess.ident_of(~"__test"),
attrs: ~[resolve_unexported_attr], attrs: ~[resolve_unexported_attr],
id: cx.sess.next_node_id(), id: sess.next_node_id(),
node: item_, node: item_,
vis: ast::public, vis: ast::public,
span: dummy_sp(), span: dummy_sp(),
}; };
debug!("Synthetic test module:\n%s\n", debug!("Synthetic test module:\n%s\n",
pprust::item_to_str(@copy item, cx.sess.intr())); pprust::item_to_str(@copy item, sess.intr()));
return @item; return @item;
} }
@ -260,14 +272,15 @@ fn path_node_global(+ids: ~[ast::ident]) -> @ast::path {
types: ~[] } types: ~[] }
} }
fn mk_std(cx: test_ctxt) -> @ast::view_item { fn mk_std(cx: @mut TestCtxt) -> @ast::view_item {
let vers = ast::lit_str(@~"0.6"); let vers = ast::lit_str(@~"0.6");
let vers = nospan(vers); let vers = nospan(vers);
let mi = ast::meta_name_value(~"vers", vers); let mi = ast::meta_name_value(~"vers", vers);
let mi = nospan(mi); let mi = nospan(mi);
let vi = ast::view_item_use(cx.sess.ident_of(~"std"), let sess = cx.sess;
let vi = ast::view_item_use(sess.ident_of(~"std"),
~[@mi], ~[@mi],
cx.sess.next_node_id()); sess.next_node_id());
let vi = ast::view_item { let vi = ast::view_item {
node: vi, node: vi,
attrs: ~[], attrs: ~[],
@ -278,7 +291,7 @@ fn mk_std(cx: test_ctxt) -> @ast::view_item {
return @vi; return @vi;
} }
fn mk_tests(cx: test_ctxt) -> @ast::item { fn mk_tests(cx: @mut TestCtxt) -> @ast::item {
let ret_ty = mk_test_desc_and_fn_vec_ty(cx); let ret_ty = mk_test_desc_and_fn_vec_ty(cx);
let decl = ast::fn_decl { let decl = ast::fn_decl {
@ -290,15 +303,17 @@ fn mk_tests(cx: test_ctxt) -> @ast::item {
// The vector of test_descs for this crate // The vector of test_descs for this crate
let test_descs = mk_test_desc_and_fn_vec(cx); let test_descs = mk_test_desc_and_fn_vec(cx);
let body_: ast::blk_ = let sess = cx.sess;
default_block(~[], option::Some(test_descs), cx.sess.next_node_id()); let body_: ast::blk_ = default_block(~[],
option::Some(test_descs),
sess.next_node_id());
let body = nospan(body_); let body = nospan(body_);
let item_ = ast::item_fn(decl, ast::impure_fn, ~[], body); let item_ = ast::item_fn(decl, ast::impure_fn, ~[], body);
let item = ast::item { let item = ast::item {
ident: cx.sess.ident_of(~"tests"), ident: sess.ident_of(~"tests"),
attrs: ~[], attrs: ~[],
id: cx.sess.next_node_id(), id: sess.next_node_id(),
node: item_, node: item_,
vis: ast::public, vis: ast::public,
span: dummy_sp(), span: dummy_sp(),
@ -306,7 +321,7 @@ fn mk_tests(cx: test_ctxt) -> @ast::item {
return @item; return @item;
} }
fn is_std(cx: test_ctxt) -> bool { fn is_std(cx: @mut TestCtxt) -> bool {
let is_std = { let is_std = {
let items = attr::find_linkage_metas(cx.crate.node.attrs); let items = attr::find_linkage_metas(cx.crate.node.attrs);
match attr::last_meta_item_value_str_by_name(items, ~"name") { match attr::last_meta_item_value_str_by_name(items, ~"name") {
@ -317,28 +332,28 @@ fn is_std(cx: test_ctxt) -> bool {
return is_std; return is_std;
} }
fn mk_path(cx: test_ctxt, +path: ~[ast::ident]) -> @ast::path { fn mk_path(cx: @mut TestCtxt, +path: ~[ast::ident]) -> @ast::path {
// For tests that are inside of std we don't want to prefix // For tests that are inside of std we don't want to prefix
// the paths with std:: // the paths with std::
if is_std(cx) { path_node_global(path) } let sess = cx.sess;
else { if is_std(cx) {
path_node( path_node_global(path)
~[cx.sess.ident_of(~"self"), } else {
cx.sess.ident_of(~"std")] path_node(~[ sess.ident_of(~"self"), sess.ident_of(~"std") ] + path)
+ path)
} }
} }
// The ast::Ty of ~[std::test::TestDescAndFn] // The ast::Ty of ~[std::test::test_desc]
fn mk_test_desc_and_fn_vec_ty(cx: test_ctxt) -> @ast::Ty { fn mk_test_desc_and_fn_vec_ty(cx: @mut TestCtxt) -> @ast::Ty {
let test_desc_and_fn_ty_path = let sess = cx.sess;
mk_path(cx, ~[cx.sess.ident_of(~"test"), let test_desc_and_fn_ty_path = mk_path(cx, ~[
cx.sess.ident_of(~"TestDescAndFn")]); sess.ident_of(~"test"),
sess.ident_of(~"TestDescAndFn")
]);
let test_desc_and_fn_ty = ast::Ty { let test_desc_and_fn_ty = ast::Ty {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
node: ast::ty_path(test_desc_and_fn_ty_path, node: ast::ty_path(test_desc_and_fn_ty_path, sess.next_node_id()),
cx.sess.next_node_id()),
span: dummy_sp(), span: dummy_sp(),
}; };
@ -346,104 +361,109 @@ fn mk_test_desc_and_fn_vec_ty(cx: test_ctxt) -> @ast::Ty {
mutbl: ast::m_imm}; mutbl: ast::m_imm};
let inner_ty = @ast::Ty { let inner_ty = @ast::Ty {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
node: ast::ty_vec(vec_mt), node: ast::ty_vec(vec_mt),
span: dummy_sp(), span: dummy_sp(),
}; };
@ast::Ty { @ast::Ty {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
node: ast::ty_uniq(ast::mt { ty: inner_ty, mutbl: ast::m_imm }), node: ast::ty_uniq(ast::mt { ty: inner_ty, mutbl: ast::m_imm }),
span: dummy_sp(), span: dummy_sp(),
} }
} }
fn mk_test_desc_and_fn_vec(cx: test_ctxt) -> @ast::expr { fn mk_test_desc_and_fn_vec(cx: @mut TestCtxt) -> @ast::expr {
debug!("building test vector from %u tests", cx.testfns.len()); debug!("building test vector from %u tests", cx.testfns.len());
let mut descs = ~[]; let mut descs = ~[];
for cx.testfns.each |test| { {
let testfns = &mut cx.testfns;
for testfns.each |test| {
descs.push(mk_test_desc_and_fn_rec(cx, *test)); descs.push(mk_test_desc_and_fn_rec(cx, *test));
} }
}
let sess = cx.sess;
let inner_expr = @ast::expr { let inner_expr = @ast::expr {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
callee_id: cx.sess.next_node_id(), callee_id: sess.next_node_id(),
node: ast::expr_vec(descs, ast::m_imm), node: ast::expr_vec(descs, ast::m_imm),
span: dummy_sp(), span: dummy_sp(),
}; };
@ast::expr { @ast::expr {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
callee_id: cx.sess.next_node_id(), callee_id: sess.next_node_id(),
node: ast::expr_vstore(inner_expr, ast::expr_vstore_uniq), node: ast::expr_vstore(inner_expr, ast::expr_vstore_uniq),
span: dummy_sp(), span: dummy_sp(),
} }
} }
fn mk_test_desc_and_fn_rec(cx: test_ctxt, test: test) -> @ast::expr { fn mk_test_desc_and_fn_rec(cx: @mut TestCtxt, test: test) -> @ast::expr {
let span = test.span; let span = test.span;
let path = /*bad*/copy test.path; let path = /*bad*/copy test.path;
debug!("encoding %s", ast_util::path_name_i(path, let sess = cx.sess;
cx.sess.parse_sess.interner)); debug!("encoding %s",
ast_util::path_name_i(path, sess.parse_sess.interner));
let name_lit: ast::lit = let name_lit: ast::lit =
nospan(ast::lit_str(@ast_util::path_name_i( nospan(ast::lit_str(@ast_util::path_name_i(
path, cx.sess.parse_sess.interner))); path,
sess.parse_sess.interner)));
let name_expr_inner = @ast::expr { let name_expr_inner = @ast::expr {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
callee_id: cx.sess.next_node_id(), callee_id: sess.next_node_id(),
node: ast::expr_lit(@name_lit), node: ast::expr_lit(@name_lit),
span: span, span: span,
}; };
let name_expr = ast::expr { let name_expr = ast::expr {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
callee_id: cx.sess.next_node_id(), callee_id: sess.next_node_id(),
node: ast::expr_vstore(name_expr_inner, ast::expr_vstore_uniq), node: ast::expr_vstore(name_expr_inner, ast::expr_vstore_uniq),
span: dummy_sp(), span: dummy_sp(),
}; };
let name_field = nospan(ast::field_ { let name_field = nospan(ast::field_ {
mutbl: ast::m_imm, mutbl: ast::m_imm,
ident: cx.sess.ident_of(~"name"), ident: sess.ident_of(~"name"),
expr: @name_expr, expr: @name_expr,
}); });
let ignore_lit: ast::lit = nospan(ast::lit_bool(test.ignore)); let ignore_lit: ast::lit = nospan(ast::lit_bool(test.ignore));
let ignore_expr = ast::expr { let ignore_expr = ast::expr {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
callee_id: cx.sess.next_node_id(), callee_id: sess.next_node_id(),
node: ast::expr_lit(@ignore_lit), node: ast::expr_lit(@ignore_lit),
span: span, span: span,
}; };
let ignore_field = nospan(ast::field_ { let ignore_field = nospan(ast::field_ {
mutbl: ast::m_imm, mutbl: ast::m_imm,
ident: cx.sess.ident_of(~"ignore"), ident: sess.ident_of(~"ignore"),
expr: @ignore_expr, expr: @ignore_expr,
}); });
let fail_lit: ast::lit = nospan(ast::lit_bool(test.should_fail)); let fail_lit: ast::lit = nospan(ast::lit_bool(test.should_fail));
let fail_expr = ast::expr { let fail_expr = ast::expr {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
callee_id: cx.sess.next_node_id(), callee_id: sess.next_node_id(),
node: ast::expr_lit(@fail_lit), node: ast::expr_lit(@fail_lit),
span: span, span: span,
}; };
let fail_field = nospan(ast::field_ { let fail_field = nospan(ast::field_ {
mutbl: ast::m_imm, mutbl: ast::m_imm,
ident: cx.sess.ident_of(~"should_fail"), ident: sess.ident_of(~"should_fail"),
expr: @fail_expr, expr: @fail_expr,
}); });
let test_desc_path = let test_desc_path =
mk_path(cx, ~[cx.sess.ident_of(~"test"), mk_path(cx, ~[ sess.ident_of(~"test"), sess.ident_of(~"TestDesc") ]);
cx.sess.ident_of(~"TestDesc")]);
let desc_rec_ = ast::expr_struct( let desc_rec_ = ast::expr_struct(
test_desc_path, test_desc_path,
@ -452,40 +472,40 @@ fn mk_test_desc_and_fn_rec(cx: test_ctxt, test: test) -> @ast::expr {
); );
let desc_rec = @ast::expr { let desc_rec = @ast::expr {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
callee_id: cx.sess.next_node_id(), callee_id: sess.next_node_id(),
node: desc_rec_, node: desc_rec_,
span: span, span: span,
}; };
let desc_field = nospan(ast::field_ { let desc_field = nospan(ast::field_ {
mutbl: ast::m_imm, mutbl: ast::m_imm,
ident: cx.sess.ident_of(~"desc"), ident: sess.ident_of(~"desc"),
expr: desc_rec expr: desc_rec
}); });
let fn_path = path_node_global(path); let fn_path = path_node_global(path);
let fn_expr = @ast::expr { let fn_expr = @ast::expr {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
callee_id: cx.sess.next_node_id(), callee_id: sess.next_node_id(),
node: ast::expr_path(fn_path), node: ast::expr_path(fn_path),
span: span, span: span,
}; };
let fn_field = nospan(ast::field_ { let fn_field = nospan(ast::field_ {
mutbl: ast::m_imm, mutbl: ast::m_imm,
ident: cx.sess.ident_of(~"testfn"), ident: sess.ident_of(~"testfn"),
expr: fn_expr, expr: fn_expr,
}); });
let test_desc_and_fn_path = let test_desc_and_fn_path =
mk_path(cx, ~[cx.sess.ident_of(~"test"), mk_path(cx, ~[sess.ident_of(~"test"),
cx.sess.ident_of(~"TestDescAndFn")]); sess.ident_of(~"TestDescAndFn")]);
let desc_and_fn_rec = @ast::expr { let desc_and_fn_rec = @ast::expr {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
callee_id: cx.sess.next_node_id(), callee_id: sess.next_node_id(),
node: ast::expr_struct(test_desc_and_fn_path, node: ast::expr_struct(test_desc_and_fn_path,
~[fn_field, desc_field], ~[fn_field, desc_field],
option::None), option::None),
@ -495,9 +515,10 @@ fn mk_test_desc_and_fn_rec(cx: test_ctxt, test: test) -> @ast::expr {
return desc_and_fn_rec; return desc_and_fn_rec;
} }
fn mk_main(cx: test_ctxt) -> @ast::item { fn mk_main(cx: @mut TestCtxt) -> @ast::item {
let sess = cx.sess;
let ret_ty = ast::Ty { let ret_ty = ast::Ty {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
node: ast::ty_nil, node: ast::ty_nil,
span: dummy_sp(), span: dummy_sp(),
}; };
@ -511,15 +532,16 @@ fn mk_main(cx: test_ctxt) -> @ast::item {
let test_main_call_expr = mk_test_main_call(cx); let test_main_call_expr = mk_test_main_call(cx);
let body_: ast::blk_ = let body_: ast::blk_ =
default_block(~[], option::Some(test_main_call_expr), default_block(~[],
cx.sess.next_node_id()); option::Some(test_main_call_expr),
sess.next_node_id());
let body = codemap::spanned { node: body_, span: dummy_sp() }; let body = codemap::spanned { node: body_, span: dummy_sp() };
let item_ = ast::item_fn(decl, ast::impure_fn, ~[], body); let item_ = ast::item_fn(decl, ast::impure_fn, ~[], body);
let item = ast::item { let item = ast::item {
ident: cx.sess.ident_of(~"main"), ident: sess.ident_of(~"main"),
attrs: ~[attr::mk_attr(attr::mk_word_item(~"main"))], attrs: ~[attr::mk_attr(attr::mk_word_item(~"main"))],
id: cx.sess.next_node_id(), id: sess.next_node_id(),
node: item_, node: item_,
vis: ast::public, vis: ast::public,
span: dummy_sp(), span: dummy_sp(),
@ -527,64 +549,64 @@ fn mk_main(cx: test_ctxt) -> @ast::item {
return @item; return @item;
} }
fn mk_test_main_call(cx: test_ctxt) -> @ast::expr { fn mk_test_main_call(cx: @mut TestCtxt) -> @ast::expr {
// Call os::args to generate the vector of test_descs // Call os::args to generate the vector of test_descs
let sess = cx.sess;
let args_path = path_node_global(~[ let args_path = path_node_global(~[
cx.sess.ident_of(~"os"), sess.ident_of(~"os"),
cx.sess.ident_of(~"args") sess.ident_of(~"args")
]); ]);
let args_path_expr = ast::expr { let args_path_expr = ast::expr {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
callee_id: cx.sess.next_node_id(), callee_id: sess.next_node_id(),
node: ast::expr_path(args_path), node: ast::expr_path(args_path),
span: dummy_sp(), span: dummy_sp(),
}; };
let args_call_expr = ast::expr { let args_call_expr = ast::expr {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
callee_id: cx.sess.next_node_id(), callee_id: sess.next_node_id(),
node: ast::expr_call(@args_path_expr, ~[], ast::NoSugar), node: ast::expr_call(@args_path_expr, ~[], ast::NoSugar),
span: dummy_sp(), span: dummy_sp(),
}; };
// Call __test::test to generate the vector of test_descs // Call __test::test to generate the vector of test_descs
let test_path = path_node(~[cx.sess.ident_of(~"tests")]); let test_path = path_node(~[ sess.ident_of(~"tests") ]);
let test_path_expr = ast::expr { let test_path_expr = ast::expr {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
callee_id: cx.sess.next_node_id(), callee_id: sess.next_node_id(),
node: ast::expr_path(test_path), node: ast::expr_path(test_path),
span: dummy_sp(), span: dummy_sp(),
}; };
let test_call_expr = ast::expr { let test_call_expr = ast::expr {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
callee_id: cx.sess.next_node_id(), callee_id: sess.next_node_id(),
node: ast::expr_call(@test_path_expr, ~[], ast::NoSugar), node: ast::expr_call(@test_path_expr, ~[], ast::NoSugar),
span: dummy_sp(), span: dummy_sp(),
}; };
// Call std::test::test_main // Call std::test::test_main
let test_main_path = let test_main_path = mk_path(cx, ~[
mk_path(cx, ~[cx.sess.ident_of(~"test"), sess.ident_of(~"test"),
cx.sess.ident_of(~"test_main")]); sess.ident_of(~"test_main")
]);
let test_main_path_expr = ast::expr { let test_main_path_expr = ast::expr {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
callee_id: cx.sess.next_node_id(), callee_id: sess.next_node_id(),
node: ast::expr_path(test_main_path), node: ast::expr_path(test_main_path),
span: dummy_sp(), span: dummy_sp(),
}; };
let test_main_call_expr = ast::expr { let test_main_call_expr = ast::expr {
id: cx.sess.next_node_id(), id: sess.next_node_id(),
callee_id: cx.sess.next_node_id(), callee_id: sess.next_node_id(),
node: ast::expr_call( node: ast::expr_call(@test_main_path_expr,
@test_main_path_expr,
~[@args_call_expr, @test_call_expr], ~[@args_call_expr, @test_call_expr],
ast::NoSugar ast::NoSugar),
),
span: dummy_sp(), span: dummy_sp(),
}; };
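
For orientation, the AST assembled by mk_main and mk_test_main_call above corresponds to a synthesized main that feeds the collected tests to the harness. The sketch below is illustrative only and written in present-day Rust: TestDescAndFn is trimmed to a name plus a function pointer, and the real std::test::test_main also parses filter flags from the arguments rather than running everything.

// Illustrative sketch only: the commit builds this as ast:: nodes, not as
// source text, and this struct is a trimmed stand-in for TestDescAndFn.
struct TestDescAndFn {
    name: &'static str,
    testfn: fn(),
}

fn smoke() {
    assert_eq!(2 + 2, 4);
}

fn tests() -> Vec<TestDescAndFn> {
    // In the generated crate this vector is assembled from every #[test] item.
    vec![TestDescAndFn { name: "smoke", testfn: smoke }]
}

fn test_main(args: Vec<String>, tests: Vec<TestDescAndFn>) {
    // The real harness filters on `args`; this sketch just runs everything.
    let _ = args;
    for t in tests {
        (t.testfn)();
        println!("test {} ... ok", t.name);
    }
}

fn main() {
    test_main(std::env::args().collect(), tests());
}
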

View file

@ -19,7 +19,6 @@ use metadata::decoder;
use metadata::filesearch::FileSearch; use metadata::filesearch::FileSearch;
use metadata::loader; use metadata::loader;
use core::dvec::DVec;
use core::either; use core::either;
use core::option; use core::option;
use core::vec; use core::vec;
@ -36,19 +35,21 @@ use std::oldmap::HashMap;
// libraries necessary for later resolving, typechecking, linking, etc. // libraries necessary for later resolving, typechecking, linking, etc.
pub fn read_crates(diag: span_handler, pub fn read_crates(diag: span_handler,
crate: ast::crate, crate: ast::crate,
cstore: cstore::CStore, cstore: @mut cstore::CStore,
filesearch: FileSearch, filesearch: FileSearch,
os: loader::os, os: loader::os,
static: bool, statik: bool,
intr: @ident_interner) { intr: @ident_interner) {
let e = @{diag: diag, let e = @mut Env {
diag: diag,
filesearch: filesearch, filesearch: filesearch,
cstore: cstore, cstore: cstore,
os: os, os: os,
static: static, statik: statik,
crate_cache: DVec(), crate_cache: @mut ~[],
mut next_crate_num: 1, next_crate_num: 1,
intr: intr}; intr: intr
};
let v = let v =
visit::mk_simple_visitor(@visit::SimpleVisitor { visit::mk_simple_visitor(@visit::SimpleVisitor {
visit_view_item: |a| visit_view_item(e, a), visit_view_item: |a| visit_view_item(e, a),
@ -56,7 +57,7 @@ pub fn read_crates(diag: span_handler,
.. *visit::default_simple_visitor()}); .. *visit::default_simple_visitor()});
visit::visit_crate(crate, (), v); visit::visit_crate(crate, (), v);
dump_crates(e.crate_cache); dump_crates(e.crate_cache);
warn_if_multiple_versions(e, diag, e.crate_cache.get()); warn_if_multiple_versions(e, diag, e.crate_cache);
} }
type cache_entry = { type cache_entry = {
@ -66,7 +67,7 @@ type cache_entry = {
metas: @~[@ast::meta_item] metas: @~[@ast::meta_item]
}; };
fn dump_crates(crate_cache: DVec<cache_entry>) { fn dump_crates(+crate_cache: @mut ~[cache_entry]) {
debug!("resolved crates:"); debug!("resolved crates:");
for crate_cache.each |entry| { for crate_cache.each |entry| {
debug!("cnum: %?", entry.cnum); debug!("cnum: %?", entry.cnum);
@ -75,8 +76,9 @@ fn dump_crates(crate_cache: DVec<cache_entry>) {
} }
} }
fn warn_if_multiple_versions(e: env, diag: span_handler, fn warn_if_multiple_versions(e: @mut Env,
crate_cache: ~[cache_entry]) { diag: span_handler,
crate_cache: @mut ~[cache_entry]) {
use either::*; use either::*;
if crate_cache.len() != 0u { if crate_cache.len() != 0u {
@ -108,20 +110,22 @@ fn warn_if_multiple_versions(e: env, diag: span_handler,
} }
} }
warn_if_multiple_versions(e, diag, non_matches); warn_if_multiple_versions(e, diag, @mut non_matches);
} }
} }
type env = @{diag: span_handler, struct Env {
diag: span_handler,
filesearch: FileSearch, filesearch: FileSearch,
cstore: cstore::CStore, cstore: @mut cstore::CStore,
os: loader::os, os: loader::os,
static: bool, statik: bool,
crate_cache: DVec<cache_entry>, crate_cache: @mut ~[cache_entry],
mut next_crate_num: ast::crate_num, next_crate_num: ast::crate_num,
intr: @ident_interner}; intr: @ident_interner
}
fn visit_view_item(e: env, i: @ast::view_item) { fn visit_view_item(e: @mut Env, i: @ast::view_item) {
match /*bad*/copy i.node { match /*bad*/copy i.node {
ast::view_item_use(ident, meta_items, id) => { ast::view_item_use(ident, meta_items, id) => {
debug!("resolving use stmt. ident: %?, meta: %?", ident, meta_items); debug!("resolving use stmt. ident: %?, meta: %?", ident, meta_items);
@ -132,7 +136,7 @@ fn visit_view_item(e: env, i: @ast::view_item) {
} }
} }
fn visit_item(e: env, i: @ast::item) { fn visit_item(e: @mut Env, i: @ast::item) {
match /*bad*/copy i.node { match /*bad*/copy i.node {
ast::item_foreign_mod(fm) => { ast::item_foreign_mod(fm) => {
match attr::foreign_abi(i.attrs) { match attr::foreign_abi(i.attrs) {
@ -202,9 +206,8 @@ fn metas_with_ident(+ident: ~str, +metas: ~[@ast::meta_item])
metas_with(ident, ~"name", metas) metas_with(ident, ~"name", metas)
} }
fn existing_match(e: env, metas: ~[@ast::meta_item], hash: ~str) -> fn existing_match(e: @mut Env, metas: ~[@ast::meta_item], hash: ~str)
Option<int> { -> Option<int> {
for e.crate_cache.each |c| { for e.crate_cache.each |c| {
if loader::metadata_matches(*c.metas, metas) if loader::metadata_matches(*c.metas, metas)
&& (hash.is_empty() || c.hash == hash) { && (hash.is_empty() || c.hash == hash) {
@ -214,8 +217,12 @@ fn existing_match(e: env, metas: ~[@ast::meta_item], hash: ~str) ->
return None; return None;
} }
fn resolve_crate(e: env, ident: ast::ident, +metas: ~[@ast::meta_item], fn resolve_crate(e: @mut Env,
+hash: ~str, span: span) -> ast::crate_num { ident: ast::ident,
+metas: ~[@ast::meta_item],
+hash: ~str,
span: span)
-> ast::crate_num {
let metas = metas_with_ident(/*bad*/copy *e.intr.get(ident), metas); let metas = metas_with_ident(/*bad*/copy *e.intr.get(ident), metas);
match existing_match(e, metas, hash) { match existing_match(e, metas, hash) {
@ -228,7 +235,7 @@ fn resolve_crate(e: env, ident: ast::ident, +metas: ~[@ast::meta_item],
metas: metas, metas: metas,
hash: hash, hash: hash,
os: e.os, os: e.os,
static: e.static, static: e.statik,
intr: e.intr intr: e.intr
}; };
let cinfo = loader::load_library_crate(load_ctxt); let cinfo = loader::load_library_crate(load_ctxt);
@ -270,7 +277,7 @@ fn resolve_crate(e: env, ident: ast::ident, +metas: ~[@ast::meta_item],
} }
// Go through the crate metadata and load any crates that it references // Go through the crate metadata and load any crates that it references
fn resolve_crate_deps(e: env, cdata: @~[u8]) -> cstore::cnum_map { fn resolve_crate_deps(e: @mut Env, cdata: @~[u8]) -> cstore::cnum_map {
debug!("resolving deps of external crate"); debug!("resolving deps of external crate");
// The map from crate numbers in the crate we're resolving to local crate // The map from crate numbers in the crate we're resolving to local crate
// numbers // numbers
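
The Env rewrite above is the commit's central pattern: a managed record with individually mut fields becomes a struct with plain fields that is reached through one @mut handle. A rough present-day analogue, purely for illustration (the assumption here is that @mut T can be modeled with Rc<RefCell<T>>, and the field list is trimmed):

use std::cell::RefCell;
use std::rc::Rc;

// Plain fields; all mutability lives on the handle to the whole context.
struct Env {
    next_crate_num: i32,
    crate_cache: Vec<String>,
}

fn visit_crate(e: &Rc<RefCell<Env>>, name: &str) {
    // Hypothetical visitor body: record the crate and bump the counter.
    let mut env = e.borrow_mut();
    env.crate_cache.push(name.to_string());
    env.next_crate_num += 1;
}

fn main() {
    let e = Rc::new(RefCell::new(Env { next_crate_num: 1, crate_cache: Vec::new() }));
    visit_crate(&e, "core");
    visit_crate(&e, "std");
    assert_eq!(e.borrow().next_crate_num, 3);
    assert_eq!(e.borrow().crate_cache.len(), 2);
}
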

View file

@ -42,19 +42,19 @@ pub struct StaticMethodInfo {
purity: ast::purity purity: ast::purity
} }
pub fn get_symbol(cstore: cstore::CStore, def: ast::def_id) -> ~str { pub fn get_symbol(cstore: @mut cstore::CStore, def: ast::def_id) -> ~str {
let cdata = cstore::get_crate_data(cstore, def.crate).data; let cdata = cstore::get_crate_data(cstore, def.crate).data;
return decoder::get_symbol(cdata, def.node); return decoder::get_symbol(cdata, def.node);
} }
pub fn get_type_param_count(cstore: cstore::CStore, def: ast::def_id) pub fn get_type_param_count(cstore: @mut cstore::CStore, def: ast::def_id)
-> uint { -> uint {
let cdata = cstore::get_crate_data(cstore, def.crate).data; let cdata = cstore::get_crate_data(cstore, def.crate).data;
return decoder::get_type_param_count(cdata, def.node); return decoder::get_type_param_count(cdata, def.node);
} }
/// Iterates over all the language items in the given crate. /// Iterates over all the language items in the given crate.
pub fn each_lang_item(cstore: cstore::CStore, pub fn each_lang_item(cstore: @mut cstore::CStore,
cnum: ast::crate_num, cnum: ast::crate_num,
f: &fn(ast::node_id, uint) -> bool) { f: &fn(ast::node_id, uint) -> bool) {
let crate_data = cstore::get_crate_data(cstore, cnum); let crate_data = cstore::get_crate_data(cstore, cnum);
@ -62,8 +62,9 @@ pub fn each_lang_item(cstore: cstore::CStore,
} }
/// Iterates over all the paths in the given crate. /// Iterates over all the paths in the given crate.
pub fn each_path(cstore: cstore::CStore, cnum: ast::crate_num, pub fn each_path(cstore: @mut cstore::CStore,
f: fn(&str, decoder::def_like) -> bool) { cnum: ast::crate_num,
f: &fn(&str, decoder::def_like) -> bool) {
let crate_data = cstore::get_crate_data(cstore, cnum); let crate_data = cstore::get_crate_data(cstore, cnum);
let get_crate_data: decoder::GetCrateDataCb = |cnum| { let get_crate_data: decoder::GetCrateDataCb = |cnum| {
cstore::get_crate_data(cstore, cnum) cstore::get_crate_data(cstore, cnum)
@ -107,7 +108,7 @@ pub fn get_enum_variants(tcx: ty::ctxt, def: ast::def_id)
return decoder::get_enum_variants(cstore.intr, cdata, def.node, tcx) return decoder::get_enum_variants(cstore.intr, cdata, def.node, tcx)
} }
pub fn get_impls_for_mod(cstore: cstore::CStore, def: ast::def_id, pub fn get_impls_for_mod(cstore: @mut cstore::CStore, def: ast::def_id,
name: Option<ast::ident>) name: Option<ast::ident>)
-> @~[@decoder::_impl] { -> @~[@decoder::_impl] {
let cdata = cstore::get_crate_data(cstore, def.crate); let cdata = cstore::get_crate_data(cstore, def.crate);
@ -138,27 +139,27 @@ pub fn get_supertraits(tcx: ty::ctxt, def: ast::def_id) -> ~[ty::t] {
decoder::get_supertraits(cdata, def.node, tcx) decoder::get_supertraits(cdata, def.node, tcx)
} }
pub fn get_method_names_if_trait(cstore: cstore::CStore, def: ast::def_id) pub fn get_method_names_if_trait(cstore: @mut cstore::CStore,
def: ast::def_id)
-> Option<@DVec<(ast::ident, ast::self_ty_)>> { -> Option<@DVec<(ast::ident, ast::self_ty_)>> {
let cdata = cstore::get_crate_data(cstore, def.crate); let cdata = cstore::get_crate_data(cstore, def.crate);
return decoder::get_method_names_if_trait(cstore.intr, cdata, def.node); return decoder::get_method_names_if_trait(cstore.intr, cdata, def.node);
} }
pub fn get_type_name_if_impl(cstore: cstore::CStore, def: ast::def_id) -> pub fn get_type_name_if_impl(cstore: @mut cstore::CStore, def: ast::def_id)
Option<ast::ident> { -> Option<ast::ident> {
let cdata = cstore::get_crate_data(cstore, def.crate); let cdata = cstore::get_crate_data(cstore, def.crate);
decoder::get_type_name_if_impl(cstore.intr, cdata, def.node) decoder::get_type_name_if_impl(cstore.intr, cdata, def.node)
} }
pub fn get_static_methods_if_impl(cstore: cstore::CStore, pub fn get_static_methods_if_impl(cstore: @mut cstore::CStore,
def: ast::def_id) def: ast::def_id)
-> Option<~[StaticMethodInfo]> { -> Option<~[StaticMethodInfo]> {
let cdata = cstore::get_crate_data(cstore, def.crate); let cdata = cstore::get_crate_data(cstore, def.crate);
decoder::get_static_methods_if_impl(cstore.intr, cdata, def.node) decoder::get_static_methods_if_impl(cstore.intr, cdata, def.node)
} }
pub fn get_item_attrs(cstore: cstore::CStore, pub fn get_item_attrs(cstore: @mut cstore::CStore,
def_id: ast::def_id, def_id: ast::def_id,
f: fn(~[@ast::meta_item])) { f: fn(~[@ast::meta_item])) {
let cdata = cstore::get_crate_data(cstore, def_id.crate); let cdata = cstore::get_crate_data(cstore, def_id.crate);
@ -179,7 +180,7 @@ pub fn get_type(tcx: ty::ctxt,
decoder::get_type(cdata, def.node, tcx) decoder::get_type(cdata, def.node, tcx)
} }
pub fn get_region_param(cstore: metadata::cstore::CStore, pub fn get_region_param(cstore: @mut metadata::cstore::CStore,
def: ast::def_id) -> Option<ty::region_variance> { def: ast::def_id) -> Option<ty::region_variance> {
let cdata = cstore::get_crate_data(cstore, def.crate); let cdata = cstore::get_crate_data(cstore, def.crate);
return decoder::get_region_param(cdata, def.node); return decoder::get_region_param(cdata, def.node);
@ -216,7 +217,7 @@ pub fn get_impl_traits(tcx: ty::ctxt, def: ast::def_id) -> ~[ty::t] {
decoder::get_impl_traits(cdata, def.node, tcx) decoder::get_impl_traits(cdata, def.node, tcx)
} }
pub fn get_impl_method(cstore: cstore::CStore, pub fn get_impl_method(cstore: @mut cstore::CStore,
def: ast::def_id, def: ast::def_id,
mname: ast::ident) mname: ast::ident)
-> ast::def_id { -> ast::def_id {
@ -225,7 +226,7 @@ pub fn get_impl_method(cstore: cstore::CStore,
} }
/* If def names a class with a dtor, return it. Otherwise, return none. */ /* If def names a class with a dtor, return it. Otherwise, return none. */
pub fn struct_dtor(cstore: cstore::CStore, def: ast::def_id) pub fn struct_dtor(cstore: @mut cstore::CStore, def: ast::def_id)
-> Option<ast::def_id> { -> Option<ast::def_id> {
let cdata = cstore::get_crate_data(cstore, def.crate); let cdata = cstore::get_crate_data(cstore, def.crate);
decoder::struct_dtor(cdata, def.node) decoder::struct_dtor(cdata, def.node)

View file

@ -38,129 +38,137 @@ pub type crate_metadata = @{name: ~str,
cnum_map: cnum_map, cnum_map: cnum_map,
cnum: ast::crate_num}; cnum: ast::crate_num};
// This is a bit of an experiment at encapsulating the data in cstore. By pub struct CStore {
// keeping all the data in a non-exported enum variant, it's impossible for priv metas: oldmap::HashMap<ast::crate_num, crate_metadata>,
// other modules to access the cstore's private data. This could also be priv use_crate_map: use_crate_map,
// achieved with an obj, but at the expense of a vtable. Not sure if this is a priv used_crate_files: ~[Path],
// good pattern or not. priv used_libraries: ~[~str],
pub enum CStore { private(cstore_private), } priv used_link_args: ~[~str],
intr: @ident_interner
type cstore_private = }
@{metas: oldmap::HashMap<ast::crate_num, crate_metadata>,
use_crate_map: use_crate_map,
mut used_crate_files: ~[Path],
mut used_libraries: ~[~str],
mut used_link_args: ~[~str],
intr: @ident_interner};
// Map from node_id's of local use statements to crate numbers // Map from node_id's of local use statements to crate numbers
type use_crate_map = oldmap::HashMap<ast::node_id, ast::crate_num>; type use_crate_map = oldmap::HashMap<ast::node_id, ast::crate_num>;
// Internal method to retrieve the data from the cstore
pure fn p(cstore: CStore) -> cstore_private {
match cstore { private(p) => p }
}
pub fn mk_cstore(intr: @ident_interner) -> CStore { pub fn mk_cstore(intr: @ident_interner) -> CStore {
let meta_cache = oldmap::HashMap(); let meta_cache = oldmap::HashMap();
let crate_map = oldmap::HashMap(); let crate_map = oldmap::HashMap();
return private(@{metas: meta_cache, return CStore {
metas: meta_cache,
use_crate_map: crate_map, use_crate_map: crate_map,
mut used_crate_files: ~[], used_crate_files: ~[],
mut used_libraries: ~[], used_libraries: ~[],
mut used_link_args: ~[], used_link_args: ~[],
intr: intr}); intr: intr
};
} }
pub fn get_crate_data(cstore: CStore, cnum: ast::crate_num) pub fn get_crate_data(cstore: @mut CStore, cnum: ast::crate_num)
-> crate_metadata { -> crate_metadata {
return p(cstore).metas.get(&cnum); return cstore.metas.get(&cnum);
} }
pub fn get_crate_hash(cstore: CStore, cnum: ast::crate_num) -> ~str { pub fn get_crate_hash(cstore: @mut CStore, cnum: ast::crate_num) -> ~str {
let cdata = get_crate_data(cstore, cnum); let cdata = get_crate_data(cstore, cnum);
return decoder::get_crate_hash(cdata.data); return decoder::get_crate_hash(cdata.data);
} }
pub fn get_crate_vers(cstore: CStore, cnum: ast::crate_num) -> ~str { pub fn get_crate_vers(cstore: @mut CStore, cnum: ast::crate_num) -> ~str {
let cdata = get_crate_data(cstore, cnum); let cdata = get_crate_data(cstore, cnum);
return decoder::get_crate_vers(cdata.data); return decoder::get_crate_vers(cdata.data);
} }
pub fn set_crate_data(cstore: CStore, pub fn set_crate_data(cstore: @mut CStore,
cnum: ast::crate_num, cnum: ast::crate_num,
data: crate_metadata) { data: crate_metadata) {
p(cstore).metas.insert(cnum, data); let metas = cstore.metas;
metas.insert(cnum, data);
} }
pub fn have_crate_data(cstore: CStore, cnum: ast::crate_num) -> bool { pub fn have_crate_data(cstore: @mut CStore, cnum: ast::crate_num) -> bool {
return p(cstore).metas.contains_key_ref(&cnum); return cstore.metas.contains_key_ref(&cnum);
} }
pub fn iter_crate_data(cstore: CStore, pub fn iter_crate_data(cstore: @mut CStore,
i: fn(ast::crate_num, crate_metadata)) { i: fn(ast::crate_num, crate_metadata)) {
for p(cstore).metas.each_ref |&k, &v| { i(k, v);}; let metas = cstore.metas;
} for metas.each_ref |&k, &v| {
i(k, v);
pub fn add_used_crate_file(cstore: CStore, lib: &Path) {
if !vec::contains(p(cstore).used_crate_files, lib) {
p(cstore).used_crate_files.push(copy *lib);
} }
} }
pub fn get_used_crate_files(cstore: CStore) -> ~[Path] { pub fn add_used_crate_file(cstore: @mut CStore, lib: &Path) {
return /*bad*/copy p(cstore).used_crate_files; if !vec::contains(cstore.used_crate_files, lib) {
cstore.used_crate_files.push(copy *lib);
}
} }
pub fn add_used_library(cstore: CStore, +lib: ~str) -> bool { pub fn get_used_crate_files(cstore: @mut CStore) -> ~[Path] {
return /*bad*/copy cstore.used_crate_files;
}
pub fn add_used_library(cstore: @mut CStore, +lib: ~str) -> bool {
assert lib != ~""; assert lib != ~"";
if vec::contains(p(cstore).used_libraries, &lib) { return false; } if vec::contains(cstore.used_libraries, &lib) { return false; }
p(cstore).used_libraries.push(lib); cstore.used_libraries.push(lib);
return true; return true;
} }
pub fn get_used_libraries(cstore: CStore) -> ~[~str] { pub fn get_used_libraries(cstore: @mut CStore) -> ~[~str] {
return /*bad*/copy p(cstore).used_libraries; return /*bad*/copy cstore.used_libraries;
} }
pub fn add_used_link_args(cstore: CStore, args: ~str) { pub fn add_used_link_args(cstore: @mut CStore, args: ~str) {
p(cstore).used_link_args.push_all(str::split_char(args, ' ')); cstore.used_link_args.push_all(str::split_char(args, ' '));
} }
pub fn get_used_link_args(cstore: CStore) -> ~[~str] { pub fn get_used_link_args(cstore: @mut CStore) -> ~[~str] {
return /*bad*/copy p(cstore).used_link_args; return /*bad*/copy cstore.used_link_args;
} }
pub fn add_use_stmt_cnum(cstore: CStore, use_id: ast::node_id, pub fn add_use_stmt_cnum(cstore: @mut CStore,
use_id: ast::node_id,
cnum: ast::crate_num) { cnum: ast::crate_num) {
p(cstore).use_crate_map.insert(use_id, cnum); let use_crate_map = cstore.use_crate_map;
use_crate_map.insert(use_id, cnum);
} }
pub fn find_use_stmt_cnum(cstore: CStore, pub fn find_use_stmt_cnum(cstore: @mut CStore,
use_id: ast::node_id) -> Option<ast::crate_num> { use_id: ast::node_id)
p(cstore).use_crate_map.find(&use_id) -> Option<ast::crate_num> {
let use_crate_map = cstore.use_crate_map;
use_crate_map.find(&use_id)
} }
// returns hashes of crates directly used by this crate. Hashes are // returns hashes of crates directly used by this crate. Hashes are
// sorted by crate name. // sorted by crate name.
pub fn get_dep_hashes(cstore: CStore) -> ~[~str] { pub fn get_dep_hashes(cstore: @mut CStore) -> ~[~str] {
type crate_hash = {name: ~str, hash: ~str}; type crate_hash = {name: ~str, hash: ~str};
let mut result = ~[]; let mut result = ~[];
for p(cstore).use_crate_map.each_value_ref |&cnum| { let use_crate_map = cstore.use_crate_map;
for use_crate_map.each_value_ref |&cnum| {
let cdata = cstore::get_crate_data(cstore, cnum); let cdata = cstore::get_crate_data(cstore, cnum);
let hash = decoder::get_crate_hash(cdata.data); let hash = decoder::get_crate_hash(cdata.data);
debug!("Add hash[%s]: %s", cdata.name, hash); debug!("Add hash[%s]: %s", cdata.name, hash);
result.push({name: /*bad*/copy cdata.name, hash: hash}); result.push({name: /*bad*/copy cdata.name, hash: hash});
}; }
pure fn lteq(a: &crate_hash, b: &crate_hash) -> bool {a.name <= b.name}
pure fn lteq(a: &crate_hash, b: &crate_hash) -> bool {
a.name <= b.name
}
let sorted = std::sort::merge_sort(result, lteq); let sorted = std::sort::merge_sort(result, lteq);
debug!("sorted:"); debug!("sorted:");
for sorted.each |x| { for sorted.each |x| {
debug!(" hash[%s]: %s", x.name, x.hash); debug!(" hash[%s]: %s", x.name, x.hash);
} }
fn mapper(ch: &crate_hash) -> ~str { return /*bad*/copy ch.hash; }
fn mapper(ch: &crate_hash) -> ~str {
return /*bad*/copy ch.hash;
}
return vec::map(sorted, mapper); return vec::map(sorted, mapper);
} }
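
CStore drops the old encapsulation trick of a non-exported enum variant plus the p() accessor in favor of a struct whose fields are private to the module. A minimal present-day sketch of the same idea, with the field list trimmed to one field:

mod cstore {
    pub struct CStore {
        used_libraries: Vec<String>, // private to this module
    }

    pub fn mk_cstore() -> CStore {
        CStore { used_libraries: Vec::new() }
    }

    pub fn add_used_library(cstore: &mut CStore, lib: String) -> bool {
        // Mirrors the de-duplicating push in the diff above.
        if cstore.used_libraries.contains(&lib) {
            return false;
        }
        cstore.used_libraries.push(lib);
        true
    }

    pub fn get_used_libraries(cstore: &CStore) -> &[String] {
        &cstore.used_libraries
    }
}

fn main() {
    let mut cs = cstore::mk_cstore();
    assert!(cstore::add_used_library(&mut cs, "rustrt".to_string()));
    assert!(!cstore::add_used_library(&mut cs, "rustrt".to_string()));
    assert_eq!(cstore::get_used_libraries(&cs).len(), 1);
}
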

View file

@ -67,33 +67,33 @@ pub type encode_parms = {
item_symbols: HashMap<ast::node_id, ~str>, item_symbols: HashMap<ast::node_id, ~str>,
discrim_symbols: HashMap<ast::node_id, ~str>, discrim_symbols: HashMap<ast::node_id, ~str>,
link_meta: link_meta, link_meta: link_meta,
cstore: cstore::CStore, cstore: @mut cstore::CStore,
encode_inlined_item: encode_inlined_item encode_inlined_item: encode_inlined_item
}; };
type stats = { struct Stats {
mut inline_bytes: uint, inline_bytes: uint,
mut attr_bytes: uint, attr_bytes: uint,
mut dep_bytes: uint, dep_bytes: uint,
mut lang_item_bytes: uint, lang_item_bytes: uint,
mut item_bytes: uint, item_bytes: uint,
mut index_bytes: uint, index_bytes: uint,
mut zero_bytes: uint, zero_bytes: uint,
mut total_bytes: uint, total_bytes: uint,
mut n_inlines: uint n_inlines: uint
}; }
pub enum encode_ctxt = { pub enum encode_ctxt = {
diag: span_handler, diag: span_handler,
tcx: ty::ctxt, tcx: ty::ctxt,
stats: stats, stats: @mut Stats,
reachable: HashMap<ast::node_id, ()>, reachable: HashMap<ast::node_id, ()>,
reexports2: middle::resolve::ExportMap2, reexports2: middle::resolve::ExportMap2,
item_symbols: HashMap<ast::node_id, ~str>, item_symbols: HashMap<ast::node_id, ~str>,
discrim_symbols: HashMap<ast::node_id, ~str>, discrim_symbols: HashMap<ast::node_id, ~str>,
link_meta: link_meta, link_meta: link_meta,
cstore: cstore::CStore, cstore: @mut cstore::CStore,
encode_inlined_item: encode_inlined_item, encode_inlined_item: encode_inlined_item,
type_abbrevs: abbrev_map type_abbrevs: abbrev_map
}; };
@ -1067,12 +1067,11 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: &crate) -> ~[attribute] {
return attrs; return attrs;
} }
fn encode_crate_deps(ecx: @encode_ctxt, ebml_w: writer::Encoder, fn encode_crate_deps(ecx: @encode_ctxt,
cstore: cstore::CStore) { ebml_w: writer::Encoder,
cstore: @mut cstore::CStore) {
fn get_ordered_deps(ecx: @encode_ctxt, cstore: cstore::CStore) fn get_ordered_deps(ecx: @encode_ctxt, cstore: @mut cstore::CStore)
-> ~[decoder::crate_dep] { -> ~[decoder::crate_dep] {
type hashkv = @{key: crate_num, val: cstore::crate_metadata}; type hashkv = @{key: crate_num, val: cstore::crate_metadata};
type numdep = decoder::crate_dep; type numdep = decoder::crate_dep;
@ -1168,20 +1167,21 @@ pub const metadata_encoding_version : &[u8] = &[0x72, //'r' as u8,
pub fn encode_metadata(parms: encode_parms, crate: &crate) -> ~[u8] { pub fn encode_metadata(parms: encode_parms, crate: &crate) -> ~[u8] {
let wr = @io::BytesWriter(); let wr = @io::BytesWriter();
let stats = let mut stats = Stats {
{mut inline_bytes: 0, inline_bytes: 0,
mut attr_bytes: 0, attr_bytes: 0,
mut dep_bytes: 0, dep_bytes: 0,
mut lang_item_bytes: 0, lang_item_bytes: 0,
mut item_bytes: 0, item_bytes: 0,
mut index_bytes: 0, index_bytes: 0,
mut zero_bytes: 0, zero_bytes: 0,
mut total_bytes: 0, total_bytes: 0,
mut n_inlines: 0}; n_inlines: 0
};
let ecx: @encode_ctxt = @encode_ctxt({ let ecx: @encode_ctxt = @encode_ctxt({
diag: parms.diag, diag: parms.diag,
tcx: parms.tcx, tcx: parms.tcx,
stats: move stats, stats: @mut move stats,
reachable: parms.reachable, reachable: parms.reachable,
reexports2: parms.reexports2, reexports2: parms.reexports2,
item_symbols: parms.item_symbols, item_symbols: parms.item_symbols,
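
Stats follows the same recipe as the other contexts: the counters are built as an ordinary value and the encode context then holds a single mutable handle to them (stats: @mut move stats). A small sketch in present-day terms, with Rc<RefCell<Stats>> standing in for @mut Stats and the field list trimmed:

use std::cell::RefCell;
use std::rc::Rc;

#[derive(Default)]
struct Stats {
    item_bytes: usize,
    total_bytes: usize,
}

struct EncodeCtxt {
    stats: Rc<RefCell<Stats>>,
}

fn main() {
    // Build the counters as a plain value, then hand the context one
    // shared mutable handle -- the analogue of `stats: @mut move stats`.
    let stats = Stats::default();
    let ecx = EncodeCtxt { stats: Rc::new(RefCell::new(stats)) };
    ecx.stats.borrow_mut().item_bytes += 128;
    ecx.stats.borrow_mut().total_bytes += 128;
    assert_eq!(ecx.stats.borrow().total_bytes, 128);
}
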

View file

@ -53,30 +53,35 @@ pub enum DefIdSource {
} }
type conv_did = fn(source: DefIdSource, ast::def_id) -> ast::def_id; type conv_did = fn(source: DefIdSource, ast::def_id) -> ast::def_id;
pub type pstate = {data: @~[u8], crate: int, mut pos: uint, tcx: ty::ctxt}; pub struct PState {
data: @~[u8],
crate: int,
pos: uint,
tcx: ty::ctxt
}
fn peek(st: @pstate) -> char { fn peek(st: @mut PState) -> char {
st.data[st.pos] as char st.data[st.pos] as char
} }
fn next(st: @pstate) -> char { fn next(st: @mut PState) -> char {
let ch = st.data[st.pos] as char; let ch = st.data[st.pos] as char;
st.pos = st.pos + 1u; st.pos = st.pos + 1u;
return ch; return ch;
} }
fn next_byte(st: @pstate) -> u8 { fn next_byte(st: @mut PState) -> u8 {
let b = st.data[st.pos]; let b = st.data[st.pos];
st.pos = st.pos + 1u; st.pos = st.pos + 1u;
return b; return b;
} }
pub fn parse_ident(st: @pstate, last: char) -> ast::ident { pub fn parse_ident(st: @mut PState, last: char) -> ast::ident {
fn is_last(b: char, c: char) -> bool { return c == b; } fn is_last(b: char, c: char) -> bool { return c == b; }
return parse_ident_(st, |a| is_last(last, a) ); return parse_ident_(st, |a| is_last(last, a) );
} }
fn parse_ident_(st: @pstate, is_last: fn@(char) -> bool) -> fn parse_ident_(st: @mut PState, is_last: fn@(char) -> bool) ->
ast::ident { ast::ident {
let mut rslt = ~""; let mut rslt = ~"";
while !is_last(peek(st)) { while !is_last(peek(st)) {
@ -86,8 +91,13 @@ fn parse_ident_(st: @pstate, is_last: fn@(char) -> bool) ->
} }
pub fn parse_state_from_data(data: @~[u8], crate_num: int, pub fn parse_state_from_data(data: @~[u8], crate_num: int,
pos: uint, tcx: ty::ctxt) -> @pstate { pos: uint, tcx: ty::ctxt) -> @mut PState {
@{data: data, crate: crate_num, mut pos: pos, tcx: tcx} @mut PState {
data: data,
crate: crate_num,
pos: pos,
tcx: tcx
}
} }
pub fn parse_ty_data(data: @~[u8], crate_num: int, pos: uint, tcx: ty::ctxt, pub fn parse_ty_data(data: @~[u8], crate_num: int, pos: uint, tcx: ty::ctxt,
@ -102,7 +112,7 @@ pub fn parse_arg_data(data: @~[u8], crate_num: int, pos: uint, tcx: ty::ctxt,
parse_arg(st, conv) parse_arg(st, conv)
} }
fn parse_path(st: @pstate) -> @ast::path { fn parse_path(st: @mut PState) -> @ast::path {
let mut idents: ~[ast::ident] = ~[]; let mut idents: ~[ast::ident] = ~[];
fn is_last(c: char) -> bool { return c == '(' || c == ':'; } fn is_last(c: char) -> bool { return c == '(' || c == ':'; }
idents.push(parse_ident_(st, is_last)); idents.push(parse_ident_(st, is_last));
@ -122,7 +132,7 @@ fn parse_path(st: @pstate) -> @ast::path {
}; };
} }
fn parse_sigil(st: @pstate) -> ast::Sigil { fn parse_sigil(st: @mut PState) -> ast::Sigil {
match next(st) { match next(st) {
'@' => ast::ManagedSigil, '@' => ast::ManagedSigil,
'~' => ast::OwnedSigil, '~' => ast::OwnedSigil,
@ -131,7 +141,7 @@ fn parse_sigil(st: @pstate) -> ast::Sigil {
} }
} }
fn parse_vstore(st: @pstate) -> ty::vstore { fn parse_vstore(st: @mut PState) -> ty::vstore {
assert next(st) == '/'; assert next(st) == '/';
let c = peek(st); let c = peek(st);
@ -149,7 +159,7 @@ fn parse_vstore(st: @pstate) -> ty::vstore {
} }
} }
fn parse_substs(st: @pstate, conv: conv_did) -> ty::substs { fn parse_substs(st: @mut PState, conv: conv_did) -> ty::substs {
let self_r = parse_opt(st, || parse_region(st) ); let self_r = parse_opt(st, || parse_region(st) );
let self_ty = parse_opt(st, || parse_ty(st, conv) ); let self_ty = parse_opt(st, || parse_ty(st, conv) );
@ -166,7 +176,7 @@ fn parse_substs(st: @pstate, conv: conv_did) -> ty::substs {
}; };
} }
fn parse_bound_region(st: @pstate) -> ty::bound_region { fn parse_bound_region(st: @mut PState) -> ty::bound_region {
match next(st) { match next(st) {
's' => ty::br_self, 's' => ty::br_self,
'a' => { 'a' => {
@ -184,7 +194,7 @@ fn parse_bound_region(st: @pstate) -> ty::bound_region {
} }
} }
fn parse_region(st: @pstate) -> ty::Region { fn parse_region(st: @mut PState) -> ty::Region {
match next(st) { match next(st) {
'b' => { 'b' => {
ty::re_bound(parse_bound_region(st)) ty::re_bound(parse_bound_region(st))
@ -209,7 +219,7 @@ fn parse_region(st: @pstate) -> ty::Region {
} }
} }
fn parse_opt<T>(st: @pstate, f: fn() -> T) -> Option<T> { fn parse_opt<T>(st: @mut PState, f: fn() -> T) -> Option<T> {
match next(st) { match next(st) {
'n' => None, 'n' => None,
's' => Some(f()), 's' => Some(f()),
@ -217,7 +227,7 @@ fn parse_opt<T>(st: @pstate, f: fn() -> T) -> Option<T> {
} }
} }
fn parse_str(st: @pstate, term: char) -> ~str { fn parse_str(st: @mut PState, term: char) -> ~str {
let mut result = ~""; let mut result = ~"";
while peek(st) != term { while peek(st) != term {
result += str::from_byte(next_byte(st)); result += str::from_byte(next_byte(st));
@ -226,7 +236,7 @@ fn parse_str(st: @pstate, term: char) -> ~str {
return result; return result;
} }
fn parse_ty(st: @pstate, conv: conv_did) -> ty::t { fn parse_ty(st: @mut PState, conv: conv_did) -> ty::t {
match next(st) { match next(st) {
'n' => return ty::mk_nil(st.tcx), 'n' => return ty::mk_nil(st.tcx),
'z' => return ty::mk_bot(st.tcx), 'z' => return ty::mk_bot(st.tcx),
@ -330,7 +340,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
match st.tcx.rcache.find(&key) { match st.tcx.rcache.find(&key) {
Some(tt) => return tt, Some(tt) => return tt,
None => { None => {
let ps = @{pos: pos ,.. copy *st}; let ps = @mut PState {pos: pos ,.. copy *st};
let tt = parse_ty(ps, conv); let tt = parse_ty(ps, conv);
st.tcx.rcache.insert(key, tt); st.tcx.rcache.insert(key, tt);
return tt; return tt;
@ -354,7 +364,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
} }
} }
fn parse_mt(st: @pstate, conv: conv_did) -> ty::mt { fn parse_mt(st: @mut PState, conv: conv_did) -> ty::mt {
let mut m; let mut m;
match peek(st) { match peek(st) {
'm' => { next(st); m = ast::m_mutbl; } 'm' => { next(st); m = ast::m_mutbl; }
@ -364,7 +374,7 @@ fn parse_mt(st: @pstate, conv: conv_did) -> ty::mt {
ty::mt { ty: parse_ty(st, conv), mutbl: m } ty::mt { ty: parse_ty(st, conv), mutbl: m }
} }
fn parse_def(st: @pstate, source: DefIdSource, fn parse_def(st: @mut PState, source: DefIdSource,
conv: conv_did) -> ast::def_id { conv: conv_did) -> ast::def_id {
let mut def = ~[]; let mut def = ~[];
while peek(st) != '|' { def.push(next_byte(st)); } while peek(st) != '|' { def.push(next_byte(st)); }
@ -372,7 +382,7 @@ fn parse_def(st: @pstate, source: DefIdSource,
return conv(source, parse_def_id(def)); return conv(source, parse_def_id(def));
} }
fn parse_int(st: @pstate) -> int { fn parse_int(st: @mut PState) -> int {
let mut n = 0; let mut n = 0;
loop { loop {
let cur = peek(st); let cur = peek(st);
@ -383,7 +393,7 @@ fn parse_int(st: @pstate) -> int {
}; };
} }
fn parse_hex(st: @pstate) -> uint { fn parse_hex(st: @mut PState) -> uint {
let mut n = 0u; let mut n = 0u;
loop { loop {
let cur = peek(st); let cur = peek(st);
@ -421,11 +431,11 @@ fn parse_onceness(c: char) -> ast::Onceness {
} }
} }
fn parse_arg(st: @pstate, conv: conv_did) -> ty::arg { fn parse_arg(st: @mut PState, conv: conv_did) -> ty::arg {
ty::arg { mode: parse_mode(st), ty: parse_ty(st, conv) } ty::arg { mode: parse_mode(st), ty: parse_ty(st, conv) }
} }
fn parse_mode(st: @pstate) -> ast::mode { fn parse_mode(st: @mut PState) -> ast::mode {
let m = ast::expl(match next(st) { let m = ast::expl(match next(st) {
'+' => ast::by_copy, '+' => ast::by_copy,
'=' => ast::by_ref, '=' => ast::by_ref,
@ -435,7 +445,7 @@ fn parse_mode(st: @pstate) -> ast::mode {
return m; return m;
} }
fn parse_closure_ty(st: @pstate, conv: conv_did) -> ty::ClosureTy { fn parse_closure_ty(st: @mut PState, conv: conv_did) -> ty::ClosureTy {
let sigil = parse_sigil(st); let sigil = parse_sigil(st);
let purity = parse_purity(next(st)); let purity = parse_purity(next(st));
let onceness = parse_onceness(next(st)); let onceness = parse_onceness(next(st));
@ -450,7 +460,7 @@ fn parse_closure_ty(st: @pstate, conv: conv_did) -> ty::ClosureTy {
} }
} }
fn parse_bare_fn_ty(st: @pstate, conv: conv_did) -> ty::BareFnTy { fn parse_bare_fn_ty(st: @mut PState, conv: conv_did) -> ty::BareFnTy {
let purity = parse_purity(next(st)); let purity = parse_purity(next(st));
let abi = parse_abi(next(st)); let abi = parse_abi(next(st));
let sig = parse_sig(st, conv); let sig = parse_sig(st, conv);
@ -461,7 +471,7 @@ fn parse_bare_fn_ty(st: @pstate, conv: conv_did) -> ty::BareFnTy {
} }
} }
fn parse_sig(st: @pstate, conv: conv_did) -> ty::FnSig { fn parse_sig(st: @mut PState, conv: conv_did) -> ty::FnSig {
assert (next(st) == '['); assert (next(st) == '[');
let mut inputs: ~[ty::arg] = ~[]; let mut inputs: ~[ty::arg] = ~[];
while peek(st) != ']' { while peek(st) != ']' {
@ -506,7 +516,7 @@ pub fn parse_bounds_data(data: @~[u8], start: uint,
parse_bounds(st, conv) parse_bounds(st, conv)
} }
fn parse_bounds(st: @pstate, conv: conv_did) -> @~[ty::param_bound] { fn parse_bounds(st: @mut PState, conv: conv_did) -> @~[ty::param_bound] {
let mut bounds = ~[]; let mut bounds = ~[];
loop { loop {
bounds.push(match next(st) { bounds.push(match next(st) {
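
PState is a byte cursor over encoded type metadata; every parse_* routine above advances it through peek and next. Below is a self-contained cursor in the same style, in present-day syntax; this toy only reads a decimal integer and a terminator, where the real parser decodes rustc's full type grammar.

struct PState<'a> {
    data: &'a [u8],
    pos: usize,
}

fn peek(st: &PState) -> char {
    st.data[st.pos] as char
}

fn next(st: &mut PState) -> char {
    let ch = st.data[st.pos] as char;
    st.pos += 1;
    ch
}

fn parse_int(st: &mut PState) -> i64 {
    // Accumulate digits until a non-digit terminator, like the real parse_int.
    let mut n = 0;
    while peek(st).is_ascii_digit() {
        n = n * 10 + (next(st) as i64 - '0' as i64);
    }
    n
}

fn main() {
    let mut st = PState { data: b"42|", pos: 0 };
    assert_eq!(parse_int(&mut st), 42);
    assert_eq!(next(&mut st), '|');
}
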

View file

@ -1274,13 +1274,13 @@ fn test_simplification() {
let item_in = ast::ii_item(quote_item!( let item_in = ast::ii_item(quote_item!(
fn new_int_alist<B: Copy>() -> alist<int, B> { fn new_int_alist<B: Copy>() -> alist<int, B> {
fn eq_int(&&a: int, &&b: int) -> bool { a == b } fn eq_int(&&a: int, &&b: int) -> bool { a == b }
return {eq_fn: eq_int, mut data: ~[]}; return {eq_fn: eq_int, data: ~[]};
} }
).get()); ).get());
let item_out = simplify_ast(item_in); let item_out = simplify_ast(item_in);
let item_exp = ast::ii_item(quote_item!( let item_exp = ast::ii_item(quote_item!(
fn new_int_alist<B: Copy>() -> alist<int, B> { fn new_int_alist<B: Copy>() -> alist<int, B> {
return {eq_fn: eq_int, mut data: ~[]}; return {eq_fn: eq_int, data: ~[]};
} }
).get()); ).get());
match (item_out, item_exp) { match (item_out, item_exp) {

View file

@ -21,7 +21,7 @@ use core::prelude::*;
use middle::moves; use middle::moves;
use middle::borrowck::{Loan, bckerr, BorrowckCtxt, inherent_mutability}; use middle::borrowck::{Loan, bckerr, BorrowckCtxt, inherent_mutability};
use middle::borrowck::{req_maps, root_map_key, save_and_restore}; use middle::borrowck::{req_maps, root_map_key, save_and_restore_managed};
use middle::borrowck::{MoveError, MoveOk, MoveFromIllegalCmt}; use middle::borrowck::{MoveError, MoveOk, MoveFromIllegalCmt};
use middle::borrowck::{MoveWhileBorrowed}; use middle::borrowck::{MoveWhileBorrowed};
use middle::mem_categorization::{cat_arg, cat_binding, cat_comp, cat_deref}; use middle::mem_categorization::{cat_arg, cat_binding, cat_comp, cat_deref};
@ -43,15 +43,15 @@ use syntax::codemap::span;
use syntax::print::pprust; use syntax::print::pprust;
use syntax::visit; use syntax::visit;
enum check_loan_ctxt = @{ struct CheckLoanCtxt {
bccx: @BorrowckCtxt, bccx: @BorrowckCtxt,
req_maps: req_maps, req_maps: req_maps,
reported: HashMap<ast::node_id, ()>, reported: HashMap<ast::node_id, ()>,
mut declared_purity: ast::purity, declared_purity: @mut ast::purity,
mut fn_args: @~[ast::node_id] fn_args: @mut @~[ast::node_id]
}; }
// if we are enforcing purity, why are we doing so? // if we are enforcing purity, why are we doing so?
#[deriving_eq] #[deriving_eq]
@ -68,11 +68,13 @@ enum purity_cause {
pub fn check_loans(bccx: @BorrowckCtxt, pub fn check_loans(bccx: @BorrowckCtxt,
req_maps: req_maps, req_maps: req_maps,
crate: @ast::crate) { crate: @ast::crate) {
let clcx = check_loan_ctxt(@{bccx: bccx, let clcx = @mut CheckLoanCtxt {
bccx: bccx,
req_maps: req_maps, req_maps: req_maps,
reported: HashMap(), reported: HashMap(),
mut declared_purity: ast::impure_fn, declared_purity: @mut ast::impure_fn,
mut fn_args: @~[]}); fn_args: @mut @~[]
};
let vt = visit::mk_vt(@visit::Visitor {visit_expr: check_loans_in_expr, let vt = visit::mk_vt(@visit::Visitor {visit_expr: check_loans_in_expr,
visit_local: check_loans_in_local, visit_local: check_loans_in_local,
visit_block: check_loans_in_block, visit_block: check_loans_in_block,
@ -104,11 +106,11 @@ impl assignment_type {
} }
} }
impl check_loan_ctxt { impl CheckLoanCtxt {
fn tcx() -> ty::ctxt { self.bccx.tcx } fn tcx(@mut self) -> ty::ctxt { self.bccx.tcx }
fn purity(scope_id: ast::node_id) -> Option<purity_cause> { fn purity(@mut self, scope_id: ast::node_id) -> Option<purity_cause> {
let default_purity = match self.declared_purity { let default_purity = match *self.declared_purity {
// an unsafe declaration overrides all // an unsafe declaration overrides all
ast::unsafe_fn => return None, ast::unsafe_fn => return None,
@ -138,7 +140,9 @@ impl check_loan_ctxt {
} }
} }
fn walk_loans(scope_id: ast::node_id, f: fn(v: &Loan) -> bool) { fn walk_loans(@mut self,
scope_id: ast::node_id,
f: &fn(v: &Loan) -> bool) {
let mut scope_id = scope_id; let mut scope_id = scope_id;
let region_map = self.tcx().region_map; let region_map = self.tcx().region_map;
let req_loan_map = self.req_maps.req_loan_map; let req_loan_map = self.req_maps.req_loan_map;
@ -157,9 +161,10 @@ impl check_loan_ctxt {
} }
} }
fn walk_loans_of(scope_id: ast::node_id, fn walk_loans_of(@mut self,
scope_id: ast::node_id,
lp: @loan_path, lp: @loan_path,
f: fn(v: &Loan) -> bool) { f: &fn(v: &Loan) -> bool) {
for self.walk_loans(scope_id) |loan| { for self.walk_loans(scope_id) |loan| {
if loan.lp == lp { if loan.lp == lp {
if !f(loan) { return; } if !f(loan) { return; }
@ -173,7 +178,8 @@ impl check_loan_ctxt {
// note: we take opt_expr and expr_id separately because for // note: we take opt_expr and expr_id separately because for
// overloaded operators the callee has an id but no expr. // overloaded operators the callee has an id but no expr.
// annoying. // annoying.
fn check_pure_callee_or_arg(pc: purity_cause, fn check_pure_callee_or_arg(@mut self,
pc: purity_cause,
opt_expr: Option<@ast::expr>, opt_expr: Option<@ast::expr>,
callee_id: ast::node_id, callee_id: ast::node_id,
callee_span: span) { callee_span: span) {
@ -239,7 +245,7 @@ impl check_loan_ctxt {
// True if the expression with the given `id` is a stack closure. // True if the expression with the given `id` is a stack closure.
// The expression must be an expr_fn(*) or expr_fn_block(*) // The expression must be an expr_fn(*) or expr_fn_block(*)
fn is_stack_closure(id: ast::node_id) -> bool { fn is_stack_closure(@mut self, id: ast::node_id) -> bool {
let fn_ty = ty::node_id_to_type(self.tcx(), id); let fn_ty = ty::node_id_to_type(self.tcx(), id);
match ty::get(fn_ty).sty { match ty::get(fn_ty).sty {
ty::ty_closure(ty::ClosureTy {sigil: ast::BorrowedSigil, ty::ty_closure(ty::ClosureTy {sigil: ast::BorrowedSigil,
@ -248,7 +254,7 @@ impl check_loan_ctxt {
} }
} }
fn is_allowed_pure_arg(expr: @ast::expr) -> bool { fn is_allowed_pure_arg(@mut self, expr: @ast::expr) -> bool {
return match expr.node { return match expr.node {
ast::expr_path(_) => { ast::expr_path(_) => {
let def = self.tcx().def_map.get(&expr.id); let def = self.tcx().def_map.get(&expr.id);
@ -263,7 +269,7 @@ impl check_loan_ctxt {
}; };
} }
fn check_for_conflicting_loans(scope_id: ast::node_id) { fn check_for_conflicting_loans(@mut self, scope_id: ast::node_id) {
debug!("check_for_conflicting_loans(scope_id=%?)", scope_id); debug!("check_for_conflicting_loans(scope_id=%?)", scope_id);
let new_loans = match self.req_maps.req_loan_map.find(&scope_id) { let new_loans = match self.req_maps.req_loan_map.find(&scope_id) {
@ -292,7 +298,7 @@ impl check_loan_ctxt {
} }
} }
fn report_error_if_loans_conflict(&self, fn report_error_if_loans_conflict(@mut self,
old_loan: &Loan, old_loan: &Loan,
new_loan: &Loan) { new_loan: &Loan) {
if old_loan.lp != new_loan.lp { if old_loan.lp != new_loan.lp {
@ -319,14 +325,14 @@ impl check_loan_ctxt {
} }
} }
fn is_local_variable(cmt: cmt) -> bool { fn is_local_variable(@mut self, cmt: cmt) -> bool {
match cmt.cat { match cmt.cat {
cat_local(_) => true, cat_local(_) => true,
_ => false _ => false
} }
} }
fn check_assignment(at: assignment_type, ex: @ast::expr) { fn check_assignment(@mut self, at: assignment_type, ex: @ast::expr) {
// We don't use cat_expr() here because we don't want to treat // We don't use cat_expr() here because we don't want to treat
// auto-ref'd parameters in overloaded operators as rvalues. // auto-ref'd parameters in overloaded operators as rvalues.
let cmt = match self.bccx.tcx.adjustments.find(&ex.id) { let cmt = match self.bccx.tcx.adjustments.find(&ex.id) {
@ -393,7 +399,7 @@ impl check_loan_ctxt {
self.add_write_guards_if_necessary(cmt); self.add_write_guards_if_necessary(cmt);
} }
fn add_write_guards_if_necessary(cmt: cmt) { fn add_write_guards_if_necessary(@mut self, cmt: cmt) {
match cmt.cat { match cmt.cat {
cat_deref(base, deref_count, ptr_kind) => { cat_deref(base, deref_count, ptr_kind) => {
self.add_write_guards_if_necessary(base); self.add_write_guards_if_necessary(base);
@ -416,12 +422,11 @@ impl check_loan_ctxt {
} }
} }
fn check_for_loan_conflicting_with_assignment( fn check_for_loan_conflicting_with_assignment(@mut self,
at: assignment_type, at: assignment_type,
ex: @ast::expr, ex: @ast::expr,
cmt: cmt, cmt: cmt,
lp: @loan_path) { lp: @loan_path) {
for self.walk_loans_of(ex.id, lp) |loan| { for self.walk_loans_of(ex.id, lp) |loan| {
match loan.mutbl { match loan.mutbl {
m_const => { /*ok*/ } m_const => { /*ok*/ }
@ -455,7 +460,7 @@ impl check_loan_ctxt {
} }
} }
fn report_purity_error(pc: purity_cause, sp: span, msg: ~str) { fn report_purity_error(@mut self, pc: purity_cause, sp: span, msg: ~str) {
match pc { match pc {
pc_pure_fn => { pc_pure_fn => {
self.tcx().sess.span_err( self.tcx().sess.span_err(
@ -463,7 +468,8 @@ impl check_loan_ctxt {
fmt!("%s prohibited in pure context", msg)); fmt!("%s prohibited in pure context", msg));
} }
pc_cmt(ref e) => { pc_cmt(ref e) => {
if self.reported.insert((*e).cmt.id, ()) { let reported = self.reported;
if reported.insert((*e).cmt.id, ()) {
self.tcx().sess.span_err( self.tcx().sess.span_err(
(*e).cmt.span, (*e).cmt.span,
fmt!("illegal borrow unless pure: %s", fmt!("illegal borrow unless pure: %s",
@ -477,7 +483,7 @@ impl check_loan_ctxt {
} }
} }
fn check_move_out_from_expr(ex: @ast::expr) { fn check_move_out_from_expr(@mut self, ex: @ast::expr) {
match ex.node { match ex.node {
ast::expr_paren(*) => { ast::expr_paren(*) => {
/* In the case of an expr_paren(), the expression inside /* In the case of an expr_paren(), the expression inside
@ -510,7 +516,7 @@ impl check_loan_ctxt {
} }
} }
fn analyze_move_out_from_cmt(cmt: cmt) -> MoveError { fn analyze_move_out_from_cmt(@mut self, cmt: cmt) -> MoveError {
debug!("check_move_out_from_cmt(cmt=%s)", debug!("check_move_out_from_cmt(cmt=%s)",
self.bccx.cmt_to_repr(cmt)); self.bccx.cmt_to_repr(cmt));
@ -543,7 +549,8 @@ impl check_loan_ctxt {
return MoveOk; return MoveOk;
} }
fn check_call(expr: @ast::expr, fn check_call(@mut self,
expr: @ast::expr,
callee: Option<@ast::expr>, callee: Option<@ast::expr>,
callee_id: ast::node_id, callee_id: ast::node_id,
callee_span: span, callee_span: span,
@ -562,10 +569,13 @@ impl check_loan_ctxt {
} }
} }
fn check_loans_in_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk, fn check_loans_in_fn(fk: visit::fn_kind,
sp: span, id: ast::node_id, &&self: check_loan_ctxt, decl: ast::fn_decl,
visitor: visit::vt<check_loan_ctxt>) body: ast::blk,
{ sp: span,
id: ast::node_id,
&&self: @mut CheckLoanCtxt,
visitor: visit::vt<@mut CheckLoanCtxt>) {
let is_stack_closure = self.is_stack_closure(id); let is_stack_closure = self.is_stack_closure(id);
let fty = ty::node_id_to_type(self.tcx(), id); let fty = ty::node_id_to_type(self.tcx(), id);
@ -580,15 +590,16 @@ fn check_loans_in_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
let fty_sigil = ty::ty_closure_sigil(fty); let fty_sigil = ty::ty_closure_sigil(fty);
check_moves_from_captured_variables(self, id, fty_sigil); check_moves_from_captured_variables(self, id, fty_sigil);
declared_purity = ty::determine_inherited_purity( declared_purity = ty::determine_inherited_purity(
copy self.declared_purity, ty::ty_fn_purity(fty), *self.declared_purity,
ty::ty_fn_purity(fty),
fty_sigil); fty_sigil);
} }
} }
debug!("purity on entry=%?", copy self.declared_purity); debug!("purity on entry=%?", copy self.declared_purity);
do save_and_restore(&mut(self.declared_purity)) { do save_and_restore_managed(self.declared_purity) {
do save_and_restore(&mut(self.fn_args)) { do save_and_restore_managed(self.fn_args) {
self.declared_purity = declared_purity; *self.declared_purity = declared_purity;
match fk { match fk {
visit::fk_anon(*) | visit::fk_anon(*) |
@ -611,7 +622,7 @@ fn check_loans_in_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
_ => {} // Ignore this argument. _ => {} // Ignore this argument.
} }
} }
self.fn_args = @move fn_args; *self.fn_args = @move fn_args;
} }
} }
@ -620,10 +631,9 @@ fn check_loans_in_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
} }
debug!("purity on exit=%?", copy self.declared_purity); debug!("purity on exit=%?", copy self.declared_purity);
fn check_moves_from_captured_variables(&&self: check_loan_ctxt, fn check_moves_from_captured_variables(self: @mut CheckLoanCtxt,
id: ast::node_id, id: ast::node_id,
fty_sigil: ast::Sigil) fty_sigil: ast::Sigil) {
{
match fty_sigil { match fty_sigil {
ast::ManagedSigil | ast::OwnedSigil => { ast::ManagedSigil | ast::OwnedSigil => {
let cap_vars = self.bccx.capture_map.get(&id); let cap_vars = self.bccx.capture_map.get(&id);
@ -666,14 +676,14 @@ fn check_loans_in_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
} }
fn check_loans_in_local(local: @ast::local, fn check_loans_in_local(local: @ast::local,
&&self: check_loan_ctxt, &&self: @mut CheckLoanCtxt,
vt: visit::vt<check_loan_ctxt>) { vt: visit::vt<@mut CheckLoanCtxt>) {
visit::visit_local(local, self, vt); visit::visit_local(local, self, vt);
} }
fn check_loans_in_expr(expr: @ast::expr, fn check_loans_in_expr(expr: @ast::expr,
&&self: check_loan_ctxt, &&self: @mut CheckLoanCtxt,
vt: visit::vt<check_loan_ctxt>) { vt: visit::vt<@mut CheckLoanCtxt>) {
debug!("check_loans_in_expr(expr=%?/%s)", debug!("check_loans_in_expr(expr=%?/%s)",
expr.id, pprust::expr_to_str(expr, self.tcx().sess.intr())); expr.id, pprust::expr_to_str(expr, self.tcx().sess.intr()));
@ -734,16 +744,16 @@ fn check_loans_in_expr(expr: @ast::expr,
} }
fn check_loans_in_block(blk: ast::blk, fn check_loans_in_block(blk: ast::blk,
&&self: check_loan_ctxt, &&self: @mut CheckLoanCtxt,
vt: visit::vt<check_loan_ctxt>) { vt: visit::vt<@mut CheckLoanCtxt>) {
do save_and_restore(&mut(self.declared_purity)) { do save_and_restore_managed(self.declared_purity) {
self.check_for_conflicting_loans(blk.node.id); self.check_for_conflicting_loans(blk.node.id);
match blk.node.rules { match blk.node.rules {
ast::default_blk => { ast::default_blk => {
} }
ast::unsafe_blk => { ast::unsafe_blk => {
self.declared_purity = ast::unsafe_fn; *self.declared_purity = ast::unsafe_fn;
} }
} }
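
declared_purity is saved and restored around every function body and block, and an unsafe_blk overrides it for the duration of that block; that is the discipline save_and_restore_managed wraps. A hedged present-day sketch of the same save/override/restore shape (names invented for illustration):

#[derive(Clone, Copy, Debug, PartialEq)]
enum Purity { Impure, Unsafe }

struct Ctxt { declared_purity: Purity }

// Stash the old value, override it for the nested scope, restore on exit.
fn with_purity<R>(cx: &mut Ctxt, p: Purity, body: impl FnOnce(&mut Ctxt) -> R) -> R {
    let saved = cx.declared_purity;
    cx.declared_purity = p;
    let r = body(cx);
    cx.declared_purity = saved;
    r
}

fn main() {
    let mut cx = Ctxt { declared_purity: Purity::Impure };
    with_purity(&mut cx, Purity::Unsafe, |cx| {
        // Inside an unsafe block the checker skips purity enforcement.
        assert_eq!(cx.declared_purity, Purity::Unsafe);
    });
    assert_eq!(cx.declared_purity, Purity::Impure);
}
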

View file

@ -68,19 +68,22 @@ use syntax::visit;
/// No good. Instead what will happen is that `root_ub` will be set to the /// No good. Instead what will happen is that `root_ub` will be set to the
/// body of the while loop and we will refuse to root the pointer `&*x` /// body of the while loop and we will refuse to root the pointer `&*x`
/// because it would have to be rooted for a region greater than `root_ub`. /// because it would have to be rooted for a region greater than `root_ub`.
enum gather_loan_ctxt = @{bccx: @BorrowckCtxt, struct GatherLoanCtxt {
bccx: @BorrowckCtxt,
req_maps: req_maps, req_maps: req_maps,
mut item_ub: ast::node_id, item_ub: ast::node_id,
mut root_ub: ast::node_id, root_ub: ast::node_id,
mut ignore_adjustments: LinearSet<ast::node_id>}; ignore_adjustments: LinearSet<ast::node_id>
}
pub fn gather_loans(bccx: @BorrowckCtxt, crate: @ast::crate) -> req_maps { pub fn gather_loans(bccx: @BorrowckCtxt, crate: @ast::crate) -> req_maps {
let glcx = gather_loan_ctxt(@{bccx: bccx, let glcx = @mut GatherLoanCtxt {
req_maps: {req_loan_map: HashMap(), bccx: bccx,
pure_map: HashMap()}, req_maps: {req_loan_map: HashMap(), pure_map: HashMap()},
mut item_ub: 0, item_ub: 0,
mut root_ub: 0, root_ub: 0,
mut ignore_adjustments: LinearSet::new()}); ignore_adjustments: LinearSet::new()
};
let v = visit::mk_vt(@visit::Visitor {visit_expr: req_loans_in_expr, let v = visit::mk_vt(@visit::Visitor {visit_expr: req_loans_in_expr,
visit_fn: req_loans_in_fn, visit_fn: req_loans_in_fn,
visit_stmt: add_stmt_to_map, visit_stmt: add_stmt_to_map,
@ -94,8 +97,8 @@ fn req_loans_in_fn(fk: visit::fn_kind,
body: ast::blk, body: ast::blk,
sp: span, sp: span,
id: ast::node_id, id: ast::node_id,
&&self: gather_loan_ctxt, &&self: @mut GatherLoanCtxt,
v: visit::vt<gather_loan_ctxt>) { v: visit::vt<@mut GatherLoanCtxt>) {
// see explanation attached to the `root_ub` field: // see explanation attached to the `root_ub` field:
let old_item_id = self.item_ub; let old_item_id = self.item_ub;
let old_root_ub = self.root_ub; let old_root_ub = self.root_ub;
@ -115,8 +118,8 @@ fn req_loans_in_fn(fk: visit::fn_kind,
} }
fn req_loans_in_expr(ex: @ast::expr, fn req_loans_in_expr(ex: @ast::expr,
&&self: gather_loan_ctxt, &&self: @mut GatherLoanCtxt,
vt: visit::vt<gather_loan_ctxt>) { vt: visit::vt<@mut GatherLoanCtxt>) {
let bccx = self.bccx; let bccx = self.bccx;
let tcx = bccx.tcx; let tcx = bccx.tcx;
let old_root_ub = self.root_ub; let old_root_ub = self.root_ub;
@ -283,10 +286,10 @@ fn req_loans_in_expr(ex: @ast::expr,
self.root_ub = old_root_ub; self.root_ub = old_root_ub;
} }
impl gather_loan_ctxt { impl GatherLoanCtxt {
fn tcx(&self) -> ty::ctxt { self.bccx.tcx } fn tcx(@mut self) -> ty::ctxt { self.bccx.tcx }
fn guarantee_adjustments(&self, fn guarantee_adjustments(@mut self,
expr: @ast::expr, expr: @ast::expr,
adjustment: &ty::AutoAdjustment) { adjustment: &ty::AutoAdjustment) {
debug!("guarantee_adjustments(expr=%s, adjustment=%?)", debug!("guarantee_adjustments(expr=%s, adjustment=%?)",
@ -334,12 +337,12 @@ impl gather_loan_ctxt {
// out loans, which will be added to the `req_loan_map`. This can // out loans, which will be added to the `req_loan_map`. This can
// also entail "rooting" GC'd pointers, which means ensuring // also entail "rooting" GC'd pointers, which means ensuring
// dynamically that they are not freed. // dynamically that they are not freed.
fn guarantee_valid(&self, fn guarantee_valid(@mut self,
cmt: cmt, cmt: cmt,
req_mutbl: ast::mutability, req_mutbl: ast::mutability,
scope_r: ty::Region) { scope_r: ty::Region) {
self.bccx.guaranteed_paths += 1; self.bccx.stats.guaranteed_paths += 1;
debug!("guarantee_valid(cmt=%s, req_mutbl=%s, scope_r=%s)", debug!("guarantee_valid(cmt=%s, req_mutbl=%s, scope_r=%s)",
self.bccx.cmt_to_repr(cmt), self.bccx.cmt_to_repr(cmt),
@ -391,7 +394,7 @@ impl gather_loan_ctxt {
// we were able to guarantee the validity of the ptr, // we were able to guarantee the validity of the ptr,
// perhaps by rooting or because it is immutably // perhaps by rooting or because it is immutably
// rooted. good. // rooted. good.
self.bccx.stable_paths += 1; self.bccx.stats.stable_paths += 1;
} }
Ok(PcIfPure(ref e)) => { Ok(PcIfPure(ref e)) => {
debug!("result of preserve: %?", PcIfPure((*e))); debug!("result of preserve: %?", PcIfPure((*e)));
@ -403,8 +406,9 @@ impl gather_loan_ctxt {
// if the scope is some block/expr in the // if the scope is some block/expr in the
// fn, then just require that this scope // fn, then just require that this scope
// be pure // be pure
self.req_maps.pure_map.insert(pure_id, (*e)); let pure_map = self.req_maps.pure_map;
self.bccx.req_pure_paths += 1; pure_map.insert(pure_id, *e);
self.bccx.stats.req_pure_paths += 1;
debug!("requiring purity for scope %?", debug!("requiring purity for scope %?",
scope_r); scope_r);
@ -441,9 +445,10 @@ impl gather_loan_ctxt {
// has type `@mut{f:int}`, this check might fail because `&x.f` // has type `@mut{f:int}`, this check might fail because `&x.f`
// requires an immutable pointer, but `f` lives in (aliased) // requires an immutable pointer, but `f` lives in (aliased)
// mutable memory. // mutable memory.
fn check_mutbl(&self, fn check_mutbl(@mut self,
req_mutbl: ast::mutability, req_mutbl: ast::mutability,
cmt: cmt) -> bckres<PreserveCondition> { cmt: cmt)
-> bckres<PreserveCondition> {
debug!("check_mutbl(req_mutbl=%?, cmt.mutbl=%?)", debug!("check_mutbl(req_mutbl=%?, cmt.mutbl=%?)",
req_mutbl, cmt.mutbl); req_mutbl, cmt.mutbl);
@ -469,7 +474,7 @@ impl gather_loan_ctxt {
} }
} }
fn add_loans(&self, fn add_loans(@mut self,
cmt: cmt, cmt: cmt,
req_mutbl: ast::mutability, req_mutbl: ast::mutability,
scope_r: ty::Region, scope_r: ty::Region,
@ -522,7 +527,7 @@ impl gather_loan_ctxt {
self.add_loans_to_scope_id(scope_id, move loans); self.add_loans_to_scope_id(scope_id, move loans);
if req_mutbl == m_imm && cmt.mutbl != m_imm { if req_mutbl == m_imm && cmt.mutbl != m_imm {
self.bccx.loaned_paths_imm += 1; self.bccx.stats.loaned_paths_imm += 1;
if self.tcx().sess.borrowck_note_loan() { if self.tcx().sess.borrowck_note_loan() {
self.bccx.span_note( self.bccx.span_note(
@ -530,11 +535,13 @@ impl gather_loan_ctxt {
fmt!("immutable loan required")); fmt!("immutable loan required"));
} }
} else { } else {
self.bccx.loaned_paths_same += 1; self.bccx.stats.loaned_paths_same += 1;
} }
} }
fn add_loans_to_scope_id(&self, scope_id: ast::node_id, +loans: ~[Loan]) { fn add_loans_to_scope_id(@mut self,
scope_id: ast::node_id,
+loans: ~[Loan]) {
debug!("adding %u loans to scope_id %?", loans.len(), scope_id); debug!("adding %u loans to scope_id %?", loans.len(), scope_id);
match self.req_maps.req_loan_map.find(&scope_id) { match self.req_maps.req_loan_map.find(&scope_id) {
Some(req_loans) => { Some(req_loans) => {
@ -542,12 +549,13 @@ impl gather_loan_ctxt {
} }
None => { None => {
let dvec = @dvec::from_vec(move loans); let dvec = @dvec::from_vec(move loans);
self.req_maps.req_loan_map.insert(scope_id, dvec); let req_loan_map = self.req_maps.req_loan_map;
req_loan_map.insert(scope_id, dvec);
} }
} }
} }
fn gather_pat(&self, fn gather_pat(@mut self,
discr_cmt: cmt, discr_cmt: cmt,
root_pat: @ast::pat, root_pat: @ast::pat,
arm_id: ast::node_id, arm_id: ast::node_id,
@ -602,10 +610,9 @@ impl gather_loan_ctxt {
} }
} }
fn vec_slice_info(&self, fn vec_slice_info(@mut self,
pat: @ast::pat, pat: @ast::pat,
tail_ty: ty::t) -> (ast::mutability, ty::Region) tail_ty: ty::t) -> (ast::mutability, ty::Region) {
{
/*! /*!
* *
* In a pattern like [a, b, ..c], normally `c` has slice type, * In a pattern like [a, b, ..c], normally `c` has slice type,
@ -631,11 +638,11 @@ impl gather_loan_ctxt {
} }
} }
fn pat_is_variant_or_struct(&self, pat: @ast::pat) -> bool { fn pat_is_variant_or_struct(@mut self, pat: @ast::pat) -> bool {
pat_util::pat_is_variant_or_struct(self.bccx.tcx.def_map, pat) pat_util::pat_is_variant_or_struct(self.bccx.tcx.def_map, pat)
} }
fn pat_is_binding(&self, pat: @ast::pat) -> bool { fn pat_is_binding(@mut self, pat: @ast::pat) -> bool {
pat_util::pat_is_binding(self.bccx.tcx.def_map, pat) pat_util::pat_is_binding(self.bccx.tcx.def_map, pat)
} }
} }
@ -643,8 +650,8 @@ impl gather_loan_ctxt {
// Setting up info that preserve needs. // Setting up info that preserve needs.
// This is just the most convenient place to do it. // This is just the most convenient place to do it.
fn add_stmt_to_map(stmt: @ast::stmt, fn add_stmt_to_map(stmt: @ast::stmt,
&&self: gather_loan_ctxt, &&self: @mut GatherLoanCtxt,
vt: visit::vt<gather_loan_ctxt>) { vt: visit::vt<@mut GatherLoanCtxt>) {
match stmt.node { match stmt.node {
ast::stmt_expr(_, id) | ast::stmt_semi(_, id) => { ast::stmt_expr(_, id) | ast::stmt_semi(_, id) => {
self.bccx.stmt_map.insert(id, ()); self.bccx.stmt_map.insert(id, ());
@ -653,3 +660,4 @@ fn add_stmt_to_map(stmt: @ast::stmt,
} }
visit::visit_stmt(stmt, self, vt); visit::visit_stmt(stmt, self, vt);
} }
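
add_loans_to_scope_id groups the freshly computed loans by their enclosing scope, appending to an existing entry or creating a new one. In present-day Rust the same bookkeeping collapses onto HashMap::entry; a minimal sketch with Loan reduced to a string label:

use std::collections::HashMap;

type Loan = String;

// Append the loans required by `scope_id`, creating the entry on first use.
fn add_loans_to_scope_id(
    req_loan_map: &mut HashMap<u32, Vec<Loan>>,
    scope_id: u32,
    loans: Vec<Loan>,
) {
    req_loan_map.entry(scope_id).or_default().extend(loans);
}

fn main() {
    let mut req_loan_map = HashMap::new();
    add_loans_to_scope_id(&mut req_loan_map, 7, vec!["&x".to_string()]);
    add_loans_to_scope_id(&mut req_loan_map, 7, vec!["&y.f".to_string()]);
    assert_eq!(req_loan_map[&7].len(), 2);
}
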

View file

@ -62,18 +62,17 @@ impl BorrowckCtxt {
cmt: cmt, cmt: cmt,
scope_region: ty::Region, scope_region: ty::Region,
mutbl: ast::mutability) -> bckres<~[Loan]> { mutbl: ast::mutability) -> bckres<~[Loan]> {
let lc = LoanContext { let mut lc = LoanContext {
bccx: self, bccx: self,
scope_region: scope_region, scope_region: scope_region,
loans: ~[] loans: ~[]
}; };
match lc.loan(cmt, mutbl, true) { match lc.loan(cmt, mutbl, true) {
Err(ref e) => Err((*e)), Err(ref e) => return Err((*e)),
Ok(()) => { Ok(()) => {}
let LoanContext {loans, _} = move lc;
Ok(loans)
}
} }
// XXX: Workaround for borrow check bug.
Ok(copy lc.loans)
} }
} }
@ -84,17 +83,16 @@ struct LoanContext {
scope_region: ty::Region, scope_region: ty::Region,
// accumulated list of loans that will be required // accumulated list of loans that will be required
mut loans: ~[Loan] loans: ~[Loan]
} }
impl LoanContext { impl LoanContext {
fn tcx(&self) -> ty::ctxt { self.bccx.tcx } fn tcx(&mut self) -> ty::ctxt { self.bccx.tcx }
fn loan(&self, fn loan(&mut self,
cmt: cmt, cmt: cmt,
req_mutbl: ast::mutability, req_mutbl: ast::mutability,
owns_lent_data: bool) -> bckres<()> owns_lent_data: bool) -> bckres<()> {
{
/*! /*!
* *
* The main routine. * The main routine.
@ -198,7 +196,7 @@ impl LoanContext {
// A "stable component" is one where assigning the base of the // A "stable component" is one where assigning the base of the
// component cannot cause the component itself to change types. // component cannot cause the component itself to change types.
// Example: record fields. // Example: record fields.
fn loan_stable_comp(&self, fn loan_stable_comp(&mut self,
cmt: cmt, cmt: cmt,
cmt_base: cmt, cmt_base: cmt,
req_mutbl: ast::mutability, req_mutbl: ast::mutability,
@ -268,12 +266,11 @@ impl LoanContext {
// An "unstable deref" means a deref of a ptr/comp where, if the // An "unstable deref" means a deref of a ptr/comp where, if the
// base of the deref is assigned to, pointers into the result of the // base of the deref is assigned to, pointers into the result of the
// deref would be invalidated. Examples: interior of variants, uniques. // deref would be invalidated. Examples: interior of variants, uniques.
fn loan_unstable_deref(&self, fn loan_unstable_deref(&mut self,
cmt: cmt, cmt: cmt,
cmt_base: cmt, cmt_base: cmt,
req_mutbl: ast::mutability, req_mutbl: ast::mutability,
owns_lent_data: bool) -> bckres<()> owns_lent_data: bool) -> bckres<()> {
{
// Variant components: the base must be immutable, because // Variant components: the base must be immutable, because
// if it is overwritten, the types of the embedded data // if it is overwritten, the types of the embedded data
// could change. // could change.
@ -284,12 +281,11 @@ impl LoanContext {
} }
} }
fn issue_loan(&self, fn issue_loan(&mut self,
cmt: cmt, cmt: cmt,
scope_ub: ty::Region, scope_ub: ty::Region,
req_mutbl: ast::mutability, req_mutbl: ast::mutability,
owns_lent_data: bool) -> bckres<()> owns_lent_data: bool) -> bckres<()> {
{
// Subtle: the `scope_ub` is the maximal lifetime of `cmt`. // Subtle: the `scope_ub` is the maximal lifetime of `cmt`.
// Therefore, if `cmt` owns the data being lent, then the // Therefore, if `cmt` owns the data being lent, then the
// scope of the loan must be less than `scope_ub`, or else the // scope of the loan must be less than `scope_ub`, or else the
@ -301,8 +297,8 @@ impl LoanContext {
// reborrowed. // reborrowed.
if !owns_lent_data || if !owns_lent_data ||
self.bccx.is_subregion_of(self.scope_region, scope_ub) self.bccx.is_subregion_of(/*bad*/copy self.scope_region,
{ scope_ub) {
match req_mutbl { match req_mutbl {
m_mutbl => { m_mutbl => {
// We do not allow non-mutable data to be loaned // We do not allow non-mutable data to be loaned
@ -340,3 +336,4 @@ impl LoanContext {
} }
} }
} }

View file

@ -262,7 +262,8 @@ pub fn check_crate(
capture_map: moves::CaptureMap, capture_map: moves::CaptureMap,
crate: @ast::crate) -> (root_map, mutbl_map, write_guard_map) crate: @ast::crate) -> (root_map, mutbl_map, write_guard_map)
{ {
let bccx = @BorrowckCtxt {tcx: tcx, let bccx = @BorrowckCtxt {
tcx: tcx,
method_map: method_map, method_map: method_map,
moves_map: moves_map, moves_map: moves_map,
capture_map: capture_map, capture_map: capture_map,
@ -270,11 +271,14 @@ pub fn check_crate(
mutbl_map: HashMap(), mutbl_map: HashMap(),
write_guard_map: HashMap(), write_guard_map: HashMap(),
stmt_map: HashMap(), stmt_map: HashMap(),
mut loaned_paths_same: 0, stats: @mut BorrowStats {
mut loaned_paths_imm: 0, loaned_paths_same: 0,
mut stable_paths: 0, loaned_paths_imm: 0,
mut req_pure_paths: 0, stable_paths: 0,
mut guaranteed_paths: 0}; req_pure_paths: 0,
guaranteed_paths: 0,
}
};
let req_maps = gather_loans::gather_loans(bccx, crate); let req_maps = gather_loans::gather_loans(bccx, crate);
check_loans::check_loans(bccx, req_maps, crate); check_loans::check_loans(bccx, req_maps, crate);
@ -282,22 +286,22 @@ pub fn check_crate(
if tcx.sess.borrowck_stats() { if tcx.sess.borrowck_stats() {
io::println(~"--- borrowck stats ---"); io::println(~"--- borrowck stats ---");
io::println(fmt!("paths requiring guarantees: %u", io::println(fmt!("paths requiring guarantees: %u",
bccx.guaranteed_paths)); bccx.stats.guaranteed_paths));
io::println(fmt!("paths requiring loans : %s", io::println(fmt!("paths requiring loans : %s",
make_stat(bccx, bccx.loaned_paths_same))); make_stat(bccx, bccx.stats.loaned_paths_same)));
io::println(fmt!("paths requiring imm loans : %s", io::println(fmt!("paths requiring imm loans : %s",
make_stat(bccx, bccx.loaned_paths_imm))); make_stat(bccx, bccx.stats.loaned_paths_imm)));
io::println(fmt!("stable paths : %s", io::println(fmt!("stable paths : %s",
make_stat(bccx, bccx.stable_paths))); make_stat(bccx, bccx.stats.stable_paths)));
io::println(fmt!("paths requiring purity : %s", io::println(fmt!("paths requiring purity : %s",
make_stat(bccx, bccx.req_pure_paths))); make_stat(bccx, bccx.stats.req_pure_paths)));
} }
return (bccx.root_map, bccx.mutbl_map, bccx.write_guard_map); return (bccx.root_map, bccx.mutbl_map, bccx.write_guard_map);
fn make_stat(bccx: &BorrowckCtxt, stat: uint) -> ~str { fn make_stat(bccx: &BorrowckCtxt, stat: uint) -> ~str {
let stat_f = stat as float; let stat_f = stat as float;
let total = bccx.guaranteed_paths as float; let total = bccx.stats.guaranteed_paths as float;
fmt!("%u (%.0f%%)", stat , stat_f * 100f / total) fmt!("%u (%.0f%%)", stat , stat_f * 100f / total)
} }
} }
@ -316,11 +320,15 @@ pub struct BorrowckCtxt {
stmt_map: stmt_set, stmt_map: stmt_set,
// Statistics: // Statistics:
mut loaned_paths_same: uint, stats: @mut BorrowStats
mut loaned_paths_imm: uint, }
mut stable_paths: uint,
mut req_pure_paths: uint, pub struct BorrowStats {
mut guaranteed_paths: uint loaned_paths_same: uint,
loaned_paths_imm: uint,
stable_paths: uint,
req_pure_paths: uint,
guaranteed_paths: uint
} }
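The hunk above collapses five loose `mut` counters on the borrowck context into a single immutable `stats` field holding an `@mut BorrowStats`. A minimal modern-Rust sketch of the same shape, with `Rc<RefCell<...>>` standing in for `@mut` (that substitution, and the small `main` driver, are illustrative assumptions, not the 2013 API):

    use std::cell::RefCell;
    use std::rc::Rc;

    // Counters gathered into one struct instead of loose `mut` fields.
    #[derive(Default, Debug)]
    struct BorrowStats {
        loaned_paths_same: usize,
        loaned_paths_imm: usize,
        stable_paths: usize,
        req_pure_paths: usize,
        guaranteed_paths: usize,
    }

    // The context itself stays immutable; only the shared stats box is mutable.
    struct BorrowckCtxt {
        stats: Rc<RefCell<BorrowStats>>,
    }

    fn main() {
        let bccx = BorrowckCtxt {
            stats: Rc::new(RefCell::new(BorrowStats::default())),
        };
        // Mirrors `self.bccx.stats.loaned_paths_imm += 1;` from the diff above.
        bccx.stats.borrow_mut().loaned_paths_imm += 1;
        bccx.stats.borrow_mut().guaranteed_paths += 1;
        println!("{:?}", bccx.stats.borrow());
    }

Grouping the counters keeps the context freely shareable while confining mutation to one clearly marked box.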
pub struct RootInfo { pub struct RootInfo {
@ -397,7 +405,15 @@ pub type req_maps = {
}; };
pub fn save_and_restore<T:Copy,U>(save_and_restore_t: &mut T, pub fn save_and_restore<T:Copy,U>(save_and_restore_t: &mut T,
f: fn() -> U) -> U { f: &fn() -> U) -> U {
let old_save_and_restore_t = *save_and_restore_t;
let u = f();
*save_and_restore_t = old_save_and_restore_t;
move u
}
pub fn save_and_restore_managed<T:Copy,U>(save_and_restore_t: @mut T,
f: &fn() -> U) -> U {
let old_save_and_restore_t = *save_and_restore_t; let old_save_and_restore_t = *save_and_restore_t;
let u = f(); let u = f();
*save_and_restore_t = old_save_and_restore_t; *save_and_restore_t = old_save_and_restore_t;
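`save_and_restore` and the new `save_and_restore_managed` snapshot a mutable slot, run a closure, and then write the snapshot back, so mutations made inside the closure do not leak out. A small sketch of that pattern, assuming a `RefCell` in place of the `@mut T` box:

    use std::cell::RefCell;

    // Snapshot the slot, run `f`, then write the snapshot back.
    fn save_and_restore_managed<T: Copy, U>(slot: &RefCell<T>, f: impl FnOnce() -> U) -> U {
        let old = *slot.borrow();
        let u = f();
        *slot.borrow_mut() = old;
        u
    }

    fn main() {
        let item_id = RefCell::new(0);
        let doubled = save_and_restore_managed(&item_id, || {
            *item_id.borrow_mut() = 42; // changed while the closure runs
            *item_id.borrow() * 2
        });
        assert_eq!(doubled, 84);
        assert_eq!(*item_id.borrow(), 0); // snapshot restored afterwards
    }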

View file

@ -66,10 +66,10 @@
* methods. It effectively does a reverse walk of the AST; whenever we * methods. It effectively does a reverse walk of the AST; whenever we
* reach a loop node, we iterate until a fixed point is reached. * reach a loop node, we iterate until a fixed point is reached.
* *
* ## The `users` struct * ## The `Users` struct
* *
* At each live node `N`, we track three pieces of information for each * At each live node `N`, we track three pieces of information for each
* variable `V` (these are encapsulated in the `users` struct): * variable `V` (these are encapsulated in the `Users` struct):
* *
* - `reader`: the `LiveNode` ID of some node which will read the value * - `reader`: the `LiveNode` ID of some node which will read the value
* that `V` holds on entry to `N`. Formally: a node `M` such * that `V` holds on entry to `N`. Formally: a node `M` such
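The comment above tracks, for every live node `N` and variable `V`, a prospective reader, a prospective writer, and a used flag. A compact sketch of one way to hold that data, assuming a dense table with one `Users` record per (live node, variable) pair; the flat-vector indexing here is an illustrative assumption, not necessarily the file's exact layout:

    // One record per (live node, variable) pair, as the comment describes.
    #[derive(Clone, Copy)]
    struct Users {
        reader: usize, // live node that will read the value, or INVALID
        writer: usize, // live node that will write it, or INVALID
        used: bool,    // whether the value is ever used at all
    }

    const INVALID: usize = usize::MAX;

    fn invalid_users() -> Users {
        Users { reader: INVALID, writer: INVALID, used: false }
    }

    struct Liveness {
        num_vars: usize,
        users: Vec<Users>, // dense table: num_live_nodes * num_vars entries
    }

    impl Liveness {
        fn new(num_live_nodes: usize, num_vars: usize) -> Liveness {
            Liveness { num_vars, users: vec![invalid_users(); num_live_nodes * num_vars] }
        }
        fn idx(&self, ln: usize, var: usize) -> usize {
            ln * self.num_vars + var
        }
    }

    fn main() {
        let mut l = Liveness::new(4, 2);
        let i = l.idx(3, 1);
        l.users[i] = Users { reader: 3, writer: INVALID, used: true };
        assert!(l.users[l.idx(3, 1)].used);
    }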
@ -214,8 +214,11 @@ pub fn check_crate(tcx: ty::ctxt,
}); });
let last_use_map = HashMap(); let last_use_map = HashMap();
let initial_maps = @IrMaps(tcx, method_map, variable_moves_map, let initial_maps = @mut IrMaps(tcx,
capture_map, last_use_map); method_map,
variable_moves_map,
capture_map,
last_use_map);
visit::visit_crate(*crate, initial_maps, visitor); visit::visit_crate(*crate, initial_maps, visitor);
tcx.sess.abort_if_errors(); tcx.sess.abort_if_errors();
return last_use_map; return last_use_map;
@ -300,20 +303,21 @@ struct IrMaps {
capture_map: moves::CaptureMap, capture_map: moves::CaptureMap,
last_use_map: last_use_map, last_use_map: last_use_map,
mut num_live_nodes: uint, num_live_nodes: uint,
mut num_vars: uint, num_vars: uint,
live_node_map: HashMap<node_id, LiveNode>, live_node_map: HashMap<node_id, LiveNode>,
variable_map: HashMap<node_id, Variable>, variable_map: HashMap<node_id, Variable>,
capture_info_map: HashMap<node_id, @~[CaptureInfo]>, capture_info_map: HashMap<node_id, @~[CaptureInfo]>,
mut var_kinds: ~[VarKind], var_kinds: ~[VarKind],
mut lnks: ~[LiveNodeKind], lnks: ~[LiveNodeKind],
} }
fn IrMaps(tcx: ty::ctxt, fn IrMaps(tcx: ty::ctxt,
method_map: typeck::method_map, method_map: typeck::method_map,
variable_moves_map: moves::VariableMovesMap, variable_moves_map: moves::VariableMovesMap,
capture_map: moves::CaptureMap, capture_map: moves::CaptureMap,
last_use_map: last_use_map) -> IrMaps { last_use_map: last_use_map)
-> IrMaps {
IrMaps { IrMaps {
tcx: tcx, tcx: tcx,
method_map: method_map, method_map: method_map,
@ -331,7 +335,7 @@ fn IrMaps(tcx: ty::ctxt,
} }
impl IrMaps { impl IrMaps {
fn add_live_node(lnk: LiveNodeKind) -> LiveNode { fn add_live_node(&mut self, lnk: LiveNodeKind) -> LiveNode {
let ln = LiveNode(self.num_live_nodes); let ln = LiveNode(self.num_live_nodes);
self.lnks.push(lnk); self.lnks.push(lnk);
self.num_live_nodes += 1; self.num_live_nodes += 1;
@ -342,14 +346,16 @@ impl IrMaps {
ln ln
} }
fn add_live_node_for_node(node_id: node_id, lnk: LiveNodeKind) { fn add_live_node_for_node(&mut self,
node_id: node_id,
lnk: LiveNodeKind) {
let ln = self.add_live_node(lnk); let ln = self.add_live_node(lnk);
self.live_node_map.insert(node_id, ln); self.live_node_map.insert(node_id, ln);
debug!("%s is node %d", ln.to_str(), node_id); debug!("%s is node %d", ln.to_str(), node_id);
} }
fn add_variable(vk: VarKind) -> Variable { fn add_variable(&mut self, vk: VarKind) -> Variable {
let v = Variable(self.num_vars); let v = Variable(self.num_vars);
self.var_kinds.push(vk); self.var_kinds.push(vk);
self.num_vars += 1; self.num_vars += 1;
@ -367,7 +373,7 @@ impl IrMaps {
v v
} }
fn variable(node_id: node_id, span: span) -> Variable { fn variable(&mut self, node_id: node_id, span: span) -> Variable {
match self.variable_map.find(&node_id) { match self.variable_map.find(&node_id) {
Some(var) => var, Some(var) => var,
None => { None => {
@ -377,7 +383,7 @@ impl IrMaps {
} }
} }
fn variable_name(var: Variable) -> ~str { fn variable_name(&mut self, var: Variable) -> ~str {
match copy self.var_kinds[*var] { match copy self.var_kinds[*var] {
Local(LocalInfo {ident: nm, _}) | Local(LocalInfo {ident: nm, _}) |
Arg(_, nm, _) => self.tcx.sess.str_of(nm), Arg(_, nm, _) => self.tcx.sess.str_of(nm),
@ -385,11 +391,11 @@ impl IrMaps {
} }
} }
fn set_captures(node_id: node_id, +cs: ~[CaptureInfo]) { fn set_captures(&mut self, node_id: node_id, +cs: ~[CaptureInfo]) {
self.capture_info_map.insert(node_id, @cs); self.capture_info_map.insert(node_id, @cs);
} }
fn captures(expr: @expr) -> @~[CaptureInfo] { fn captures(&mut self, expr: @expr) -> @~[CaptureInfo] {
match self.capture_info_map.find(&expr.id) { match self.capture_info_map.find(&expr.id) {
Some(caps) => caps, Some(caps) => caps,
None => { None => {
@ -398,11 +404,11 @@ impl IrMaps {
} }
} }
fn lnk(ln: LiveNode) -> LiveNodeKind { fn lnk(&mut self, ln: LiveNode) -> LiveNodeKind {
self.lnks[*ln] self.lnks[*ln]
} }
fn add_last_use(expr_id: node_id, var: Variable) { fn add_last_use(&mut self, expr_id: node_id, var: Variable) {
let vk = self.var_kinds[*var]; let vk = self.var_kinds[*var];
debug!("Node %d is a last use of variable %?", expr_id, vk); debug!("Node %d is a last use of variable %?", expr_id, vk);
match vk { match vk {
@ -429,13 +435,18 @@ impl IrMaps {
} }
} }
fn visit_fn(fk: visit::fn_kind, decl: fn_decl, body: blk, fn visit_fn(fk: visit::fn_kind,
sp: span, id: node_id, &&self: @IrMaps, v: vt<@IrMaps>) { decl: fn_decl,
body: blk,
sp: span,
id: node_id,
&&self: @mut IrMaps,
v: vt<@mut IrMaps>) {
debug!("visit_fn: id=%d", id); debug!("visit_fn: id=%d", id);
let _i = ::util::common::indenter(); let _i = ::util::common::indenter();
// swap in a new set of IR maps for this function body: // swap in a new set of IR maps for this function body:
let fn_maps = @IrMaps(self.tcx, let fn_maps = @mut IrMaps(self.tcx,
self.method_map, self.method_map,
self.variable_moves_map, self.variable_moves_map,
self.capture_map, self.capture_map,
@ -449,7 +460,7 @@ fn visit_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
|_bm, arg_id, _x, path| { |_bm, arg_id, _x, path| {
debug!("adding argument %d", arg_id); debug!("adding argument %d", arg_id);
let ident = ast_util::path_to_ident(path); let ident = ast_util::path_to_ident(path);
(*fn_maps).add_variable(Arg(arg_id, ident, mode)); fn_maps.add_variable(Arg(arg_id, ident, mode));
} }
}; };
@ -486,10 +497,10 @@ fn visit_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
// - exit_ln represents the end of the fn, either by return or fail // - exit_ln represents the end of the fn, either by return or fail
// - implicit_ret_var is a pseudo-variable that represents // - implicit_ret_var is a pseudo-variable that represents
// an implicit return // an implicit return
let specials = { let specials = Specials {
exit_ln: (*fn_maps).add_live_node(ExitNode), exit_ln: fn_maps.add_live_node(ExitNode),
fallthrough_ln: (*fn_maps).add_live_node(ExitNode), fallthrough_ln: fn_maps.add_live_node(ExitNode),
no_ret_var: (*fn_maps).add_variable(ImplicitRet) no_ret_var: fn_maps.add_variable(ImplicitRet)
}; };
// compute liveness // compute liveness
@ -509,7 +520,7 @@ fn visit_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
lsets.warn_about_unused_args(decl, entry_ln); lsets.warn_about_unused_args(decl, entry_ln);
} }
fn visit_local(local: @local, &&self: @IrMaps, vt: vt<@IrMaps>) { fn visit_local(local: @local, &&self: @mut IrMaps, vt: vt<@mut IrMaps>) {
let def_map = self.tcx.def_map; let def_map = self.tcx.def_map;
do pat_util::pat_bindings(def_map, local.node.pat) |_bm, p_id, sp, path| { do pat_util::pat_bindings(def_map, local.node.pat) |_bm, p_id, sp, path| {
debug!("adding local variable %d", p_id); debug!("adding local variable %d", p_id);
@ -529,7 +540,7 @@ fn visit_local(local: @local, &&self: @IrMaps, vt: vt<@IrMaps>) {
visit::visit_local(local, self, vt); visit::visit_local(local, self, vt);
} }
fn visit_arm(arm: arm, &&self: @IrMaps, vt: vt<@IrMaps>) { fn visit_arm(arm: arm, &&self: @mut IrMaps, vt: vt<@mut IrMaps>) {
let def_map = self.tcx.def_map; let def_map = self.tcx.def_map;
for arm.pats.each |pat| { for arm.pats.each |pat| {
do pat_util::pat_bindings(def_map, *pat) |bm, p_id, sp, path| { do pat_util::pat_bindings(def_map, *pat) |bm, p_id, sp, path| {
@ -548,7 +559,7 @@ fn visit_arm(arm: arm, &&self: @IrMaps, vt: vt<@IrMaps>) {
visit::visit_arm(arm, self, vt); visit::visit_arm(arm, self, vt);
} }
fn visit_expr(expr: @expr, &&self: @IrMaps, vt: vt<@IrMaps>) { fn visit_expr(expr: @expr, &&self: @mut IrMaps, vt: vt<@mut IrMaps>) {
match expr.node { match expr.node {
// live nodes required for uses or definitions of variables: // live nodes required for uses or definitions of variables:
expr_path(_) => { expr_path(_) => {
@ -626,21 +637,25 @@ fn visit_expr(expr: @expr, &&self: @IrMaps, vt: vt<@IrMaps>) {
// Actually we compute just a bit more than just liveness, but we use // Actually we compute just a bit more than just liveness, but we use
// the same basic propagation framework in all cases. // the same basic propagation framework in all cases.
type users = { struct Users {
reader: LiveNode, reader: LiveNode,
writer: LiveNode, writer: LiveNode,
used: bool used: bool
};
fn invalid_users() -> users {
{reader: invalid_node(), writer: invalid_node(), used: false}
} }
type Specials = { fn invalid_users() -> Users {
Users {
reader: invalid_node(),
writer: invalid_node(),
used: false
}
}
struct Specials {
exit_ln: LiveNode, exit_ln: LiveNode,
fallthrough_ln: LiveNode, fallthrough_ln: LiveNode,
no_ret_var: Variable no_ret_var: Variable
}; }
const ACC_READ: uint = 1u; const ACC_READ: uint = 1u;
const ACC_WRITE: uint = 2u; const ACC_WRITE: uint = 2u;
@ -650,10 +665,10 @@ type LiveNodeMap = HashMap<node_id, LiveNode>;
struct Liveness { struct Liveness {
tcx: ty::ctxt, tcx: ty::ctxt,
ir: @IrMaps, ir: @mut IrMaps,
s: Specials, s: Specials,
successors: ~[mut LiveNode], successors: ~[mut LiveNode],
users: ~[mut users], users: ~[mut Users],
// The list of node IDs for the nested loop scopes // The list of node IDs for the nested loop scopes
// we're in. // we're in.
loop_scope: DVec<node_id>, loop_scope: DVec<node_id>,
@ -664,7 +679,7 @@ struct Liveness {
cont_ln: LiveNodeMap cont_ln: LiveNodeMap
} }
fn Liveness(ir: @IrMaps, specials: Specials) -> Liveness { fn Liveness(ir: @mut IrMaps, specials: Specials) -> Liveness {
Liveness { Liveness {
ir: ir, ir: ir,
tcx: ir.tcx, tcx: ir.tcx,

View file

@ -408,28 +408,24 @@ pub struct region_dep {
pub type dep_map = HashMap<ast::node_id, @DVec<region_dep>>; pub type dep_map = HashMap<ast::node_id, @DVec<region_dep>>;
pub type determine_rp_ctxt_ = { pub struct DetermineRpCtxt {
sess: Session, sess: Session,
ast_map: ast_map::map, ast_map: ast_map::map,
def_map: resolve::DefMap, def_map: resolve::DefMap,
region_paramd_items: region_paramd_items, region_paramd_items: region_paramd_items,
dep_map: dep_map, dep_map: dep_map,
worklist: DVec<ast::node_id>, worklist: ~[ast::node_id],
// the innermost enclosing item id // the innermost enclosing item id
mut item_id: ast::node_id, item_id: ast::node_id,
// true when we are within an item but not within a method. // true when we are within an item but not within a method.
// see long discussion on region_is_relevant() // see long discussion on region_is_relevant()
mut anon_implies_rp: bool, anon_implies_rp: bool,
// encodes the context of the current type; invariant if // encodes the context of the current type; invariant if
// mutable, covariant otherwise // mutable, covariant otherwise
mut ambient_variance: region_variance, ambient_variance: region_variance,
};
pub enum determine_rp_ctxt {
determine_rp_ctxt_(@determine_rp_ctxt_)
} }
pub fn join_variance(++variance1: region_variance, pub fn join_variance(++variance1: region_variance,
@ -465,15 +461,15 @@ pub fn add_variance(+ambient_variance: region_variance,
} }
} }
pub impl determine_rp_ctxt { pub impl DetermineRpCtxt {
fn add_variance(variance: region_variance) -> region_variance { fn add_variance(@mut self, variance: region_variance) -> region_variance {
add_variance(self.ambient_variance, variance) add_variance(self.ambient_variance, variance)
} }
/// Records that item `id` is region-parameterized with the /// Records that item `id` is region-parameterized with the
/// variance `variance`. If `id` was already parameterized, then /// variance `variance`. If `id` was already parameterized, then
/// the new variance is joined with the old variance. /// the new variance is joined with the old variance.
fn add_rp(id: ast::node_id, variance: region_variance) { fn add_rp(@mut self, id: ast::node_id, variance: region_variance) {
assert id != 0; assert id != 0;
let old_variance = self.region_paramd_items.find(&id); let old_variance = self.region_paramd_items.find(&id);
let joined_variance = match old_variance { let joined_variance = match old_variance {
@ -487,7 +483,8 @@ pub impl determine_rp_ctxt {
joined_variance, old_variance, variance); joined_variance, old_variance, variance);
if Some(joined_variance) != old_variance { if Some(joined_variance) != old_variance {
self.region_paramd_items.insert(id, joined_variance); let region_paramd_items = self.region_paramd_items;
region_paramd_items.insert(id, joined_variance);
self.worklist.push(id); self.worklist.push(id);
} }
} }
@ -497,7 +494,7 @@ pub impl determine_rp_ctxt {
/// `from`. Put another way, it indicates that the current item /// `from`. Put another way, it indicates that the current item
/// contains a value of type `from`, so if `from` is /// contains a value of type `from`, so if `from` is
/// region-parameterized, so is the current item. /// region-parameterized, so is the current item.
fn add_dep(from: ast::node_id) { fn add_dep(@mut self, from: ast::node_id) {
debug!("add dependency from %d -> %d (%s -> %s) with variance %?", debug!("add dependency from %d -> %d (%s -> %s) with variance %?",
from, self.item_id, from, self.item_id,
ast_map::node_id_to_str(self.ast_map, from, ast_map::node_id_to_str(self.ast_map, from,
@ -509,7 +506,8 @@ pub impl determine_rp_ctxt {
Some(vec) => vec, Some(vec) => vec,
None => { None => {
let vec = @DVec(); let vec = @DVec();
self.dep_map.insert(from, vec); let dep_map = self.dep_map;
dep_map.insert(from, vec);
vec vec
} }
}; };
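`add_rp` and `add_dep` together set up a worklist propagation: when an item's region-parameterization changes it is pushed onto the worklist, and the dep map records which enclosing items must then be revisited. A self-contained sketch of that fixed-point loop, with integer ids and a plain boolean standing in for the joined variance (a simplification of the real variance lattice):

    use std::collections::{HashMap, HashSet};

    // dep_map[from] lists the items that embed a value of type `from`,
    // so they become region-parameterized whenever `from` does.
    fn propagate(dep_map: &HashMap<u32, Vec<u32>>, seeds: &[u32]) -> HashSet<u32> {
        let mut rp: HashSet<u32> = seeds.iter().copied().collect();
        let mut worklist: Vec<u32> = seeds.to_vec();
        while let Some(id) = worklist.pop() {
            for &dep in dep_map.get(&id).into_iter().flatten() {
                if rp.insert(dep) {
                    // Only newly discovered facts go back on the worklist,
                    // so the loop reaches a fixed point and terminates.
                    worklist.push(dep);
                }
            }
        }
        rp
    }

    fn main() {
        let mut dep_map = HashMap::new();
        dep_map.insert(1, vec![2]);    // item 2 embeds item 1
        dep_map.insert(2, vec![3, 4]); // items 3 and 4 embed item 2
        let rp = propagate(&dep_map, &[1]);
        let expected: HashSet<u32> = [1, 2, 3, 4].into_iter().collect();
        assert_eq!(rp, expected);
    }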
@ -552,7 +550,7 @@ pub impl determine_rp_ctxt {
// case it is bound. We handle this by setting a flag // case it is bound. We handle this by setting a flag
// (anon_implies_rp) to true when we enter an item and setting // (anon_implies_rp) to true when we enter an item and setting
// that flag to false when we enter a method. // that flag to false when we enter a method.
fn region_is_relevant(r: @ast::region) -> bool { fn region_is_relevant(@mut self, r: @ast::region) -> bool {
match r.node { match r.node {
ast::re_static => false, ast::re_static => false,
ast::re_anon => self.anon_implies_rp, ast::re_anon => self.anon_implies_rp,
@ -567,7 +565,9 @@ pub impl determine_rp_ctxt {
// //
// If the region is explicitly specified, then we follow the // If the region is explicitly specified, then we follow the
// normal rules. // normal rules.
fn opt_region_is_relevant(opt_r: Option<@ast::region>) -> bool { fn opt_region_is_relevant(@mut self,
opt_r: Option<@ast::region>)
-> bool {
debug!("opt_region_is_relevant: %? (anon_implies_rp=%b)", debug!("opt_region_is_relevant: %? (anon_implies_rp=%b)",
opt_r, self.anon_implies_rp); opt_r, self.anon_implies_rp);
match opt_r { match opt_r {
@ -576,9 +576,10 @@ pub impl determine_rp_ctxt {
} }
} }
fn with(item_id: ast::node_id, fn with(@mut self,
item_id: ast::node_id,
anon_implies_rp: bool, anon_implies_rp: bool,
f: fn()) { f: &fn()) {
let old_item_id = self.item_id; let old_item_id = self.item_id;
let old_anon_implies_rp = self.anon_implies_rp; let old_anon_implies_rp = self.anon_implies_rp;
self.item_id = item_id; self.item_id = item_id;
@ -590,7 +591,7 @@ pub impl determine_rp_ctxt {
self.anon_implies_rp = old_anon_implies_rp; self.anon_implies_rp = old_anon_implies_rp;
} }
fn with_ambient_variance(variance: region_variance, f: fn()) { fn with_ambient_variance(@mut self, variance: region_variance, f: &fn()) {
let old_ambient_variance = self.ambient_variance; let old_ambient_variance = self.ambient_variance;
self.ambient_variance = self.add_variance(variance); self.ambient_variance = self.add_variance(variance);
f(); f();
@ -599,8 +600,8 @@ pub impl determine_rp_ctxt {
} }
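`with` and `with_ambient_variance` give the traversal a dynamically scoped context: save the old values, install new ones, run the closure, restore. A sketch of the same idea using `&mut self` and a closure parameter instead of `@mut self` and `&fn()` (that signature change is a modernization for illustration only):

    // Scoped traversal context: save, install, run `f`, restore.
    struct Cx {
        item_id: u32,
        anon_implies_rp: bool,
    }

    impl Cx {
        fn with<R>(&mut self, item_id: u32, anon: bool, f: impl FnOnce(&mut Cx) -> R) -> R {
            let (old_id, old_anon) = (self.item_id, self.anon_implies_rp);
            self.item_id = item_id;
            self.anon_implies_rp = anon;
            let r = f(self);
            self.item_id = old_id;
            self.anon_implies_rp = old_anon;
            r
        }
    }

    fn main() {
        let mut cx = Cx { item_id: 0, anon_implies_rp: false };
        cx.with(7, true, |cx| {
            assert!(cx.anon_implies_rp); // inside an item
            cx.with(7, false, |cx| {
                assert!(!cx.anon_implies_rp); // inside a method of that item
            });
        });
        assert_eq!(cx.item_id, 0); // outer state restored
    }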
pub fn determine_rp_in_item(item: @ast::item, pub fn determine_rp_in_item(item: @ast::item,
&&cx: determine_rp_ctxt, &&cx: @mut DetermineRpCtxt,
visitor: visit::vt<determine_rp_ctxt>) { visitor: visit::vt<@mut DetermineRpCtxt>) {
do cx.with(item.id, true) { do cx.with(item.id, true) {
visit::visit_item(item, cx, visitor); visit::visit_item(item, cx, visitor);
} }
@ -609,10 +610,10 @@ pub fn determine_rp_in_item(item: @ast::item,
pub fn determine_rp_in_fn(fk: visit::fn_kind, pub fn determine_rp_in_fn(fk: visit::fn_kind,
decl: ast::fn_decl, decl: ast::fn_decl,
body: ast::blk, body: ast::blk,
_sp: span, _: span,
_id: ast::node_id, _: ast::node_id,
&&cx: determine_rp_ctxt, &&cx: @mut DetermineRpCtxt,
visitor: visit::vt<determine_rp_ctxt>) { visitor: visit::vt<@mut DetermineRpCtxt>) {
do cx.with(cx.item_id, false) { do cx.with(cx.item_id, false) {
do cx.with_ambient_variance(rv_contravariant) { do cx.with_ambient_variance(rv_contravariant) {
for decl.inputs.each |a| { for decl.inputs.each |a| {
@ -626,16 +627,16 @@ pub fn determine_rp_in_fn(fk: visit::fn_kind,
} }
pub fn determine_rp_in_ty_method(ty_m: ast::ty_method, pub fn determine_rp_in_ty_method(ty_m: ast::ty_method,
&&cx: determine_rp_ctxt, &&cx: @mut DetermineRpCtxt,
visitor: visit::vt<determine_rp_ctxt>) { visitor: visit::vt<@mut DetermineRpCtxt>) {
do cx.with(cx.item_id, false) { do cx.with(cx.item_id, false) {
visit::visit_ty_method(ty_m, cx, visitor); visit::visit_ty_method(ty_m, cx, visitor);
} }
} }
pub fn determine_rp_in_ty(ty: @ast::Ty, pub fn determine_rp_in_ty(ty: @ast::Ty,
&&cx: determine_rp_ctxt, &&cx: @mut DetermineRpCtxt,
visitor: visit::vt<determine_rp_ctxt>) { visitor: visit::vt<@mut DetermineRpCtxt>) {
// we are only interested in types that will require an item to // we are only interested in types that will require an item to
// be region-parameterized. if cx.item_id is zero, then this type // be region-parameterized. if cx.item_id is zero, then this type
// is not a member of a type defn nor is it a constituent of an // is not a member of a type defn nor is it a constituent of an
@ -647,10 +648,11 @@ pub fn determine_rp_in_ty(ty: @ast::Ty,
// respect to &r, because &r/ty can be used wherever a *smaller* // respect to &r, because &r/ty can be used wherever a *smaller*
// region is expected (and hence is a supertype of those // region is expected (and hence is a supertype of those
// locations) // locations)
let sess = cx.sess;
match ty.node { match ty.node {
ast::ty_rptr(r, _) => { ast::ty_rptr(r, _) => {
debug!("referenced rptr type %s", debug!("referenced rptr type %s",
pprust::ty_to_str(ty, cx.sess.intr())); pprust::ty_to_str(ty, sess.intr()));
if cx.region_is_relevant(r) { if cx.region_is_relevant(r) {
cx.add_rp(cx.item_id, cx.add_variance(rv_contravariant)) cx.add_rp(cx.item_id, cx.add_variance(rv_contravariant))
@ -659,7 +661,7 @@ pub fn determine_rp_in_ty(ty: @ast::Ty,
ast::ty_closure(ref f) => { ast::ty_closure(ref f) => {
debug!("referenced fn type: %s", debug!("referenced fn type: %s",
pprust::ty_to_str(ty, cx.sess.intr())); pprust::ty_to_str(ty, sess.intr()));
match f.region { match f.region {
Some(r) => { Some(r) => {
if cx.region_is_relevant(r) { if cx.region_is_relevant(r) {
@ -692,12 +694,12 @@ pub fn determine_rp_in_ty(ty: @ast::Ty,
cx.add_dep(did.node); cx.add_dep(did.node);
} }
} else { } else {
let cstore = cx.sess.cstore; let cstore = sess.cstore;
match csearch::get_region_param(cstore, did) { match csearch::get_region_param(cstore, did) {
None => {} None => {}
Some(variance) => { Some(variance) => {
debug!("reference to external, rp'd type %s", debug!("reference to external, rp'd type %s",
pprust::ty_to_str(ty, cx.sess.intr())); pprust::ty_to_str(ty, sess.intr()));
if cx.opt_region_is_relevant(path.rp) { if cx.opt_region_is_relevant(path.rp) {
cx.add_rp(cx.item_id, cx.add_variance(variance)) cx.add_rp(cx.item_id, cx.add_variance(variance))
} }
@ -752,8 +754,9 @@ pub fn determine_rp_in_ty(ty: @ast::Ty,
} }
} }
fn visit_mt(mt: ast::mt, &&cx: determine_rp_ctxt, fn visit_mt(mt: ast::mt,
visitor: visit::vt<determine_rp_ctxt>) { &&cx: @mut DetermineRpCtxt,
visitor: visit::vt<@mut DetermineRpCtxt>) {
// mutability is invariant // mutability is invariant
if mt.mutbl == ast::m_mutbl { if mt.mutbl == ast::m_mutbl {
do cx.with_ambient_variance(rv_invariant) { do cx.with_ambient_variance(rv_invariant) {
@ -765,9 +768,10 @@ pub fn determine_rp_in_ty(ty: @ast::Ty,
} }
} }
pub fn determine_rp_in_struct_field(cm: @ast::struct_field, pub fn determine_rp_in_struct_field(
&&cx: determine_rp_ctxt, cm: @ast::struct_field,
visitor: visit::vt<determine_rp_ctxt>) { &&cx: @mut DetermineRpCtxt,
visitor: visit::vt<@mut DetermineRpCtxt>) {
match cm.node.kind { match cm.node.kind {
ast::named_field(_, ast::struct_mutable, _) => { ast::named_field(_, ast::struct_mutable, _) => {
do cx.with_ambient_variance(rv_invariant) { do cx.with_ambient_variance(rv_invariant) {
@ -786,15 +790,17 @@ pub fn determine_rp_in_crate(sess: Session,
def_map: resolve::DefMap, def_map: resolve::DefMap,
crate: @ast::crate) crate: @ast::crate)
-> region_paramd_items { -> region_paramd_items {
let cx = determine_rp_ctxt_(@{sess: sess, let cx = @mut DetermineRpCtxt {
sess: sess,
ast_map: ast_map, ast_map: ast_map,
def_map: def_map, def_map: def_map,
region_paramd_items: HashMap(), region_paramd_items: HashMap(),
dep_map: HashMap(), dep_map: HashMap(),
worklist: DVec(), worklist: ~[],
mut item_id: 0, item_id: 0,
mut anon_implies_rp: false, anon_implies_rp: false,
mut ambient_variance: rv_covariant}); ambient_variance: rv_covariant
};
// Gather up the base set, worklist and dep_map // Gather up the base set, worklist and dep_map
let visitor = visit::mk_vt(@visit::Visitor { let visitor = visit::mk_vt(@visit::Visitor {
@ -833,7 +839,8 @@ pub fn determine_rp_in_crate(sess: Session,
debug!("%s", { debug!("%s", {
debug!("Region variance results:"); debug!("Region variance results:");
for cx.region_paramd_items.each_ref |&key, &value| { let region_paramd_items = cx.region_paramd_items;
for region_paramd_items.each_ref |&key, &value| {
debug!("item %? (%s) is parameterized with variance %?", debug!("item %? (%s) is parameterized with variance %?",
key, key,
ast_map::node_id_to_str(ast_map, key, ast_map::node_id_to_str(ast_map, key,

View file

@ -145,7 +145,7 @@ pub enum NamespaceResult {
UnboundResult, UnboundResult,
/// Means that resolve has determined that the name is bound in the Module /// Means that resolve has determined that the name is bound in the Module
/// argument, and specified by the NameBindings argument. /// argument, and specified by the NameBindings argument.
BoundResult(@Module, @NameBindings) BoundResult(@Module, @mut NameBindings)
} }
pub impl NamespaceResult { pub impl NamespaceResult {
@ -364,10 +364,10 @@ pub fn ImportDirective(privacy: Privacy,
/// The item that an import resolves to. /// The item that an import resolves to.
pub struct Target { pub struct Target {
target_module: @Module, target_module: @Module,
bindings: @NameBindings, bindings: @mut NameBindings,
} }
pub fn Target(target_module: @Module, bindings: @NameBindings) -> Target { pub fn Target(target_module: @Module, bindings: @mut NameBindings) -> Target {
Target { Target {
target_module: target_module, target_module: target_module,
bindings: bindings bindings: bindings
@ -385,18 +385,19 @@ pub struct ImportResolution {
// zero, outside modules can count on the targets being correct. Before // zero, outside modules can count on the targets being correct. Before
// then, all bets are off; future imports could override this name. // then, all bets are off; future imports could override this name.
mut outstanding_references: uint, outstanding_references: uint,
/// The value that this `use` directive names, if there is one. /// The value that this `use` directive names, if there is one.
mut value_target: Option<Target>, value_target: Option<Target>,
/// The type that this `use` directive names, if there is one. /// The type that this `use` directive names, if there is one.
mut type_target: Option<Target>, type_target: Option<Target>,
/// There exists one state per import statement /// There exists one state per import statement
state: @mut ImportState, state: @mut ImportState,
} }
pub fn ImportResolution(privacy: Privacy, span: span, pub fn ImportResolution(privacy: Privacy,
+span: span,
state: @mut ImportState) -> ImportResolution { state: @mut ImportState) -> ImportResolution {
ImportResolution { ImportResolution {
privacy: privacy, privacy: privacy,
@ -447,7 +448,7 @@ pub struct Module {
mut def_id: Option<def_id>, mut def_id: Option<def_id>,
kind: ModuleKind, kind: ModuleKind,
children: HashMap<ident,@NameBindings>, children: HashMap<ident,@mut NameBindings>,
imports: DVec<@ImportDirective>, imports: DVec<@ImportDirective>,
// The anonymous children of this node. Anonymous children are pseudo- // The anonymous children of this node. Anonymous children are pseudo-
@ -476,7 +477,7 @@ pub struct Module {
exported_names: HashMap<ident,node_id>, exported_names: HashMap<ident,node_id>,
// The status of resolving each import in this module. // The status of resolving each import in this module.
import_resolutions: HashMap<ident,@ImportResolution>, import_resolutions: HashMap<ident,@mut ImportResolution>,
// The number of unresolved globs that this module exports. // The number of unresolved globs that this module exports.
mut glob_count: uint, mut glob_count: uint,
@ -521,9 +522,9 @@ pub fn unused_import_lint_level(session: Session) -> level {
// Records a possibly-private type definition. // Records a possibly-private type definition.
pub struct TypeNsDef { pub struct TypeNsDef {
mut privacy: Privacy, privacy: Privacy,
mut module_def: Option<@Module>, module_def: Option<@Module>,
mut type_def: Option<def> type_def: Option<def>
} }
// Records a possibly-private value definition. // Records a possibly-private value definition.
@ -535,18 +536,19 @@ pub struct ValueNsDef {
// Records the definitions (at most one for each namespace) that a name is // Records the definitions (at most one for each namespace) that a name is
// bound to. // bound to.
pub struct NameBindings { pub struct NameBindings {
mut type_def: Option<TypeNsDef>, //< Meaning in type namespace. type_def: Option<TypeNsDef>, //< Meaning in type namespace.
mut value_def: Option<ValueNsDef>, //< Meaning in value namespace. value_def: Option<ValueNsDef>, //< Meaning in value namespace.
// For error reporting // For error reporting
// FIXME (#3783): Merge me into TypeNsDef and ValueNsDef. // FIXME (#3783): Merge me into TypeNsDef and ValueNsDef.
mut type_span: Option<span>, type_span: Option<span>,
mut value_span: Option<span>, value_span: Option<span>,
} }
pub impl NameBindings { pub impl NameBindings {
/// Creates a new module in this set of name bindings. /// Creates a new module in this set of name bindings.
fn define_module(privacy: Privacy, fn define_module(@mut self,
privacy: Privacy,
parent_link: ParentLink, parent_link: ParentLink,
def_id: Option<def_id>, def_id: Option<def_id>,
kind: ModuleKind, kind: ModuleKind,
@ -573,7 +575,7 @@ pub impl NameBindings {
} }
/// Records a type definition. /// Records a type definition.
fn define_type(privacy: Privacy, def: def, sp: span) { fn define_type(@mut self, privacy: Privacy, def: def, sp: span) {
// Merges the type with the existing type def or creates a new one. // Merges the type with the existing type def or creates a new one.
match self.type_def { match self.type_def {
None => { None => {
@ -595,7 +597,7 @@ pub impl NameBindings {
} }
/// Records a value definition. /// Records a value definition.
fn define_value(privacy: Privacy, def: def, sp: span) { fn define_value(@mut self, privacy: Privacy, def: def, sp: span) {
self.value_def = Some(ValueNsDef { privacy: privacy, def: def }); self.value_def = Some(ValueNsDef { privacy: privacy, def: def });
self.value_span = Some(sp); self.value_span = Some(sp);
} }
@ -612,7 +614,7 @@ pub impl NameBindings {
* Returns the module node. Fails if this node does not have a module * Returns the module node. Fails if this node does not have a module
* definition. * definition.
*/ */
fn get_module() -> @Module { fn get_module(@mut self) -> @Module {
match self.get_module_if_available() { match self.get_module_if_available() {
None => { None => {
die!(~"get_module called on a node with no module \ die!(~"get_module called on a node with no module \
@ -750,15 +752,15 @@ pub fn Resolver(session: Session,
lang_items: LanguageItems, lang_items: LanguageItems,
crate: @crate) crate: @crate)
-> Resolver { -> Resolver {
let graph_root = @NameBindings(); let graph_root = @mut NameBindings();
(*graph_root).define_module(Public, graph_root.define_module(Public,
NoParentLink, NoParentLink,
Some(def_id { crate: 0, node: 0 }), Some(def_id { crate: 0, node: 0 }),
NormalModuleKind, NormalModuleKind,
crate.span); crate.span);
let current_module = (*graph_root).get_module(); let current_module = graph_root.get_module();
let self = Resolver { let self = Resolver {
session: session, session: session,
@ -814,7 +816,7 @@ pub struct Resolver {
intr: @ident_interner, intr: @ident_interner,
graph_root: @NameBindings, graph_root: @mut NameBindings,
unused_import_lint_level: level, unused_import_lint_level: level,
@ -894,7 +896,7 @@ pub impl Resolver {
/// Constructs the reduced graph for the entire crate. /// Constructs the reduced graph for the entire crate.
fn build_reduced_graph(this: @Resolver) { fn build_reduced_graph(this: @Resolver) {
let initial_parent = let initial_parent =
ModuleReducedGraphParent((*self.graph_root).get_module()); ModuleReducedGraphParent(self.graph_root.get_module());
visit_crate(*self.crate, initial_parent, mk_vt(@Visitor { visit_crate(*self.crate, initial_parent, mk_vt(@Visitor {
visit_item: |item, context, visitor| visit_item: |item, context, visitor|
(*this).build_reduced_graph_for_item(item, context, visitor), (*this).build_reduced_graph_for_item(item, context, visitor),
@ -943,7 +945,7 @@ pub impl Resolver {
duplicate_checking_mode: DuplicateCheckingMode, duplicate_checking_mode: DuplicateCheckingMode,
// For printing errors // For printing errors
sp: span) sp: span)
-> (@NameBindings, ReducedGraphParent) { -> (@mut NameBindings, ReducedGraphParent) {
// If this is the immediate descendant of a module, then we add the // If this is the immediate descendant of a module, then we add the
// child name directly. Otherwise, we create or reuse an anonymous // child name directly. Otherwise, we create or reuse an anonymous
@ -960,7 +962,7 @@ pub impl Resolver {
let new_parent = ModuleReducedGraphParent(module_); let new_parent = ModuleReducedGraphParent(module_);
match module_.children.find(&name) { match module_.children.find(&name) {
None => { None => {
let child = @NameBindings(); let child = @mut NameBindings();
module_.children.insert(name, child); module_.children.insert(name, child);
return (child, new_parent); return (child, new_parent);
} }
@ -1080,14 +1082,14 @@ pub impl Resolver {
let parent_link = self.get_parent_link(new_parent, ident); let parent_link = self.get_parent_link(new_parent, ident);
let def_id = def_id { crate: 0, node: item.id }; let def_id = def_id { crate: 0, node: item.id };
(*name_bindings).define_module(privacy, name_bindings.define_module(privacy,
parent_link, parent_link,
Some(def_id), Some(def_id),
NormalModuleKind, NormalModuleKind,
sp); sp);
let new_parent = let new_parent =
ModuleReducedGraphParent((*name_bindings).get_module()); ModuleReducedGraphParent(name_bindings.get_module());
visit_mod(module_, sp, item.id, new_parent, visitor); visit_mod(module_, sp, item.id, new_parent, visitor);
} }
@ -1102,7 +1104,7 @@ pub impl Resolver {
let parent_link = self.get_parent_link(new_parent, let parent_link = self.get_parent_link(new_parent,
ident); ident);
let def_id = def_id { crate: 0, node: item.id }; let def_id = def_id { crate: 0, node: item.id };
(*name_bindings).define_module(privacy, name_bindings.define_module(privacy,
parent_link, parent_link,
Some(def_id), Some(def_id),
ExternModuleKind, ExternModuleKind,
@ -1124,7 +1126,7 @@ pub impl Resolver {
let (name_bindings, _) = let (name_bindings, _) =
self.add_child(ident, parent, ForbidDuplicateValues, sp); self.add_child(ident, parent, ForbidDuplicateValues, sp);
(*name_bindings).define_value name_bindings.define_value
(privacy, def_const(local_def(item.id)), sp); (privacy, def_const(local_def(item.id)), sp);
} }
item_fn(_, purity, _, _) => { item_fn(_, purity, _, _) => {
@ -1132,7 +1134,7 @@ pub impl Resolver {
self.add_child(ident, parent, ForbidDuplicateValues, sp); self.add_child(ident, parent, ForbidDuplicateValues, sp);
let def = def_fn(local_def(item.id), purity); let def = def_fn(local_def(item.id), purity);
(*name_bindings).define_value(privacy, def, sp); name_bindings.define_value(privacy, def, sp);
visit_item(item, new_parent, visitor); visit_item(item, new_parent, visitor);
} }
@ -1141,7 +1143,7 @@ pub impl Resolver {
let (name_bindings, _) = let (name_bindings, _) =
self.add_child(ident, parent, ForbidDuplicateTypes, sp); self.add_child(ident, parent, ForbidDuplicateTypes, sp);
(*name_bindings).define_type name_bindings.define_type
(privacy, def_ty(local_def(item.id)), sp); (privacy, def_ty(local_def(item.id)), sp);
} }
@ -1149,7 +1151,7 @@ pub impl Resolver {
let (name_bindings, new_parent) = let (name_bindings, new_parent) =
self.add_child(ident, parent, ForbidDuplicateTypes, sp); self.add_child(ident, parent, ForbidDuplicateTypes, sp);
(*name_bindings).define_type name_bindings.define_type
(privacy, def_ty(local_def(item.id)), sp); (privacy, def_ty(local_def(item.id)), sp);
for (*enum_definition).variants.each |variant| { for (*enum_definition).variants.each |variant| {
@ -1329,10 +1331,7 @@ pub impl Resolver {
let def_id = local_def(item.id); let def_id = local_def(item.id);
self.trait_info.insert(def_id, method_names); self.trait_info.insert(def_id, method_names);
(*name_bindings).define_type name_bindings.define_type(privacy, def_ty(def_id), sp);
(privacy,
def_ty(def_id),
sp);
visit_item(item, new_parent, visitor); visit_item(item, new_parent, visitor);
} }
@ -1363,20 +1362,20 @@ pub impl Resolver {
match variant.node.kind { match variant.node.kind {
tuple_variant_kind(_) => { tuple_variant_kind(_) => {
(*child).define_value(privacy, child.define_value(privacy,
def_variant(item_id, def_variant(item_id,
local_def(variant.node.id)), local_def(variant.node.id)),
variant.span); variant.span);
} }
struct_variant_kind(_) => { struct_variant_kind(_) => {
(*child).define_type(privacy, child.define_type(privacy,
def_variant(item_id, def_variant(item_id,
local_def(variant.node.id)), local_def(variant.node.id)),
variant.span); variant.span);
self.structs.insert(local_def(variant.node.id), ()); self.structs.insert(local_def(variant.node.id), ());
} }
enum_variant_kind(ref enum_definition) => { enum_variant_kind(ref enum_definition) => {
(*child).define_type(privacy, child.define_type(privacy,
def_ty(local_def(variant.node.id)), def_ty(local_def(variant.node.id)),
variant.span); variant.span);
for (*enum_definition).variants.each |variant| { for (*enum_definition).variants.each |variant| {
@ -1488,7 +1487,7 @@ pub impl Resolver {
NormalModuleKind, NormalModuleKind,
view_item.span); view_item.span);
self.build_reduced_graph_for_external_crate self.build_reduced_graph_for_external_crate
((*child_name_bindings).get_module()); (child_name_bindings.get_module());
} }
None => { None => {
/* Ignore. */ /* Ignore. */
@ -1512,7 +1511,7 @@ pub impl Resolver {
match /*bad*/copy foreign_item.node { match /*bad*/copy foreign_item.node {
foreign_item_fn(_, _, type_parameters) => { foreign_item_fn(_, _, type_parameters) => {
let def = def_fn(local_def(foreign_item.id), unsafe_fn); let def = def_fn(local_def(foreign_item.id), unsafe_fn);
(*name_bindings).define_value(Public, def, foreign_item.span); name_bindings.define_value(Public, def, foreign_item.span);
do self.with_type_parameter_rib do self.with_type_parameter_rib
(HasTypeParameters(&type_parameters, foreign_item.id, (HasTypeParameters(&type_parameters, foreign_item.id,
@ -1522,7 +1521,7 @@ pub impl Resolver {
} }
foreign_item_const(*) => { foreign_item_const(*) => {
let def = def_const(local_def(foreign_item.id)); let def = def_const(local_def(foreign_item.id));
(*name_bindings).define_value(Public, def, foreign_item.span); name_bindings.define_value(Public, def, foreign_item.span);
visit_foreign_item(foreign_item, new_parent, visitor); visit_foreign_item(foreign_item, new_parent, visitor);
} }
@ -1554,10 +1553,12 @@ pub impl Resolver {
visit_block(block, new_parent, visitor); visit_block(block, new_parent, visitor);
} }
fn handle_external_def(def: def, modules: HashMap<def_id, @Module>, fn handle_external_def(def: def,
child_name_bindings: @NameBindings, modules: HashMap<def_id, @Module>,
child_name_bindings: @mut NameBindings,
final_ident: ~str, final_ident: ~str,
ident: ident, new_parent: ReducedGraphParent) { ident: ident,
new_parent: ReducedGraphParent) {
match def { match def {
def_mod(def_id) | def_foreign_mod(def_id) => { def_mod(def_id) | def_foreign_mod(def_id) => {
match copy child_name_bindings.type_def { match copy child_name_bindings.type_def {
@ -1588,7 +1589,9 @@ pub impl Resolver {
// avoid creating cycles in the // avoid creating cycles in the
// module graph. // module graph.
let resolution = @ImportResolution(Public, dummy_sp(), let resolution =
@mut ImportResolution(Public,
dummy_sp(),
@mut ImportState()); @mut ImportState());
resolution.outstanding_references = 0; resolution.outstanding_references = 0;
@ -1618,7 +1621,7 @@ pub impl Resolver {
def_variant(*) => { def_variant(*) => {
debug!("(building reduced graph for external \ debug!("(building reduced graph for external \
crate) building value %s", final_ident); crate) building value %s", final_ident);
(*child_name_bindings).define_value(Public, def, dummy_sp()); child_name_bindings.define_value(Public, def, dummy_sp());
} }
def_ty(def_id) => { def_ty(def_id) => {
debug!("(building reduced graph for external \ debug!("(building reduced graph for external \
@ -1729,7 +1732,7 @@ pub impl Resolver {
_ => {} // Fall through. _ => {} // Fall through.
} }
current_module = (*child_name_bindings).get_module(); current_module = child_name_bindings.get_module();
} }
match def_like { match def_like {
@ -1867,7 +1870,8 @@ pub impl Resolver {
} }
None => { None => {
debug!("(building import directive) creating new"); debug!("(building import directive) creating new");
let resolution = @ImportResolution(privacy, span, let resolution = @mut ImportResolution(privacy,
span,
state); state);
let name = self.idents_to_str(module_path.get()); let name = self.idents_to_str(module_path.get());
// Don't warn about unused intrinsics because they're // Don't warn about unused intrinsics because they're
@ -1910,7 +1914,7 @@ pub impl Resolver {
debug!("(resolving imports) iteration %u, %u imports left", debug!("(resolving imports) iteration %u, %u imports left",
i, self.unresolved_imports); i, self.unresolved_imports);
let module_root = (*self.graph_root).get_module(); let module_root = self.graph_root.get_module();
self.resolve_imports_for_module_subtree(module_root); self.resolve_imports_for_module_subtree(module_root);
if self.unresolved_imports == 0 { if self.unresolved_imports == 0 {
@ -2191,7 +2195,8 @@ pub impl Resolver {
if import_resolution.outstanding_references if import_resolution.outstanding_references
== 0 => { == 0 => {
fn get_binding(import_resolution: @ImportResolution, fn get_binding(import_resolution:
@mut ImportResolution,
namespace: Namespace) namespace: Namespace)
-> NamespaceResult { -> NamespaceResult {
@ -2469,7 +2474,7 @@ pub impl Resolver {
None => { None => {
// Simple: just copy the old import resolution. // Simple: just copy the old import resolution.
let new_import_resolution = let new_import_resolution =
@ImportResolution(privacy, @mut ImportResolution(privacy,
target_import_resolution.span, target_import_resolution.span,
state); state);
new_import_resolution.value_target = new_import_resolution.value_target =
@ -2512,7 +2517,8 @@ pub impl Resolver {
match module_.import_resolutions.find(&ident) { match module_.import_resolutions.find(&ident) {
None => { None => {
// Create a new import resolution from this child. // Create a new import resolution from this child.
dest_import_resolution = @ImportResolution(privacy, span, dest_import_resolution = @mut ImportResolution(privacy,
span,
state); state);
module_.import_resolutions.insert module_.import_resolutions.insert
(ident, dest_import_resolution); (ident, dest_import_resolution);
@ -3202,7 +3208,7 @@ pub impl Resolver {
// processing. // processing.
fn record_exports() { fn record_exports() {
let root_module = (*self.graph_root).get_module(); let root_module = self.graph_root.get_module();
self.record_exports_for_module_subtree(root_module); self.record_exports_for_module_subtree(root_module);
} }
@ -3265,7 +3271,7 @@ pub impl Resolver {
fn add_exports_of_namebindings(exports2: &mut ~[Export2], fn add_exports_of_namebindings(exports2: &mut ~[Export2],
ident: ident, ident: ident,
namebindings: @NameBindings, namebindings: @mut NameBindings,
ns: Namespace, ns: Namespace,
reexport: bool) { reexport: bool) {
match (namebindings.def_for_namespace(ns), match (namebindings.def_for_namespace(ns),
@ -3721,7 +3727,7 @@ pub impl Resolver {
// If this is the main function, we must record it in the // If this is the main function, we must record it in the
// session. // session.
// FIXME #4404 android JNI hacks // FIXME #4404 android JNI hacks
if !self.session.building_library || if !*self.session.building_library ||
self.session.targ_cfg.os == session::os_android { self.session.targ_cfg.os == session::os_android {
if self.attr_main_fn.is_none() && if self.attr_main_fn.is_none() &&
@ -4673,7 +4679,7 @@ pub impl Resolver {
let module_path_idents = self.intern_module_part_of_path(path); let module_path_idents = self.intern_module_part_of_path(path);
let root_module = (*self.graph_root).get_module(); let root_module = self.graph_root.get_module();
let mut containing_module; let mut containing_module;
match self.resolve_module_path_from_root(root_module, match self.resolve_module_path_from_root(root_module,
@ -5172,10 +5178,10 @@ pub impl Resolver {
~"multiple 'main' functions"); ~"multiple 'main' functions");
i += 1; i += 1;
} }
self.session.main_fn = self.main_fns[0]; *self.session.main_fn = self.main_fns[0];
} }
} else { } else {
self.session.main_fn = self.attr_main_fn; *self.session.main_fn = self.attr_main_fn;
} }
} }
@ -5191,7 +5197,7 @@ pub impl Resolver {
return; return;
} }
let root_module = (*self.graph_root).get_module(); let root_module = self.graph_root.get_module();
self.check_for_unused_imports_in_module_subtree(root_module); self.check_for_unused_imports_in_module_subtree(root_module);
} }
@ -5245,15 +5251,15 @@ pub impl Resolver {
import_resolution.state.warned = true; import_resolution.state.warned = true;
match self.unused_import_lint_level { match self.unused_import_lint_level {
warn => { warn => {
self.session.span_warn(import_resolution.span, self.session.span_warn(copy import_resolution.span,
~"unused import"); ~"unused import");
} }
deny | forbid => { deny | forbid => {
self.session.span_err(import_resolution.span, self.session.span_err(copy import_resolution.span,
~"unused import"); ~"unused import");
} }
allow => { allow => {
self.session.span_bug(import_resolution.span, self.session.span_bug(copy import_resolution.span,
~"shouldn't be here if lint \ ~"shouldn't be here if lint \
is allowed"); is allowed");
} }

View file

@ -370,7 +370,7 @@ pub fn get_tydesc_simple(ccx: @crate_ctxt, t: ty::t) -> ValueRef {
get_tydesc(ccx, t).tydesc get_tydesc(ccx, t).tydesc
} }
pub fn get_tydesc(ccx: @crate_ctxt, t: ty::t) -> @tydesc_info { pub fn get_tydesc(ccx: @crate_ctxt, t: ty::t) -> @mut tydesc_info {
match ccx.tydescs.find(&t) { match ccx.tydescs.find(&t) {
Some(inf) => inf, Some(inf) => inf,
_ => { _ => {
@ -2159,15 +2159,15 @@ pub fn register_fn_fuller(ccx: @crate_ctxt,
// FIXME #4404 android JNI hacks // FIXME #4404 android JNI hacks
let is_main = is_main_fn(&ccx.sess, node_id) && let is_main = is_main_fn(&ccx.sess, node_id) &&
(!ccx.sess.building_library || (!*ccx.sess.building_library ||
(ccx.sess.building_library && (*ccx.sess.building_library &&
ccx.sess.targ_cfg.os == session::os_android)); ccx.sess.targ_cfg.os == session::os_android));
if is_main { create_main_wrapper(ccx, sp, llfn); } if is_main { create_main_wrapper(ccx, sp, llfn); }
llfn llfn
} }
pub fn is_main_fn(sess: &Session, node_id: ast::node_id) -> bool { pub fn is_main_fn(sess: &Session, node_id: ast::node_id) -> bool {
match sess.main_fn { match *sess.main_fn {
Some((main_id, _)) => node_id == main_id, Some((main_id, _)) => node_id == main_id,
None => false None => false
} }
@ -2210,7 +2210,7 @@ pub fn create_main_wrapper(ccx: @crate_ctxt, _sp: span, main_llfn: ValueRef) {
let llfty = T_fn(~[ccx.int_type, ccx.int_type], ccx.int_type); let llfty = T_fn(~[ccx.int_type, ccx.int_type], ccx.int_type);
// FIXME #4404 android JNI hacks // FIXME #4404 android JNI hacks
let llfn = if ccx.sess.building_library { let llfn = if *ccx.sess.building_library {
decl_cdecl_fn(ccx.llmod, ~"amain", llfty) decl_cdecl_fn(ccx.llmod, ~"amain", llfty)
} else { } else {
decl_cdecl_fn(ccx.llmod, main_name(), llfty) decl_cdecl_fn(ccx.llmod, main_name(), llfty)
@ -2230,14 +2230,20 @@ pub fn create_main_wrapper(ccx: @crate_ctxt, _sp: span, main_llfn: ValueRef) {
let start = decl_cdecl_fn(ccx.llmod, ~"rust_start", start_ty); let start = decl_cdecl_fn(ccx.llmod, ~"rust_start", start_ty);
let args = unsafe { let args = unsafe {
if ccx.sess.building_library { if *ccx.sess.building_library {
~[rust_main, ~[
rust_main,
llvm::LLVMConstInt(T_i32(), 0u as c_ulonglong, False), llvm::LLVMConstInt(T_i32(), 0u as c_ulonglong, False),
llvm::LLVMConstInt(T_i32(), 0u as c_ulonglong, False), llvm::LLVMConstInt(T_i32(), 0u as c_ulonglong, False),
crate_map] crate_map
]
} else { } else {
~[rust_main, llvm::LLVMGetParam(llfn, 0 as c_uint), ~[
llvm::LLVMGetParam(llfn, 1 as c_uint), crate_map] rust_main,
llvm::LLVMGetParam(llfn, 0 as c_uint),
llvm::LLVMGetParam(llfn, 1 as c_uint),
crate_map
]
} }
}; };
@ -2815,10 +2821,12 @@ pub fn decl_crate_map(sess: session::Session, mapmeta: link_meta,
let mut n_subcrates = 1; let mut n_subcrates = 1;
let cstore = sess.cstore; let cstore = sess.cstore;
while cstore::have_crate_data(cstore, n_subcrates) { n_subcrates += 1; } while cstore::have_crate_data(cstore, n_subcrates) { n_subcrates += 1; }
let mapname = if sess.building_library { let mapname = if *sess.building_library {
mapmeta.name.to_owned() + ~"_" + mapmeta.vers.to_owned() + ~"_" mapmeta.name.to_owned() + ~"_" + mapmeta.vers.to_owned() + ~"_"
+ mapmeta.extras_hash.to_owned() + mapmeta.extras_hash.to_owned()
} else { ~"toplevel" }; } else {
~"toplevel"
};
let sym_name = ~"_rust_crate_map_" + mapname; let sym_name = ~"_rust_crate_map_" + mapname;
let arrtype = T_array(int_type, n_subcrates as uint); let arrtype = T_array(int_type, n_subcrates as uint);
let maptype = T_struct(~[T_i32(), T_ptr(T_i8()), int_type, arrtype]); let maptype = T_struct(~[T_i32(), T_ptr(T_i8()), int_type, arrtype]);
@ -2891,7 +2899,7 @@ pub fn crate_ctxt_to_encode_parms(cx: @crate_ctxt) -> encoder::encode_parms {
} }
pub fn write_metadata(cx: @crate_ctxt, crate: &ast::crate) { pub fn write_metadata(cx: @crate_ctxt, crate: &ast::crate) {
if !cx.sess.building_library { return; } if !*cx.sess.building_library { return; }
let encode_parms = crate_ctxt_to_encode_parms(cx); let encode_parms = crate_ctxt_to_encode_parms(cx);
let llmeta = C_bytes(encoder::encode_metadata(encode_parms, crate)); let llmeta = C_bytes(encoder::encode_metadata(encode_parms, crate));
let llconst = C_struct(~[llmeta]); let llconst = C_struct(~[llmeta]);
@ -3016,18 +3024,19 @@ pub fn trans_crate(sess: session::Session,
all_llvm_symbols: HashMap(), all_llvm_symbols: HashMap(),
tcx: tcx, tcx: tcx,
maps: maps, maps: maps,
stats: stats: @mut Stats {
{mut n_static_tydescs: 0u, n_static_tydescs: 0u,
mut n_glues_created: 0u, n_glues_created: 0u,
mut n_null_glues: 0u, n_null_glues: 0u,
mut n_real_glues: 0u, n_real_glues: 0u,
mut n_fns: 0u, n_fns: 0u,
mut n_monos: 0u, n_monos: 0u,
mut n_inlines: 0u, n_inlines: 0u,
mut n_closures: 0u, n_closures: 0u,
llvm_insn_ctxt: @mut ~[], llvm_insn_ctxt: @mut ~[],
llvm_insns: HashMap(), llvm_insns: HashMap(),
fn_times: @mut ~[]}, fn_times: @mut ~[]
},
upcalls: upcall::declare_upcalls(targ_cfg, llmod), upcalls: upcall::declare_upcalls(targ_cfg, llmod),
tydesc_type: tydesc_type, tydesc_type: tydesc_type,
int_type: int_type, int_type: int_type,

View file

@ -391,20 +391,20 @@ pub fn trans_rtcall_or_lang_call_with_type_params(bcx: block,
} }
pub fn body_contains_ret(body: ast::blk) -> bool { pub fn body_contains_ret(body: ast::blk) -> bool {
let cx = {mut found: false}; let cx = @mut false;
visit::visit_block(body, cx, visit::mk_vt(@visit::Visitor { visit::visit_block(body, cx, visit::mk_vt(@visit::Visitor {
visit_item: |_i, _cx, _v| { }, visit_item: |_i, _cx, _v| { },
visit_expr: |e: @ast::expr, cx: {mut found: bool}, v| { visit_expr: |e: @ast::expr, cx: @mut bool, v| {
if !cx.found { if !*cx {
match e.node { match e.node {
ast::expr_ret(_) => cx.found = true, ast::expr_ret(_) => *cx = true,
_ => visit::visit_expr(e, cx, v), _ => visit::visit_expr(e, cx, v),
} }
} }
}, },
..*visit::default_visitor() ..*visit::default_visitor()
})); }));
cx.found *cx
} }
// See [Note-arg-mode] // See [Note-arg-mode]
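
A minimal sketch of the pattern this hunk applies to body_contains_ret, written in modern Rust because the 2013 @mut / visit::vt API no longer compiles; the Expr type and walk function are hypothetical stand-ins. The visitor state shrinks from a record with a `mut found` field to a single mutable flag that the walk writes through and the caller reads back.

    enum Expr {
        Ret,
        Add(Box<Expr>, Box<Expr>),
        Lit(i32),
    }

    // Recursive stand-in for the visitor: it only records whether a `ret` was seen.
    fn walk(e: &Expr, found: &mut bool) {
        if *found {
            return; // nothing left to learn once a `ret` is seen
        }
        match e {
            Expr::Ret => *found = true,
            Expr::Add(a, b) => {
                walk(a, found);
                walk(b, found);
            }
            Expr::Lit(_) => {}
        }
    }

    fn body_contains_ret(body: &Expr) -> bool {
        let mut found = false;
        walk(body, &mut found);
        found
    }

    fn main() {
        let body = Expr::Add(Box::new(Expr::Lit(1)), Box::new(Expr::Ret));
        assert!(body_contains_ret(&body));
    }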

View file

@ -87,16 +87,17 @@ pub fn new_addrspace_gen() -> addrspace_gen {
return fn@() -> addrspace { *i += 1; *i }; return fn@() -> addrspace { *i += 1; *i };
} }
pub type tydesc_info = pub struct tydesc_info {
{ty: ty::t, ty: ty::t,
tydesc: ValueRef, tydesc: ValueRef,
size: ValueRef, size: ValueRef,
align: ValueRef, align: ValueRef,
addrspace: addrspace, addrspace: addrspace,
mut take_glue: Option<ValueRef>, take_glue: Option<ValueRef>,
mut drop_glue: Option<ValueRef>, drop_glue: Option<ValueRef>,
mut free_glue: Option<ValueRef>, free_glue: Option<ValueRef>,
mut visit_glue: Option<ValueRef>}; visit_glue: Option<ValueRef>
}
/* /*
* A note on nomenclature of linking: "extern", "foreign", and "upcall". * A note on nomenclature of linking: "extern", "foreign", and "upcall".
@ -124,18 +125,19 @@ pub type tydesc_info =
* *
*/ */
pub type stats = pub struct Stats {
{mut n_static_tydescs: uint, n_static_tydescs: uint,
mut n_glues_created: uint, n_glues_created: uint,
mut n_null_glues: uint, n_null_glues: uint,
mut n_real_glues: uint, n_real_glues: uint,
mut n_fns: uint, n_fns: uint,
mut n_monos: uint, n_monos: uint,
mut n_inlines: uint, n_inlines: uint,
mut n_closures: uint, n_closures: uint,
llvm_insn_ctxt: @mut ~[~str], llvm_insn_ctxt: @mut ~[~str],
llvm_insns: HashMap<~str, uint>, llvm_insns: HashMap<~str, uint>,
fn_times: @mut ~[{ident: ~str, time: int}]}; fn_times: @mut ~[{ident: ~str, time: int}]
}
pub struct BuilderRef_res { pub struct BuilderRef_res {
B: BuilderRef, B: BuilderRef,
@ -170,7 +172,7 @@ pub struct crate_ctxt {
enum_sizes: HashMap<ty::t, uint>, enum_sizes: HashMap<ty::t, uint>,
discrims: HashMap<ast::def_id, ValueRef>, discrims: HashMap<ast::def_id, ValueRef>,
discrim_symbols: HashMap<ast::node_id, ~str>, discrim_symbols: HashMap<ast::node_id, ~str>,
tydescs: HashMap<ty::t, @tydesc_info>, tydescs: HashMap<ty::t, @mut tydesc_info>,
// Set when running emit_tydescs to enforce that no more tydescs are // Set when running emit_tydescs to enforce that no more tydescs are
// created. // created.
mut finished_tydescs: bool, mut finished_tydescs: bool,
@ -208,7 +210,7 @@ pub struct crate_ctxt {
all_llvm_symbols: Set<~str>, all_llvm_symbols: Set<~str>,
tcx: ty::ctxt, tcx: ty::ctxt,
maps: astencode::Maps, maps: astencode::Maps,
stats: stats, stats: @mut Stats,
upcalls: @upcall::upcalls, upcalls: @upcall::upcalls,
tydesc_type: TypeRef, tydesc_type: TypeRef,
int_type: TypeRef, int_type: TypeRef,
@ -216,7 +218,7 @@ pub struct crate_ctxt {
task_type: TypeRef, task_type: TypeRef,
opaque_vec_type: TypeRef, opaque_vec_type: TypeRef,
builder: BuilderRef_res, builder: BuilderRef_res,
shape_cx: shape::ctxt, shape_cx: shape::Ctxt,
crate_map: ValueRef, crate_map: ValueRef,
// Set when at least one function uses GC. Needed so that // Set when at least one function uses GC. Needed so that
// decl_gc_metadata knows whether to link to the module metadata, which // decl_gc_metadata knows whether to link to the module metadata, which
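
For the Stats change above, a hedged modern-Rust analogue (Rc<RefCell<...>> stands in for the old @mut box, and the field list is trimmed): the per-field `mut` counters become a plain struct mutated through one shared handle, which is what lets crate_ctxt hold `stats: @mut Stats`.

    use std::cell::RefCell;
    use std::rc::Rc;

    #[derive(Default, Debug)]
    struct Stats {
        n_fns: usize,
        n_monos: usize,
        n_inlines: usize,
    }

    // One handle, whole struct mutable at once, instead of per-field `mut`.
    fn record_fn(stats: &Rc<RefCell<Stats>>) {
        stats.borrow_mut().n_fns += 1;
    }

    fn main() {
        let stats = Rc::new(RefCell::new(Stats::default()));
        record_fn(&stats);
        record_fn(&stats);
        assert_eq!(stats.borrow().n_fns, 2);
        println!("{:?}", stats.borrow());
    }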

View file

@ -367,16 +367,16 @@ fn create_pointer_type(cx: @crate_ctxt, t: ty::t, span: span,
return mdval; return mdval;
} }
type struct_ctxt = { struct StructCtxt {
file: ValueRef, file: ValueRef,
name: ~str, name: ~str,
line: int, line: int,
mut members: ~[ValueRef], members: ~[ValueRef],
mut total_size: int, total_size: int,
align: int align: int
}; }
fn finish_structure(cx: @struct_ctxt) -> ValueRef { fn finish_structure(cx: @mut StructCtxt) -> ValueRef {
return create_composite_type(StructureTypeTag, return create_composite_type(StructureTypeTag,
/*bad*/copy cx.name, /*bad*/copy cx.name,
cx.file, cx.file,
@ -389,12 +389,13 @@ fn finish_structure(cx: @struct_ctxt) -> ValueRef {
} }
fn create_structure(file: @metadata<file_md>, +name: ~str, line: int) fn create_structure(file: @metadata<file_md>, +name: ~str, line: int)
-> @struct_ctxt { -> @mut StructCtxt {
let cx = @{file: file.node, let cx = @mut StructCtxt {
file: file.node,
name: name, name: name,
line: line, line: line,
mut members: ~[], members: ~[],
mut total_size: 0, total_size: 0,
align: 64 //XXX different alignment per arch? align: 64 //XXX different alignment per arch?
}; };
return cx; return cx;
@ -416,7 +417,11 @@ fn create_derived_type(type_tag: int, file: ValueRef, +name: ~str, line: int,
return llmdnode(lldata); return llmdnode(lldata);
} }
fn add_member(cx: @struct_ctxt, +name: ~str, line: int, size: int, align: int, fn add_member(cx: @mut StructCtxt,
+name: ~str,
line: int,
size: int,
align: int,
ty: ValueRef) { ty: ValueRef) {
cx.members.push(create_derived_type(MemberTag, cx.file, name, line, cx.members.push(create_derived_type(MemberTag, cx.file, name, line,
size * 8, align * 8, cx.total_size, size * 8, align * 8, cx.total_size,
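
Illustrative sketch only, with made-up fields: after this hunk the debuginfo builder threads one mutable StructCtxt through add_member/finish_structure instead of mutating individual `mut` record fields, accumulating members and a running total_size.

    struct StructCtxt {
        name: String,
        members: Vec<String>,
        total_size: i64,
    }

    // Each member is recorded at the current offset, then the running size grows.
    fn add_member(cx: &mut StructCtxt, name: &str, size: i64) {
        cx.members.push(format!("{name} @ offset {}", cx.total_size));
        cx.total_size += size;
    }

    fn finish_structure(cx: &StructCtxt) -> String {
        format!("{} ({} bits): {:?}", cx.name, cx.total_size, cx.members)
    }

    fn main() {
        let mut cx = StructCtxt { name: "Point".into(), members: Vec::new(), total_size: 0 };
        add_member(&mut cx, "x", 64);
        add_member(&mut cx, "y", 64);
        println!("{}", finish_structure(&cx));
    }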

View file

@ -143,7 +143,7 @@ pub fn free_ty_immediate(bcx: block, v: ValueRef, t: ty::t) -> block {
} }
pub fn lazily_emit_all_tydesc_glue(ccx: @crate_ctxt, pub fn lazily_emit_all_tydesc_glue(ccx: @crate_ctxt,
static_ti: @tydesc_info) { static_ti: @mut tydesc_info) {
lazily_emit_tydesc_glue(ccx, abi::tydesc_field_take_glue, static_ti); lazily_emit_tydesc_glue(ccx, abi::tydesc_field_take_glue, static_ti);
lazily_emit_tydesc_glue(ccx, abi::tydesc_field_drop_glue, static_ti); lazily_emit_tydesc_glue(ccx, abi::tydesc_field_drop_glue, static_ti);
lazily_emit_tydesc_glue(ccx, abi::tydesc_field_free_glue, static_ti); lazily_emit_tydesc_glue(ccx, abi::tydesc_field_free_glue, static_ti);
@ -204,7 +204,7 @@ pub fn simplified_glue_type(tcx: ty::ctxt, field: uint, t: ty::t) -> ty::t {
return t; return t;
} }
pub pure fn cast_glue(ccx: @crate_ctxt, ti: @tydesc_info, v: ValueRef) pub pure fn cast_glue(ccx: @crate_ctxt, ti: @mut tydesc_info, v: ValueRef)
-> ValueRef { -> ValueRef {
unsafe { unsafe {
let llfnty = type_of_glue_fn(ccx, ti.ty); let llfnty = type_of_glue_fn(ccx, ti.ty);
@ -214,7 +214,7 @@ pub pure fn cast_glue(ccx: @crate_ctxt, ti: @tydesc_info, v: ValueRef)
pub fn lazily_emit_simplified_tydesc_glue(ccx: @crate_ctxt, pub fn lazily_emit_simplified_tydesc_glue(ccx: @crate_ctxt,
field: uint, field: uint,
ti: @tydesc_info) -> bool { ti: @mut tydesc_info) -> bool {
let _icx = ccx.insn_ctxt("lazily_emit_simplified_tydesc_glue"); let _icx = ccx.insn_ctxt("lazily_emit_simplified_tydesc_glue");
let simpl = simplified_glue_type(ccx.tcx, field, ti.ty); let simpl = simplified_glue_type(ccx.tcx, field, ti.ty);
if simpl != ti.ty { if simpl != ti.ty {
@ -241,7 +241,7 @@ pub fn lazily_emit_simplified_tydesc_glue(ccx: @crate_ctxt,
pub fn lazily_emit_tydesc_glue(ccx: @crate_ctxt, pub fn lazily_emit_tydesc_glue(ccx: @crate_ctxt,
field: uint, field: uint,
ti: @tydesc_info) { ti: @mut tydesc_info) {
let _icx = ccx.insn_ctxt("lazily_emit_tydesc_glue"); let _icx = ccx.insn_ctxt("lazily_emit_tydesc_glue");
let llfnty = type_of_glue_fn(ccx, ti.ty); let llfnty = type_of_glue_fn(ccx, ti.ty);
@ -305,8 +305,11 @@ pub fn lazily_emit_tydesc_glue(ccx: @crate_ctxt,
} }
// See [Note-arg-mode] // See [Note-arg-mode]
pub fn call_tydesc_glue_full(++bcx: block, v: ValueRef, tydesc: ValueRef, pub fn call_tydesc_glue_full(++bcx: block,
field: uint, static_ti: Option<@tydesc_info>) { v: ValueRef,
tydesc: ValueRef,
field: uint,
static_ti: Option<@mut tydesc_info>) {
let _icx = bcx.insn_ctxt("call_tydesc_glue_full"); let _icx = bcx.insn_ctxt("call_tydesc_glue_full");
let ccx = bcx.ccx(); let ccx = bcx.ccx();
// NB: Don't short-circuit even if this block is unreachable because // NB: Don't short-circuit even if this block is unreachable because
@ -647,7 +650,7 @@ pub fn declare_tydesc_addrspace(ccx: @crate_ctxt, t: ty::t) -> addrspace {
} }
// Generates the declaration for (but doesn't emit) a type descriptor. // Generates the declaration for (but doesn't emit) a type descriptor.
pub fn declare_tydesc(ccx: @crate_ctxt, t: ty::t) -> @tydesc_info { pub fn declare_tydesc(ccx: @crate_ctxt, t: ty::t) -> @mut tydesc_info {
let _icx = ccx.insn_ctxt("declare_tydesc"); let _icx = ccx.insn_ctxt("declare_tydesc");
// If emit_tydescs already ran, then we shouldn't be creating any new // If emit_tydescs already ran, then we shouldn't be creating any new
// tydescs. // tydescs.
@ -678,16 +681,17 @@ pub fn declare_tydesc(ccx: @crate_ctxt, t: ty::t) -> @tydesc_info {
llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type, buf) llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type, buf)
} }
}); });
let inf = let inf = @mut tydesc_info {
@{ty: t, ty: t,
tydesc: gvar, tydesc: gvar,
size: llsize, size: llsize,
align: llalign, align: llalign,
addrspace: addrspace, addrspace: addrspace,
mut take_glue: None, take_glue: None,
mut drop_glue: None, drop_glue: None,
mut free_glue: None, free_glue: None,
mut visit_glue: None}; visit_glue: None
};
log(debug, ~"--- declare_tydesc " + ppaux::ty_to_str(ccx.tcx, t)); log(debug, ~"--- declare_tydesc " + ppaux::ty_to_str(ccx.tcx, t));
return inf; return inf;
} }
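
A small sketch, in modern Rust with hypothetical types, of the lazy-glue idea behind `@mut tydesc_info`: the Option glue slots start empty and the lazily_emit_* helpers fill each one the first time it is requested, through a single mutable reference to the whole descriptor.

    #[derive(Default)]
    struct TydescInfo {
        take_glue: Option<String>,
        drop_glue: Option<String>,
    }

    // Fill the slot on first use and hand back the cached value afterwards.
    fn lazily_emit_take_glue(ti: &mut TydescInfo) -> &str {
        ti.take_glue.get_or_insert_with(|| {
            // the expensive glue codegen would happen here
            String::from("take_glue_for_T")
        })
    }

    fn main() {
        let mut ti = TydescInfo::default();
        assert_eq!(lazily_emit_take_glue(&mut ti), "take_glue_for_T");
        // second call reuses the cached value
        assert_eq!(lazily_emit_take_glue(&mut ti), "take_glue_for_T");
        assert!(ti.drop_glue.is_none());
    }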

View file

@ -28,25 +28,24 @@ use std::oldmap::HashMap;
use syntax::ast::def_id; use syntax::ast::def_id;
use syntax::ast; use syntax::ast;
pub enum reflector = { pub struct Reflector {
visitor_val: ValueRef, visitor_val: ValueRef,
visitor_methods: @~[ty::method], visitor_methods: @~[ty::method],
final_bcx: block, final_bcx: block,
tydesc_ty: TypeRef, tydesc_ty: TypeRef,
mut bcx: block bcx: block
}; }
pub impl reflector { pub impl Reflector {
fn c_uint(&mut self, u: uint) -> ValueRef {
fn c_uint(u: uint) -> ValueRef {
C_uint(self.bcx.ccx(), u) C_uint(self.bcx.ccx(), u)
} }
fn c_int(i: int) -> ValueRef { fn c_int(&mut self, i: int) -> ValueRef {
C_int(self.bcx.ccx(), i) C_int(self.bcx.ccx(), i)
} }
fn c_slice(+s: ~str) -> ValueRef { fn c_slice(&mut self, +s: ~str) -> ValueRef {
// We're careful to not use first class aggregates here because that // We're careful to not use first class aggregates here because that
// will kick us off fast isel. (Issue #4352.) // will kick us off fast isel. (Issue #4352.)
let bcx = self.bcx; let bcx = self.bcx;
@ -60,7 +59,7 @@ pub impl reflector {
scratch.val scratch.val
} }
fn c_size_and_align(t: ty::t) -> ~[ValueRef] { fn c_size_and_align(&mut self, t: ty::t) -> ~[ValueRef] {
let tr = type_of::type_of(self.bcx.ccx(), t); let tr = type_of::type_of(self.bcx.ccx(), t);
let s = machine::llsize_of_real(self.bcx.ccx(), tr); let s = machine::llsize_of_real(self.bcx.ccx(), tr);
let a = machine::llalign_of_min(self.bcx.ccx(), tr); let a = machine::llalign_of_min(self.bcx.ccx(), tr);
@ -68,19 +67,19 @@ pub impl reflector {
self.c_uint(a)]; self.c_uint(a)];
} }
fn c_tydesc(t: ty::t) -> ValueRef { fn c_tydesc(&mut self, t: ty::t) -> ValueRef {
let bcx = self.bcx; let bcx = self.bcx;
let static_ti = get_tydesc(bcx.ccx(), t); let static_ti = get_tydesc(bcx.ccx(), t);
glue::lazily_emit_all_tydesc_glue(bcx.ccx(), static_ti); glue::lazily_emit_all_tydesc_glue(bcx.ccx(), static_ti);
PointerCast(bcx, static_ti.tydesc, T_ptr(self.tydesc_ty)) PointerCast(bcx, static_ti.tydesc, T_ptr(self.tydesc_ty))
} }
fn c_mt(mt: ty::mt) -> ~[ValueRef] { fn c_mt(&mut self, mt: ty::mt) -> ~[ValueRef] {
~[self.c_uint(mt.mutbl as uint), ~[self.c_uint(mt.mutbl as uint),
self.c_tydesc(mt.ty)] self.c_tydesc(mt.ty)]
} }
fn visit(ty_name: ~str, args: ~[ValueRef]) { fn visit(&mut self, ty_name: ~str, args: ~[ValueRef]) {
let tcx = self.bcx.tcx(); let tcx = self.bcx.tcx();
let mth_idx = ty::method_idx( let mth_idx = ty::method_idx(
tcx.sess.ident_of(~"visit_" + ty_name), tcx.sess.ident_of(~"visit_" + ty_name),
@ -114,15 +113,18 @@ pub impl reflector {
self.bcx = next_bcx self.bcx = next_bcx
} }
fn bracketed(bracket_name: ~str, +extra: ~[ValueRef], fn bracketed(&mut self,
inner: fn()) { bracket_name: ~str,
+extra: ~[ValueRef],
inner: &fn()) {
// XXX: Bad copy. // XXX: Bad copy.
self.visit(~"enter_" + bracket_name, copy extra); self.visit(~"enter_" + bracket_name, copy extra);
inner(); inner();
self.visit(~"leave_" + bracket_name, extra); self.visit(~"leave_" + bracket_name, extra);
} }
fn vstore_name_and_extra(t: ty::t, fn vstore_name_and_extra(&mut self,
t: ty::t,
vstore: ty::vstore, vstore: ty::vstore,
f: fn(+s: ~str,+v: ~[ValueRef])) { f: fn(+s: ~str,+v: ~[ValueRef])) {
match vstore { match vstore {
@ -137,13 +139,12 @@ pub impl reflector {
} }
} }
fn leaf(+name: ~str) { fn leaf(&mut self, +name: ~str) {
self.visit(name, ~[]); self.visit(name, ~[]);
} }
// Entrypoint // Entrypoint
fn visit_ty(t: ty::t) { fn visit_ty(&mut self, t: ty::t) {
let bcx = self.bcx; let bcx = self.bcx;
debug!("reflect::visit_ty %s", debug!("reflect::visit_ty %s",
ty_to_str(bcx.ccx().tcx, t)); ty_to_str(bcx.ccx().tcx, t));
@ -301,7 +302,7 @@ pub impl reflector {
} }
} }
fn visit_sig(&self, retval: uint, sig: &ty::FnSig) { fn visit_sig(&mut self, retval: uint, sig: &ty::FnSig) {
for sig.inputs.eachi |i, arg| { for sig.inputs.eachi |i, arg| {
let modeval = match arg.mode { let modeval = match arg.mode {
ast::infer(_) => 0u, ast::infer(_) => 0u,
@ -333,13 +334,13 @@ pub fn emit_calls_to_trait_visit_ty(bcx: block,
assert bcx.ccx().tcx.intrinsic_defs.contains_key_ref(&tydesc); assert bcx.ccx().tcx.intrinsic_defs.contains_key_ref(&tydesc);
let (_, tydesc_ty) = bcx.ccx().tcx.intrinsic_defs.get(&tydesc); let (_, tydesc_ty) = bcx.ccx().tcx.intrinsic_defs.get(&tydesc);
let tydesc_ty = type_of::type_of(bcx.ccx(), tydesc_ty); let tydesc_ty = type_of::type_of(bcx.ccx(), tydesc_ty);
let r = reflector({ let mut r = Reflector {
visitor_val: visitor_val, visitor_val: visitor_val,
visitor_methods: ty::trait_methods(bcx.tcx(), visitor_trait_id), visitor_methods: ty::trait_methods(bcx.tcx(), visitor_trait_id),
final_bcx: final, final_bcx: final,
tydesc_ty: tydesc_ty, tydesc_ty: tydesc_ty,
mut bcx: bcx bcx: bcx
}); };
r.visit_ty(t); r.visit_ty(t);
Br(r.bcx, final.llbb); Br(r.bcx, final.llbb);
return final; return final;
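
A hedged modern-Rust sketch of the Reflector rewrite (the block and visit types are stand-ins, not rustc's): the newtype'd record with an interior `mut bcx` becomes an ordinary struct whose methods take &mut self and advance the current block as they emit visit calls.

    struct Reflector {
        bcx: usize,          // stand-in for the current basic block
        visits: Vec<String>, // stand-in for the emitted visit_* calls
    }

    impl Reflector {
        fn visit(&mut self, ty_name: &str) {
            self.visits.push(format!("visit_{ty_name}"));
            self.bcx += 1; // the "next block" returned by the call
        }

        // enter/leave pair around a nested visit, as in bracketed() above.
        fn bracketed(&mut self, bracket: &str, inner: impl FnOnce(&mut Self)) {
            self.visit(&format!("enter_{bracket}"));
            inner(self);
            self.visit(&format!("leave_{bracket}"));
        }
    }

    fn main() {
        let mut r = Reflector { bcx: 0, visits: Vec::new() };
        r.bracketed("tup", |r| r.visit("uint"));
        assert_eq!(r.visits, ["visit_enter_tup", "visit_uint", "visit_leave_tup"]);
        assert_eq!(r.bcx, 3);
    }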

View file

@ -34,7 +34,11 @@ use syntax::util::interner;
use ty_ctxt = middle::ty::ctxt; use ty_ctxt = middle::ty::ctxt;
pub type ctxt = {mut next_tag_id: u16, pad: u16, pad2: u32}; pub struct Ctxt {
next_tag_id: u16,
pad: u16,
pad2: u32
}
pub fn mk_global(ccx: @crate_ctxt, pub fn mk_global(ccx: @crate_ctxt,
name: ~str, name: ~str,
@ -57,14 +61,18 @@ pub fn mk_global(ccx: @crate_ctxt,
} }
} }
pub fn mk_ctxt(llmod: ModuleRef) -> ctxt { pub fn mk_ctxt(llmod: ModuleRef) -> Ctxt {
unsafe { unsafe {
let llshapetablesty = trans::common::T_named_struct(~"shapes"); let llshapetablesty = trans::common::T_named_struct(~"shapes");
let _llshapetables = str::as_c_str(~"shapes", |buf| { let _llshapetables = str::as_c_str(~"shapes", |buf| {
llvm::LLVMAddGlobal(llmod, llshapetablesty, buf) llvm::LLVMAddGlobal(llmod, llshapetablesty, buf)
}); });
return {mut next_tag_id: 0u16, pad: 0u16, pad2: 0u32}; return Ctxt {
next_tag_id: 0u16,
pad: 0u16,
pad2: 0u32
};
} }
} }

View file

@ -232,7 +232,7 @@ struct ctxt_ {
vecs_implicitly_copyable: bool, vecs_implicitly_copyable: bool,
legacy_modes: bool, legacy_modes: bool,
legacy_records: bool, legacy_records: bool,
cstore: metadata::cstore::CStore, cstore: @mut metadata::cstore::CStore,
sess: session::Session, sess: session::Session,
def_map: resolve::DefMap, def_map: resolve::DefMap,

View file

@ -11,16 +11,16 @@
/*! /*!
* Conversion from AST representation of types to the ty.rs * Conversion from AST representation of types to the ty.rs
* representation. The main routine here is `ast_ty_to_ty()`: each use * representation. The main routine here is `ast_ty_to_ty()`: each use
* is parameterized by an instance of `ast_conv` and a `region_scope`. * is parameterized by an instance of `AstConv` and a `region_scope`.
* *
* The parameterization of `ast_ty_to_ty()` is because it behaves * The parameterization of `ast_ty_to_ty()` is because it behaves
* somewhat differently during the collect and check phases, particularly * somewhat differently during the collect and check phases, particularly
* with respect to looking up the types of top-level items. In the * with respect to looking up the types of top-level items. In the
* collect phase, the crate context is used as the `ast_conv` instance; * collect phase, the crate context is used as the `AstConv` instance;
* in this phase, the `get_item_ty()` function triggers a recursive call * in this phase, the `get_item_ty()` function triggers a recursive call
* to `ty_of_item()` (note that `ast_ty_to_ty()` will detect recursive * to `ty_of_item()` (note that `ast_ty_to_ty()` will detect recursive
* types and report an error). In the check phase, when the @fn_ctxt is * types and report an error). In the check phase, when the @FnCtxt is
* used as the `ast_conv`, `get_item_ty()` just looks up the item type in * used as the `AstConv`, `get_item_ty()` just looks up the item type in
* `tcx.tcache`. * `tcx.tcache`.
* *
* The `region_scope` trait controls how region references are * The `region_scope` trait controls how region references are
@ -31,7 +31,7 @@
* region, or `type_rscope`, which permits the self region if the type in * region, or `type_rscope`, which permits the self region if the type in
* question is parameterized by a region. * question is parameterized by a region.
* *
* Unlike the `ast_conv` trait, the region scope can change as we descend * Unlike the `AstConv` trait, the region scope can change as we descend
* the type. This is to accommodate the fact that (a) fn types are binding * the type. This is to accommodate the fact that (a) fn types are binding
* scopes and (b) the default region may change. To understand case (a), * scopes and (b) the default region may change. To understand case (a),
* consider something like: * consider something like:
@ -58,12 +58,11 @@ use middle::pat_util::pat_id_map;
use middle::ty::{arg, field, substs}; use middle::ty::{arg, field, substs};
use middle::ty::{ty_param_substs_and_ty}; use middle::ty::{ty_param_substs_and_ty};
use middle::ty; use middle::ty;
use middle::typeck::check::fn_ctxt;
use middle::typeck::collect; use middle::typeck::collect;
use middle::typeck::rscope::{anon_rscope, binding_rscope, empty_rscope}; use middle::typeck::rscope::{anon_rscope, binding_rscope, empty_rscope};
use middle::typeck::rscope::{in_anon_rscope, in_binding_rscope}; use middle::typeck::rscope::{in_anon_rscope, in_binding_rscope};
use middle::typeck::rscope::{region_scope, type_rscope}; use middle::typeck::rscope::{region_scope, type_rscope};
use middle::typeck::{crate_ctxt, write_substs_to_tcx, write_ty_to_tcx}; use middle::typeck::{CrateCtxt, write_substs_to_tcx, write_ty_to_tcx};
use core::result; use core::result;
use core::vec; use core::vec;
@ -72,13 +71,13 @@ use syntax::codemap::span;
use syntax::print::pprust::path_to_str; use syntax::print::pprust::path_to_str;
use util::common::indenter; use util::common::indenter;
pub trait ast_conv { pub trait AstConv {
fn tcx() -> ty::ctxt; fn tcx(@mut self) -> ty::ctxt;
fn ccx() -> @crate_ctxt; fn ccx(@mut self) -> @mut CrateCtxt;
fn get_item_ty(id: ast::def_id) -> ty::ty_param_bounds_and_ty; fn get_item_ty(@mut self, id: ast::def_id) -> ty::ty_param_bounds_and_ty;
// what type should we use when a type is omitted? // what type should we use when a type is omitted?
fn ty_infer(span: span) -> ty::t; fn ty_infer(@mut self, span: span) -> ty::t;
} }
pub fn get_region_reporting_err(tcx: ty::ctxt, pub fn get_region_reporting_err(tcx: ty::ctxt,
@ -95,9 +94,12 @@ pub fn get_region_reporting_err(tcx: ty::ctxt,
} }
} }
pub fn ast_region_to_region<AC: ast_conv, RS: region_scope Copy Durable>( pub fn ast_region_to_region<AC: AstConv, RS: region_scope Copy Durable>(
self: AC, rscope: RS, span: span, a_r: @ast::region) -> ty::Region { self: @mut AC,
rscope: RS,
span: span,
a_r: @ast::region)
-> ty::Region {
let res = match a_r.node { let res = match a_r.node {
ast::re_static => Ok(ty::re_static), ast::re_static => Ok(ty::re_static),
ast::re_anon => rscope.anon_region(span), ast::re_anon => rscope.anon_region(span),
@ -108,10 +110,12 @@ pub fn ast_region_to_region<AC: ast_conv, RS: region_scope Copy Durable>(
get_region_reporting_err(self.tcx(), span, res) get_region_reporting_err(self.tcx(), span, res)
} }
pub fn ast_path_to_substs_and_ty<AC: ast_conv, RS: region_scope Copy Durable>( pub fn ast_path_to_substs_and_ty<AC: AstConv, RS: region_scope Copy Durable>(
self: AC, rscope: RS, did: ast::def_id, self: @mut AC,
path: @ast::path) -> ty_param_substs_and_ty { rscope: RS,
did: ast::def_id,
path: @ast::path)
-> ty_param_substs_and_ty {
let tcx = self.tcx(); let tcx = self.tcx();
let {bounds: decl_bounds, region_param: decl_rp, ty: decl_ty} = let {bounds: decl_bounds, region_param: decl_rp, ty: decl_ty} =
self.get_item_ty(did); self.get_item_ty(did);
@ -158,13 +162,13 @@ pub fn ast_path_to_substs_and_ty<AC: ast_conv, RS: region_scope Copy Durable>(
{substs: substs, ty: ty} {substs: substs, ty: ty}
} }
pub fn ast_path_to_ty<AC: ast_conv, RS: region_scope Copy Durable>( pub fn ast_path_to_ty<AC: AstConv, RS: region_scope Copy Durable>(
self: AC, self: @mut AC,
rscope: RS, rscope: RS,
did: ast::def_id, did: ast::def_id,
path: @ast::path, path: @ast::path,
path_id: ast::node_id) -> ty_param_substs_and_ty { path_id: ast::node_id)
-> ty_param_substs_and_ty {
// Look up the polytype of the item and then substitute the provided types // Look up the polytype of the item and then substitute the provided types
// for any type/region parameters. // for any type/region parameters.
let tcx = self.tcx(); let tcx = self.tcx();
@ -181,11 +185,11 @@ pub const NO_TPS: uint = 2;
// Parses the programmer's textual representation of a type into our // Parses the programmer's textual representation of a type into our
// internal notion of a type. `getter` is a function that returns the type // internal notion of a type. `getter` is a function that returns the type
// corresponding to a definition ID: // corresponding to a definition ID:
pub fn ast_ty_to_ty<AC: ast_conv, RS: region_scope Copy Durable>( pub fn ast_ty_to_ty<AC: AstConv, RS: region_scope Copy Durable>(
self: AC, rscope: RS, &&ast_ty: @ast::Ty) -> ty::t { self: @mut AC, rscope: RS, &&ast_ty: @ast::Ty) -> ty::t {
fn ast_mt_to_mt<AC: ast_conv, RS: region_scope Copy Durable>( fn ast_mt_to_mt<AC: AstConv, RS: region_scope Copy Durable>(
self: AC, rscope: RS, mt: ast::mt) -> ty::mt { self: @mut AC, rscope: RS, mt: ast::mt) -> ty::mt {
ty::mt {ty: ast_ty_to_ty(self, rscope, mt.ty), mutbl: mt.mutbl} ty::mt {ty: ast_ty_to_ty(self, rscope, mt.ty), mutbl: mt.mutbl}
} }
@ -193,8 +197,8 @@ pub fn ast_ty_to_ty<AC: ast_conv, RS: region_scope Copy Durable>(
// Handle @, ~, and & being able to mean estrs and evecs. // Handle @, ~, and & being able to mean estrs and evecs.
// If a_seq_ty is a str or a vec, make it an estr/evec. // If a_seq_ty is a str or a vec, make it an estr/evec.
// Also handle function sigils and first-class trait types. // Also handle function sigils and first-class trait types.
fn mk_pointer<AC: ast_conv, RS: region_scope Copy Durable>( fn mk_pointer<AC: AstConv, RS: region_scope Copy Durable>(
self: AC, self: @mut AC,
rscope: RS, rscope: RS,
a_seq_ty: ast::mt, a_seq_ty: ast::mt,
vst: ty::vstore, vst: ty::vstore,
@ -409,10 +413,12 @@ pub fn ast_ty_to_ty<AC: ast_conv, RS: region_scope Copy Durable>(
return typ; return typ;
} }
pub fn ty_of_arg<AC: ast_conv, RS: region_scope Copy Durable>( pub fn ty_of_arg<AC: AstConv, RS: region_scope Copy Durable>(
self: AC, rscope: RS, a: ast::arg, self: @mut AC,
expected_ty: Option<ty::arg>) -> ty::arg { rscope: RS,
a: ast::arg,
expected_ty: Option<ty::arg>)
-> ty::arg {
let ty = match a.ty.node { let ty = match a.ty.node {
ast::ty_infer if expected_ty.is_some() => expected_ty.get().ty, ast::ty_infer if expected_ty.is_some() => expected_ty.get().ty,
ast::ty_infer => self.ty_infer(a.ty.span), ast::ty_infer => self.ty_infer(a.ty.span),
@ -455,12 +461,13 @@ pub fn ty_of_arg<AC: ast_conv, RS: region_scope Copy Durable>(
arg {mode: mode, ty: ty} arg {mode: mode, ty: ty}
} }
pub fn ty_of_bare_fn<AC: ast_conv, RS: region_scope Copy Durable>( pub fn ty_of_bare_fn<AC: AstConv, RS: region_scope Copy Durable>(
self: AC, rscope: RS, self: @mut AC,
rscope: RS,
purity: ast::purity, purity: ast::purity,
abi: ast::Abi, abi: ast::Abi,
decl: ast::fn_decl) -> ty::BareFnTy decl: ast::fn_decl)
{ -> ty::BareFnTy {
debug!("ty_of_fn_decl"); debug!("ty_of_fn_decl");
// new region names that appear inside of the fn decl are bound to // new region names that appear inside of the fn decl are bound to
@ -480,16 +487,17 @@ pub fn ty_of_bare_fn<AC: ast_conv, RS: region_scope Copy Durable>(
} }
} }
pub fn ty_of_closure<AC: ast_conv, RS: region_scope Copy Durable>( pub fn ty_of_closure<AC: AstConv, RS: region_scope Copy Durable>(
self: AC, rscope: RS, self: @mut AC,
rscope: RS,
sigil: ast::Sigil, sigil: ast::Sigil,
purity: ast::purity, purity: ast::purity,
onceness: ast::Onceness, onceness: ast::Onceness,
opt_region: Option<@ast::region>, opt_region: Option<@ast::region>,
decl: ast::fn_decl, decl: ast::fn_decl,
expected_tys: Option<ty::FnSig>, expected_tys: Option<ty::FnSig>,
span: span) -> ty::ClosureTy span: span)
{ -> ty::ClosureTy {
debug!("ty_of_fn_decl"); debug!("ty_of_fn_decl");
let _i = indenter(); let _i = indenter();
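
To make the AstConv / region_scope parameterization described in the module comment concrete, here is a minimal modern-Rust sketch under assumed types (CollectCtxt, EmptyRscope, and the string-typed "types" are illustrative, not rustc's API): the conversion function stays generic over both traits, and the context is now handed around as one mutable value.

    trait AstConv {
        // what to use when the source omits a type
        fn ty_infer(&mut self) -> String;
    }

    trait RegionScope {
        fn anon_region(&self) -> Result<String, String>;
    }

    struct CollectCtxt { fresh: u32 }

    impl AstConv for CollectCtxt {
        fn ty_infer(&mut self) -> String {
            self.fresh += 1;
            format!("_#{}", self.fresh) // a fresh type-variable name
        }
    }

    struct EmptyRscope;

    impl RegionScope for EmptyRscope {
        fn anon_region(&self) -> Result<String, String> {
            Err(String::from("anonymous regions not allowed here"))
        }
    }

    // Generic over the conversion context and the region scope, as described above.
    fn ast_ty_to_ty<AC: AstConv, RS: RegionScope>(cx: &mut AC, rscope: &RS, src: &str) -> String {
        match src {
            "_" => cx.ty_infer(),
            "&T" => match rscope.anon_region() {
                Ok(r) => format!("&{r} T"),
                Err(e) => format!("<error: {e}>"),
            },
            other => other.to_string(),
        }
    }

    fn main() {
        let mut cx = CollectCtxt { fresh: 0 };
        println!("{}", ast_ty_to_ty(&mut cx, &EmptyRscope, "_"));
        println!("{}", ast_ty_to_ty(&mut cx, &EmptyRscope, "&T"));
    }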

View file

@ -14,7 +14,7 @@ use middle::pat_util::{PatIdMap, pat_id_map, pat_is_binding, pat_is_const};
use middle::pat_util::{pat_is_variant_or_struct}; use middle::pat_util::{pat_is_variant_or_struct};
use middle::ty; use middle::ty;
use middle::typeck::check::demand; use middle::typeck::check::demand;
use middle::typeck::check::{check_block, check_expr_has_type, fn_ctxt}; use middle::typeck::check::{check_block, check_expr_has_type, FnCtxt};
use middle::typeck::check::{instantiate_path, lookup_def}; use middle::typeck::check::{instantiate_path, lookup_def};
use middle::typeck::check::{structure_of, valid_range_bounds}; use middle::typeck::check::{structure_of, valid_range_bounds};
use middle::typeck::require_same_types; use middle::typeck::require_same_types;
@ -27,7 +27,7 @@ use syntax::ast_util;
use syntax::codemap::span; use syntax::codemap::span;
use syntax::print::pprust; use syntax::print::pprust;
pub fn check_match(fcx: @fn_ctxt, pub fn check_match(fcx: @mut FnCtxt,
expr: @ast::expr, expr: @ast::expr,
discrim: @ast::expr, discrim: @ast::expr,
arms: ~[ast::arm]) -> bool { arms: ~[ast::arm]) -> bool {
@ -69,7 +69,7 @@ pub fn check_match(fcx: @fn_ctxt,
} }
pub struct pat_ctxt { pub struct pat_ctxt {
fcx: @fn_ctxt, fcx: @mut FnCtxt,
map: PatIdMap, map: PatIdMap,
match_region: ty::Region, // Region for the match as a whole match_region: ty::Region, // Region for the match as a whole
block_region: ty::Region, // Region for the block of the arm block_region: ty::Region, // Region for the block of the arm

View file

@ -10,7 +10,7 @@
use middle::ty; use middle::ty;
use middle::typeck::check::fn_ctxt; use middle::typeck::check::FnCtxt;
use middle::typeck::infer; use middle::typeck::infer;
use core::result::{Err, Ok}; use core::result::{Err, Ok};
@ -20,13 +20,12 @@ use syntax::codemap::span;
// Requires that the two types unify, and prints an error message if they // Requires that the two types unify, and prints an error message if they
// don't. // don't.
pub fn suptype(fcx: @fn_ctxt, sp: span, pub fn suptype(fcx: @mut FnCtxt, sp: span, expected: ty::t, actual: ty::t) {
expected: ty::t, actual: ty::t) {
suptype_with_fn(fcx, sp, expected, actual, suptype_with_fn(fcx, sp, expected, actual,
|sp, e, a, s| { fcx.report_mismatched_types(sp, e, a, s) }) |sp, e, a, s| { fcx.report_mismatched_types(sp, e, a, s) })
} }
pub fn suptype_with_fn(fcx: @fn_ctxt, pub fn suptype_with_fn(fcx: @mut FnCtxt,
sp: span, sp: span,
expected: ty::t, actual: ty::t, expected: ty::t, actual: ty::t,
handle_err: fn(span, ty::t, ty::t, &ty::type_err)) { handle_err: fn(span, ty::t, ty::t, &ty::type_err)) {
@ -40,7 +39,7 @@ pub fn suptype_with_fn(fcx: @fn_ctxt,
} }
} }
pub fn eqtype(fcx: @fn_ctxt, sp: span, expected: ty::t, actual: ty::t) { pub fn eqtype(fcx: @mut FnCtxt, sp: span, expected: ty::t, actual: ty::t) {
match infer::mk_eqty(fcx.infcx(), false, sp, actual, expected) { match infer::mk_eqty(fcx.infcx(), false, sp, actual, expected) {
Ok(()) => { /* ok */ } Ok(()) => { /* ok */ }
Err(ref err) => { Err(ref err) => {
@ -50,7 +49,7 @@ pub fn eqtype(fcx: @fn_ctxt, sp: span, expected: ty::t, actual: ty::t) {
} }
// Checks that the type `actual` can be coerced to `expected`. // Checks that the type `actual` can be coerced to `expected`.
pub fn coerce(fcx: @fn_ctxt, pub fn coerce(fcx: @mut FnCtxt,
sp: span, sp: span,
expected: ty::t, expected: ty::t,
expr: @ast::expr) { expr: @ast::expr) {
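
A tiny sketch (assumed signatures, string-typed "types") of the demand helpers above: suptype is suptype_with_fn specialized with the default mismatched-types reporter, and the callback parameter is what lets other callers substitute their own error handling.

    fn suptype_with_fn(expected: &str, actual: &str, handle_err: impl Fn(&str, &str)) {
        if expected != actual {
            handle_err(expected, actual);
        }
    }

    fn suptype(expected: &str, actual: &str) {
        // default reporter used when the caller has no special handling
        suptype_with_fn(expected, actual, |e, a| {
            eprintln!("mismatched types: expected `{e}`, found `{a}`")
        });
    }

    fn main() {
        suptype("uint", "uint"); // ok, silent
        suptype("uint", "~str"); // prints a mismatch report
    }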

View file

@ -85,7 +85,7 @@ use middle::resolve::{Impl, MethodInfo};
use middle::resolve; use middle::resolve;
use middle::ty::*; use middle::ty::*;
use middle::ty; use middle::ty;
use middle::typeck::check::{fn_ctxt, impl_self_ty}; use middle::typeck::check::{FnCtxt, impl_self_ty};
use middle::typeck::check::{structurally_resolved_type}; use middle::typeck::check::{structurally_resolved_type};
use middle::typeck::check::vtable::VtableContext; use middle::typeck::check::vtable::VtableContext;
use middle::typeck::check::vtable; use middle::typeck::check::vtable;
@ -112,7 +112,7 @@ use syntax::codemap::dummy_sp;
use syntax::codemap::span; use syntax::codemap::span;
pub fn lookup( pub fn lookup(
fcx: @fn_ctxt, fcx: @mut FnCtxt,
// In a call `a.b::<X, Y, ...>(...)`: // In a call `a.b::<X, Y, ...>(...)`:
expr: @ast::expr, // The expression `a.b`. expr: @ast::expr, // The expression `a.b`.
@ -143,7 +143,7 @@ pub fn lookup(
} }
pub struct LookupContext { pub struct LookupContext {
fcx: @fn_ctxt, fcx: @mut FnCtxt,
expr: @ast::expr, expr: @ast::expr,
self_expr: @ast::expr, self_expr: @ast::expr,
callee_id: node_id, callee_id: node_id,
@ -1264,7 +1264,7 @@ pub impl LookupContext {
ty::item_path_str(self.tcx(), did))); ty::item_path_str(self.tcx(), did)));
} }
fn infcx(&self) -> @infer::InferCtxt { fn infcx(&self) -> @mut infer::InferCtxt {
self.fcx.inh.infcx self.fcx.inh.infcx
} }

File diff suppressed because it is too large

View file

@ -35,7 +35,7 @@ use middle::ty::{encl_region, re_scope};
use middle::ty::{vstore_box, vstore_fixed, vstore_slice}; use middle::ty::{vstore_box, vstore_fixed, vstore_slice};
use middle::ty::{vstore_uniq}; use middle::ty::{vstore_uniq};
use middle::ty; use middle::ty;
use middle::typeck::check::fn_ctxt; use middle::typeck::check::FnCtxt;
use middle::typeck::check::lookup_def; use middle::typeck::check::lookup_def;
use middle::typeck::infer::{fres, resolve_and_force_all_but_regions}; use middle::typeck::infer::{fres, resolve_and_force_all_but_regions};
use middle::typeck::infer::{resolve_type}; use middle::typeck::infer::{resolve_type};
@ -49,10 +49,14 @@ use syntax::codemap::span;
use syntax::print::pprust; use syntax::print::pprust;
use syntax::visit; use syntax::visit;
pub enum rcx { rcx_({fcx: @fn_ctxt, mut errors_reported: uint}) } pub struct Rcx {
pub type rvt = visit::vt<@rcx>; fcx: @mut FnCtxt,
errors_reported: uint
}
pub fn encl_region_of_def(fcx: @fn_ctxt, def: ast::def) -> ty::Region { pub type rvt = visit::vt<@mut Rcx>;
pub fn encl_region_of_def(fcx: @mut FnCtxt, def: ast::def) -> ty::Region {
let tcx = fcx.tcx(); let tcx = fcx.tcx();
match def { match def {
def_local(node_id, _) | def_arg(node_id, _, _) | def_local(node_id, _) | def_arg(node_id, _, _) |
@ -71,8 +75,8 @@ pub fn encl_region_of_def(fcx: @fn_ctxt, def: ast::def) -> ty::Region {
} }
} }
pub impl @rcx { pub impl Rcx {
fn resolve_type(unresolved_ty: ty::t) -> ty::t { fn resolve_type(@mut self, unresolved_ty: ty::t) -> ty::t {
/*! /*!
* Try to resolve the type for the given node, returning * Try to resolve the type for the given node, returning
* t_err if an error results. Note that we never care * t_err if an error results. Note that we never care
@ -109,23 +113,22 @@ pub impl @rcx {
} }
/// Try to resolve the type for the given node. /// Try to resolve the type for the given node.
fn resolve_node_type(id: ast::node_id) -> ty::t { fn resolve_node_type(@mut self, id: ast::node_id) -> ty::t {
self.resolve_type(self.fcx.node_ty(id)) self.resolve_type(self.fcx.node_ty(id))
} }
} }
pub fn regionck_expr(fcx: @fn_ctxt, e: @ast::expr) { pub fn regionck_expr(fcx: @mut FnCtxt, e: @ast::expr) {
let rcx = rcx_({fcx:fcx, mut errors_reported: 0}); let rcx = @mut Rcx { fcx: fcx, errors_reported: 0 };
let v = regionck_visitor(); let v = regionck_visitor();
(v.visit_expr)(e, @(move rcx), v); (v.visit_expr)(e, rcx, v);
fcx.infcx().resolve_regions(); fcx.infcx().resolve_regions();
} }
pub fn regionck_fn(fcx: @fn_ctxt, pub fn regionck_fn(fcx: @mut FnCtxt, blk: ast::blk) {
blk: ast::blk) { let rcx = @mut Rcx { fcx: fcx, errors_reported: 0 };
let rcx = rcx_({fcx:fcx, mut errors_reported: 0});
let v = regionck_visitor(); let v = regionck_visitor();
(v.visit_block)(blk, @(move rcx), v); (v.visit_block)(blk, rcx, v);
fcx.infcx().resolve_regions(); fcx.infcx().resolve_regions();
} }
@ -138,11 +141,11 @@ pub fn regionck_visitor() -> rvt {
.. *visit::default_visitor()}) .. *visit::default_visitor()})
} }
pub fn visit_item(_item: @ast::item, &&_rcx: @rcx, _v: rvt) { pub fn visit_item(_item: @ast::item, &&_rcx: @mut Rcx, _v: rvt) {
// Ignore items // Ignore items
} }
pub fn visit_local(l: @ast::local, &&rcx: @rcx, v: rvt) { pub fn visit_local(l: @ast::local, &&rcx: @mut Rcx, v: rvt) {
// Check to make sure that the regions in all local variables are // Check to make sure that the regions in all local variables are
// within scope. // within scope.
// //
@ -173,11 +176,11 @@ pub fn visit_local(l: @ast::local, &&rcx: @rcx, v: rvt) {
} }
} }
pub fn visit_block(b: ast::blk, &&rcx: @rcx, v: rvt) { pub fn visit_block(b: ast::blk, &&rcx: @mut Rcx, v: rvt) {
visit::visit_block(b, rcx, v); visit::visit_block(b, rcx, v);
} }
pub fn visit_expr(expr: @ast::expr, &&rcx: @rcx, v: rvt) { pub fn visit_expr(expr: @ast::expr, &&rcx: @mut Rcx, v: rvt) {
debug!("visit_expr(e=%s)", rcx.fcx.expr_to_str(expr)); debug!("visit_expr(e=%s)", rcx.fcx.expr_to_str(expr));
for rcx.fcx.inh.adjustments.find(&expr.id).each |adjustment| { for rcx.fcx.inh.adjustments.find(&expr.id).each |adjustment| {
@ -292,11 +295,11 @@ pub fn visit_expr(expr: @ast::expr, &&rcx: @rcx, v: rvt) {
visit::visit_expr(expr, rcx, v); visit::visit_expr(expr, rcx, v);
} }
pub fn visit_stmt(s: @ast::stmt, &&rcx: @rcx, v: rvt) { pub fn visit_stmt(s: @ast::stmt, &&rcx: @mut Rcx, v: rvt) {
visit::visit_stmt(s, rcx, v); visit::visit_stmt(s, rcx, v);
} }
pub fn visit_node(id: ast::node_id, span: span, rcx: @rcx) -> bool { pub fn visit_node(id: ast::node_id, span: span, rcx: @mut Rcx) -> bool {
/*! /*!
* *
* checks the type of the node `id` and reports an error if it * checks the type of the node `id` and reports an error if it
@ -315,7 +318,7 @@ pub fn visit_node(id: ast::node_id, span: span, rcx: @rcx) -> bool {
constrain_regions_in_type_of_node(rcx, id, encl_region, span) constrain_regions_in_type_of_node(rcx, id, encl_region, span)
} }
pub fn constrain_auto_ref(rcx: @rcx, expr: @ast::expr) { pub fn constrain_auto_ref(rcx: @mut Rcx, expr: @ast::expr) {
/*! /*!
* *
* If `expr` is auto-ref'd (e.g., as part of a borrow), then this * If `expr` is auto-ref'd (e.g., as part of a borrow), then this
@ -360,7 +363,7 @@ pub fn constrain_auto_ref(rcx: @rcx, expr: @ast::expr) {
} }
pub fn constrain_free_variables( pub fn constrain_free_variables(
rcx: @rcx, rcx: @mut Rcx,
region: ty::Region, region: ty::Region,
expr: @ast::expr) { expr: @ast::expr) {
/*! /*!
@ -396,7 +399,7 @@ pub fn constrain_free_variables(
} }
pub fn constrain_regions_in_type_of_node( pub fn constrain_regions_in_type_of_node(
rcx: @rcx, rcx: @mut Rcx,
id: ast::node_id, id: ast::node_id,
encl_region: ty::Region, encl_region: ty::Region,
span: span) -> bool { span: span) -> bool {
@ -413,7 +416,7 @@ pub fn constrain_regions_in_type_of_node(
} }
pub fn constrain_regions_in_type( pub fn constrain_regions_in_type(
rcx: @rcx, rcx: @mut Rcx,
encl_region: ty::Region, encl_region: ty::Region,
span: span, span: span,
ty: ty::t) -> bool { ty: ty::t) -> bool {
@ -434,7 +437,7 @@ pub fn constrain_regions_in_type(
|t| ty::type_has_regions(t)); |t| ty::type_has_regions(t));
return (e == rcx.errors_reported); return (e == rcx.errors_reported);
fn constrain_region(rcx: @rcx, fn constrain_region(rcx: @mut Rcx,
encl_region: ty::Region, encl_region: ty::Region,
span: span, span: span,
region: ty::Region) { region: ty::Region) {
@ -521,13 +524,13 @@ pub mod guarantor {
*/ */
use core::prelude::*; use core::prelude::*;
use middle::typeck::check::regionck::{rcx, infallibly_mk_subr}; use middle::typeck::check::regionck::{Rcx, infallibly_mk_subr};
use middle::ty; use middle::ty;
use syntax::ast; use syntax::ast;
use syntax::codemap::span; use syntax::codemap::span;
use util::ppaux::{ty_to_str}; use util::ppaux::{ty_to_str};
pub fn for_addr_of(rcx: @rcx, expr: @ast::expr, base: @ast::expr) { pub fn for_addr_of(rcx: @mut Rcx, expr: @ast::expr, base: @ast::expr) {
/*! /*!
* *
* Computes the guarantor for an expression `&base` and then * Computes the guarantor for an expression `&base` and then
@ -542,7 +545,7 @@ pub mod guarantor {
link(rcx, expr.span, expr.id, guarantor); link(rcx, expr.span, expr.id, guarantor);
} }
pub fn for_match(rcx: @rcx, discr: @ast::expr, arms: &[ast::arm]) { pub fn for_match(rcx: @mut Rcx, discr: @ast::expr, arms: &[ast::arm]) {
/*! /*!
* *
* Computes the guarantors for any ref bindings in a match and * Computes the guarantors for any ref bindings in a match and
@ -558,11 +561,10 @@ pub mod guarantor {
} }
} }
pub fn for_autoref(rcx: @rcx, pub fn for_autoref(rcx: @mut Rcx,
expr: @ast::expr, expr: @ast::expr,
adjustment: &ty::AutoAdjustment, adjustment: &ty::AutoAdjustment,
autoref: &ty::AutoRef) autoref: &ty::AutoRef) {
{
/*! /*!
* *
* Computes the guarantor for an expression that has an * Computes the guarantor for an expression that has an
@ -583,11 +585,10 @@ pub mod guarantor {
} }
fn link( fn link(
rcx: @rcx, rcx: @mut Rcx,
span: span, span: span,
id: ast::node_id, id: ast::node_id,
guarantor: Option<ty::Region>) guarantor: Option<ty::Region>) {
{
/*! /*!
* *
* Links the lifetime of the borrowed pointer resulting from a borrow * Links the lifetime of the borrowed pointer resulting from a borrow
@ -640,7 +641,7 @@ pub mod guarantor {
ty: ty::t ty: ty::t
} }
fn guarantor(rcx: @rcx, expr: @ast::expr) -> Option<ty::Region> { fn guarantor(rcx: @mut Rcx, expr: @ast::expr) -> Option<ty::Region> {
/*! /*!
* *
* Computes the guarantor of `expr`, or None if `expr` is * Computes the guarantor of `expr`, or None if `expr` is
@ -715,7 +716,7 @@ pub mod guarantor {
} }
} }
fn categorize(rcx: @rcx, expr: @ast::expr) -> ExprCategorization { fn categorize(rcx: @mut Rcx, expr: @ast::expr) -> ExprCategorization {
debug!("categorize(expr=%s)", rcx.fcx.expr_to_str(expr)); debug!("categorize(expr=%s)", rcx.fcx.expr_to_str(expr));
let _i = ::util::common::indenter(); let _i = ::util::common::indenter();
@ -741,8 +742,9 @@ pub mod guarantor {
return expr_ct.cat; return expr_ct.cat;
} }
fn categorize_unadjusted(rcx: @rcx, fn categorize_unadjusted(rcx: @mut Rcx,
expr: @ast::expr) -> ExprCategorizationType { expr: @ast::expr)
-> ExprCategorizationType {
debug!("categorize_unadjusted(expr=%s)", rcx.fcx.expr_to_str(expr)); debug!("categorize_unadjusted(expr=%s)", rcx.fcx.expr_to_str(expr));
let _i = ::util::common::indenter(); let _i = ::util::common::indenter();
@ -765,11 +767,11 @@ pub mod guarantor {
} }
fn apply_autoderefs( fn apply_autoderefs(
+rcx: @rcx, +rcx: @mut Rcx,
+expr: @ast::expr, +expr: @ast::expr,
+autoderefs: uint, +autoderefs: uint,
+ct: ExprCategorizationType) -> ExprCategorizationType +ct: ExprCategorizationType)
{ -> ExprCategorizationType {
let mut ct = ct; let mut ct = ct;
let tcx = rcx.fcx.ccx.tcx; let tcx = rcx.fcx.ccx.tcx;
for uint::range(0, autoderefs) |_| { for uint::range(0, autoderefs) |_| {
@ -824,10 +826,9 @@ pub mod guarantor {
} }
fn link_ref_bindings_in_pat( fn link_ref_bindings_in_pat(
rcx: @rcx, rcx: @mut Rcx,
pat: @ast::pat, pat: @ast::pat,
guarantor: Option<ty::Region>) guarantor: Option<ty::Region>) {
{
/*! /*!
* *
* Descends through the pattern, tracking the guarantor * Descends through the pattern, tracking the guarantor
@ -901,10 +902,9 @@ pub mod guarantor {
} }
} }
fn link_ref_bindings_in_pats(rcx: @rcx, fn link_ref_bindings_in_pats(rcx: @mut Rcx,
pats: &~[@ast::pat], pats: &~[@ast::pat],
guarantor: Option<ty::Region>) guarantor: Option<ty::Region>) {
{
for pats.each |pat| { for pats.each |pat| {
link_ref_bindings_in_pat(rcx, *pat, guarantor); link_ref_bindings_in_pat(rcx, *pat, guarantor);
} }
@ -912,7 +912,7 @@ pub mod guarantor {
} }
pub fn infallibly_mk_subr(rcx: @rcx, pub fn infallibly_mk_subr(rcx: @mut Rcx,
a_is_expected: bool, a_is_expected: bool,
span: span, span: span,
a: ty::Region, a: ty::Region,
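
A modern-Rust sketch, with a hypothetical checker, of the Rcx pattern these hunks introduce: one mutable context owns the errors_reported counter, and constrain_regions_in_type reports success by comparing the count before and after it runs.

    struct Rcx {
        errors_reported: usize,
    }

    impl Rcx {
        fn report_error(&mut self, msg: &str) {
            eprintln!("region error: {msg}");
            self.errors_reported += 1;
        }
    }

    fn constrain_region(rcx: &mut Rcx, region_ok: bool) {
        if !region_ok {
            rcx.report_error("reference outlives its referent");
        }
    }

    // Returns true iff this type produced no new errors, mirroring
    // the `e == rcx.errors_reported` check in the hunk above.
    fn constrain_regions_in_type(rcx: &mut Rcx, regions: &[bool]) -> bool {
        let before = rcx.errors_reported;
        for &ok in regions {
            constrain_region(rcx, ok);
        }
        before == rcx.errors_reported
    }

    fn main() {
        let mut rcx = Rcx { errors_reported: 0 };
        assert!(constrain_regions_in_type(&mut rcx, &[true, true]));
        assert!(!constrain_regions_in_type(&mut rcx, &[true, false]));
        assert_eq!(rcx.errors_reported, 1);
    }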

View file

@ -13,12 +13,12 @@ use core::prelude::*;
use middle::resolve; use middle::resolve;
use middle::ty::{param_ty, substs}; use middle::ty::{param_ty, substs};
use middle::ty; use middle::ty;
use middle::typeck::check::{fn_ctxt, impl_self_ty}; use middle::typeck::check::{FnCtxt, impl_self_ty};
use middle::typeck::check::{structurally_resolved_type}; use middle::typeck::check::{structurally_resolved_type};
use middle::typeck::infer::{fixup_err_to_str, InferCtxt}; use middle::typeck::infer::{fixup_err_to_str, InferCtxt};
use middle::typeck::infer::{resolve_and_force_all_but_regions, resolve_type}; use middle::typeck::infer::{resolve_and_force_all_but_regions, resolve_type};
use middle::typeck::infer; use middle::typeck::infer;
use middle::typeck::{crate_ctxt, vtable_origin, vtable_param, vtable_res}; use middle::typeck::{CrateCtxt, vtable_origin, vtable_param, vtable_res};
use middle::typeck::{vtable_static, vtable_trait}; use middle::typeck::{vtable_static, vtable_trait};
use util::common::indenter; use util::common::indenter;
use util::ppaux::tys_to_str; use util::ppaux::tys_to_str;
@ -63,8 +63,8 @@ pub struct LocationInfo {
/// A vtable context includes an inference context, a crate context, and a /// A vtable context includes an inference context, a crate context, and a
/// callback function to call in case of type error. /// callback function to call in case of type error.
pub struct VtableContext { pub struct VtableContext {
ccx: @crate_ctxt, ccx: @mut CrateCtxt,
infcx: @infer::InferCtxt infcx: @mut infer::InferCtxt
} }
pub impl VtableContext { pub impl VtableContext {
@ -501,11 +501,13 @@ pub fn connect_trait_tps(vcx: &VtableContext,
} }
} }
pub fn insert_vtables(ccx: @crate_ctxt, callee_id: ast::node_id, pub fn insert_vtables(ccx: @mut CrateCtxt,
callee_id: ast::node_id,
vtables: vtable_res) { vtables: vtable_res) {
debug!("insert_vtables(callee_id=%d, vtables=%?)", debug!("insert_vtables(callee_id=%d, vtables=%?)",
callee_id, vtables.map(|v| v.to_str(ccx.tcx))); callee_id, vtables.map(|v| v.to_str(ccx.tcx)));
ccx.vtable_map.insert(callee_id, vtables); let vtable_map = ccx.vtable_map;
vtable_map.insert(callee_id, vtables);
} }
pub fn location_info_for_expr(expr: @ast::expr) -> LocationInfo { pub fn location_info_for_expr(expr: @ast::expr) -> LocationInfo {
@ -515,7 +517,9 @@ pub fn location_info_for_expr(expr: @ast::expr) -> LocationInfo {
} }
} }
pub fn early_resolve_expr(ex: @ast::expr, &&fcx: @fn_ctxt, is_early: bool) { pub fn early_resolve_expr(ex: @ast::expr,
&&fcx: @mut FnCtxt,
is_early: bool) {
debug!("vtable: early_resolve_expr() ex with id %? (early: %b): %s", debug!("vtable: early_resolve_expr() ex with id %? (early: %b): %s",
ex.id, is_early, expr_to_str(ex, fcx.tcx().sess.intr())); ex.id, is_early, expr_to_str(ex, fcx.tcx().sess.intr()));
let _indent = indenter(); let _indent = indenter();
@ -540,7 +544,10 @@ pub fn early_resolve_expr(ex: @ast::expr, &&fcx: @fn_ctxt, is_early: bool) {
let vtbls = lookup_vtables(&vcx, &location_info_for_expr(ex), let vtbls = lookup_vtables(&vcx, &location_info_for_expr(ex),
item_ty.bounds, substs, false, item_ty.bounds, substs, false,
is_early); is_early);
if !is_early { cx.vtable_map.insert(ex.id, vtbls); } if !is_early {
let vtable_map = cx.vtable_map;
vtable_map.insert(ex.id, vtbls);
}
} }
} }
_ => () _ => ()
@ -625,8 +632,10 @@ pub fn early_resolve_expr(ex: @ast::expr, &&fcx: @fn_ctxt, is_early: bool) {
// vtable (that is: "ex has vtable // vtable (that is: "ex has vtable
// <vtable>") // <vtable>")
if !is_early { if !is_early {
cx.vtable_map.insert( let vtable_map =
ex.id, @~[vtable]); cx.vtable_map;
vtable_map.insert(ex.id,
@~[vtable]);
} }
} }
None => err = true None => err = true
@ -696,7 +705,10 @@ pub fn early_resolve_expr(ex: @ast::expr, &&fcx: @fn_ctxt, is_early: bool) {
Map this expression to that vtable (that is: "ex has Map this expression to that vtable (that is: "ex has
vtable <vtable>") vtable <vtable>")
*/ */
if !is_early { cx.vtable_map.insert(ex.id, @~[vtable]); } if !is_early {
let vtable_map = cx.vtable_map;
vtable_map.insert(ex.id, @~[vtable]);
}
fcx.tcx().legacy_boxed_traits.insert(ex.id, ()); fcx.tcx().legacy_boxed_traits.insert(ex.id, ());
} }
} }
@ -709,19 +721,18 @@ pub fn early_resolve_expr(ex: @ast::expr, &&fcx: @fn_ctxt, is_early: bool) {
} }
pub fn resolve_expr(ex: @ast::expr, pub fn resolve_expr(ex: @ast::expr,
&&fcx: @fn_ctxt, &&fcx: @mut FnCtxt,
v: visit::vt<@fn_ctxt>) { v: visit::vt<@mut FnCtxt>) {
early_resolve_expr(ex, fcx, false); early_resolve_expr(ex, fcx, false);
visit::visit_expr(ex, fcx, v); visit::visit_expr(ex, fcx, v);
} }
// Detect points where a trait-bounded type parameter is // Detect points where a trait-bounded type parameter is
// instantiated, resolve the impls for the parameters. // instantiated, resolve the impls for the parameters.
pub fn resolve_in_block(fcx: @fn_ctxt, bl: ast::blk) { pub fn resolve_in_block(fcx: @mut FnCtxt, bl: ast::blk) {
visit::visit_block(bl, fcx, visit::mk_vt(@visit::Visitor { visit::visit_block(bl, fcx, visit::mk_vt(@visit::Visitor {
visit_expr: resolve_expr, visit_expr: resolve_expr,
visit_item: fn@(_i: @ast::item, &&_e: @fn_ctxt, visit_item: |_,_,_| {},
_v: visit::vt<@fn_ctxt>) {},
.. *visit::default_visitor() .. *visit::default_visitor()
})); }));
} }

View file

@ -17,7 +17,7 @@ use core::prelude::*;
use middle::pat_util; use middle::pat_util;
use middle::ty::arg; use middle::ty::arg;
use middle::ty; use middle::ty;
use middle::typeck::check::{fn_ctxt, self_info}; use middle::typeck::check::{FnCtxt, self_info};
use middle::typeck::infer::{force_all, resolve_all, resolve_region}; use middle::typeck::infer::{force_all, resolve_all, resolve_region};
use middle::typeck::infer::{resolve_type}; use middle::typeck::infer::{resolve_type};
use middle::typeck::infer; use middle::typeck::infer;
@ -34,7 +34,7 @@ use syntax::codemap::span;
use syntax::print::pprust::pat_to_str; use syntax::print::pprust::pat_to_str;
use syntax::visit; use syntax::visit;
fn resolve_type_vars_in_type(fcx: @fn_ctxt, sp: span, typ: ty::t) fn resolve_type_vars_in_type(fcx: @mut FnCtxt, sp: span, typ: ty::t)
-> Option<ty::t> { -> Option<ty::t> {
if !ty::type_needs_infer(typ) { return Some(typ); } if !ty::type_needs_infer(typ) { return Some(typ); }
match resolve_type(fcx.infcx(), typ, resolve_all | force_all) { match resolve_type(fcx.infcx(), typ, resolve_all | force_all) {
@ -52,28 +52,28 @@ fn resolve_type_vars_in_type(fcx: @fn_ctxt, sp: span, typ: ty::t)
} }
} }
fn resolve_method_map_entry(fcx: @fn_ctxt, sp: span, id: ast::node_id) fn resolve_method_map_entry(fcx: @mut FnCtxt, sp: span, id: ast::node_id) {
{
// Resolve any method map entry // Resolve any method map entry
match fcx.ccx.method_map.find(&id) { match fcx.ccx.method_map.find(&id) {
None => {} None => {}
Some(ref mme) => { Some(ref mme) => {
for resolve_type_vars_in_type(fcx, sp, mme.self_arg.ty).each |t| { for resolve_type_vars_in_type(fcx, sp, mme.self_arg.ty).each |t| {
fcx.ccx.method_map.insert( let method_map = fcx.ccx.method_map;
id, method_map.insert(id,
method_map_entry { method_map_entry {
self_arg: arg {mode: mme.self_arg.mode, ty: *t}, self_arg: arg {
mode: mme.self_arg.mode,
ty: *t
},
.. *mme .. *mme
} });
);
} }
} }
} }
} }
fn resolve_type_vars_for_node(wbcx: wb_ctxt, sp: span, id: ast::node_id) fn resolve_type_vars_for_node(wbcx: @mut WbCtxt, sp: span, id: ast::node_id)
-> Option<ty::t> -> Option<ty::t> {
{
let fcx = wbcx.fcx, tcx = fcx.ccx.tcx; let fcx = wbcx.fcx, tcx = fcx.ccx.tcx;
// Resolve any borrowings for the node with id `id` // Resolve any borrowings for the node with id `id`
@ -137,10 +137,10 @@ fn resolve_type_vars_for_node(wbcx: wb_ctxt, sp: span, id: ast::node_id)
} }
} }
fn maybe_resolve_type_vars_for_node(wbcx: wb_ctxt, sp: span, fn maybe_resolve_type_vars_for_node(wbcx: @mut WbCtxt,
sp: span,
id: ast::node_id) id: ast::node_id)
-> Option<ty::t> -> Option<ty::t> {
{
if wbcx.fcx.inh.node_types.contains_key_ref(&id) { if wbcx.fcx.inh.node_types.contains_key_ref(&id) {
resolve_type_vars_for_node(wbcx, sp, id) resolve_type_vars_for_node(wbcx, sp, id)
} else { } else {
@ -148,18 +148,22 @@ fn maybe_resolve_type_vars_for_node(wbcx: wb_ctxt, sp: span,
} }
} }
type wb_ctxt = struct WbCtxt {
// As soon as we hit an error we have to stop resolving fcx: @mut FnCtxt,
// the entire function
{fcx: @fn_ctxt, mut success: bool};
type wb_vt = visit::vt<wb_ctxt>;
fn visit_stmt(s: @ast::stmt, wbcx: wb_ctxt, v: wb_vt) { // As soon as we hit an error we have to stop resolving
// the entire function.
success: bool,
}
type wb_vt = visit::vt<@mut WbCtxt>;
fn visit_stmt(s: @ast::stmt, &&wbcx: @mut WbCtxt, v: wb_vt) {
if !wbcx.success { return; } if !wbcx.success { return; }
resolve_type_vars_for_node(wbcx, s.span, ty::stmt_node_id(s)); resolve_type_vars_for_node(wbcx, s.span, ty::stmt_node_id(s));
visit::visit_stmt(s, wbcx, v); visit::visit_stmt(s, wbcx, v);
} }
fn visit_expr(e: @ast::expr, wbcx: wb_ctxt, v: wb_vt) { fn visit_expr(e: @ast::expr, &&wbcx: @mut WbCtxt, v: wb_vt) {
if !wbcx.success { return; } if !wbcx.success { return; }
resolve_type_vars_for_node(wbcx, e.span, e.id); resolve_type_vars_for_node(wbcx, e.span, e.id);
resolve_method_map_entry(wbcx.fcx, e.span, e.id); resolve_method_map_entry(wbcx.fcx, e.span, e.id);
@ -196,12 +200,12 @@ fn visit_expr(e: @ast::expr, wbcx: wb_ctxt, v: wb_vt) {
} }
visit::visit_expr(e, wbcx, v); visit::visit_expr(e, wbcx, v);
} }
fn visit_block(b: ast::blk, wbcx: wb_ctxt, v: wb_vt) { fn visit_block(b: ast::blk, &&wbcx: @mut WbCtxt, v: wb_vt) {
if !wbcx.success { return; } if !wbcx.success { return; }
resolve_type_vars_for_node(wbcx, b.span, b.node.id); resolve_type_vars_for_node(wbcx, b.span, b.node.id);
visit::visit_block(b, wbcx, v); visit::visit_block(b, wbcx, v);
} }
fn visit_pat(p: @ast::pat, wbcx: wb_ctxt, v: wb_vt) { fn visit_pat(p: @ast::pat, &&wbcx: @mut WbCtxt, v: wb_vt) {
if !wbcx.success { return; } if !wbcx.success { return; }
resolve_type_vars_for_node(wbcx, p.span, p.id); resolve_type_vars_for_node(wbcx, p.span, p.id);
debug!("Type for pattern binding %s (id %d) resolved to %s", debug!("Type for pattern binding %s (id %d) resolved to %s",
@ -211,7 +215,7 @@ fn visit_pat(p: @ast::pat, wbcx: wb_ctxt, v: wb_vt) {
p.id))); p.id)));
visit::visit_pat(p, wbcx, v); visit::visit_pat(p, wbcx, v);
} }
fn visit_local(l: @ast::local, wbcx: wb_ctxt, v: wb_vt) { fn visit_local(l: @ast::local, &&wbcx: @mut WbCtxt, v: wb_vt) {
if !wbcx.success { return; } if !wbcx.success { return; }
let var_ty = wbcx.fcx.local_ty(l.span, l.node.id); let var_ty = wbcx.fcx.local_ty(l.span, l.node.id);
match resolve_type(wbcx.fcx.infcx(), var_ty, resolve_all | force_all) { match resolve_type(wbcx.fcx.infcx(), var_ty, resolve_all | force_all) {
@ -233,11 +237,11 @@ fn visit_local(l: @ast::local, wbcx: wb_ctxt, v: wb_vt) {
} }
visit::visit_local(l, wbcx, v); visit::visit_local(l, wbcx, v);
} }
fn visit_item(_item: @ast::item, _wbcx: wb_ctxt, _v: wb_vt) { fn visit_item(_item: @ast::item, &&_wbcx: @mut WbCtxt, _v: wb_vt) {
// Ignore items // Ignore items
} }
fn mk_visitor() -> visit::vt<wb_ctxt> { fn mk_visitor() -> visit::vt<@mut WbCtxt> {
visit::mk_vt(@visit::Visitor {visit_item: visit_item, visit::mk_vt(@visit::Visitor {visit_item: visit_item,
visit_stmt: visit_stmt, visit_stmt: visit_stmt,
visit_expr: visit_expr, visit_expr: visit_expr,
@ -247,18 +251,18 @@ fn mk_visitor() -> visit::vt<wb_ctxt> {
.. *visit::default_visitor()}) .. *visit::default_visitor()})
} }
pub fn resolve_type_vars_in_expr(fcx: @fn_ctxt, e: @ast::expr) -> bool { pub fn resolve_type_vars_in_expr(fcx: @mut FnCtxt, e: @ast::expr) -> bool {
let wbcx = {fcx: fcx, mut success: true}; let wbcx = @mut WbCtxt { fcx: fcx, success: true };
let visit = mk_visitor(); let visit = mk_visitor();
(visit.visit_expr)(e, wbcx, visit); (visit.visit_expr)(e, wbcx, visit);
return wbcx.success; return wbcx.success;
} }
pub fn resolve_type_vars_in_fn(fcx: @fn_ctxt, pub fn resolve_type_vars_in_fn(fcx: @mut FnCtxt,
decl: &ast::fn_decl, decl: &ast::fn_decl,
blk: ast::blk, blk: ast::blk,
self_info: Option<self_info>) -> bool { self_info: Option<self_info>) -> bool {
let wbcx = {fcx: fcx, mut success: true}; let wbcx = @mut WbCtxt { fcx: fcx, success: true };
let visit = mk_visitor(); let visit = mk_visitor();
(visit.visit_block)(blk, wbcx, visit); (visit.visit_block)(blk, wbcx, visit);
for self_info.each |self_info| { for self_info.each |self_info| {

View file

@ -33,7 +33,7 @@ use middle::ty::{ty_type, ty_uint, ty_uniq, ty_bare_fn, ty_closure};
use middle::ty::{ty_opaque_closure_ptr, ty_unboxed_vec, type_kind_ext}; use middle::ty::{ty_opaque_closure_ptr, ty_unboxed_vec, type_kind_ext};
use middle::ty::{type_is_ty_var}; use middle::ty::{type_is_ty_var};
use middle::ty; use middle::ty;
use middle::typeck::crate_ctxt; use middle::typeck::CrateCtxt;
use middle::typeck::infer::combine::Combine; use middle::typeck::infer::combine::Combine;
use middle::typeck::infer::{InferCtxt, can_mk_subty}; use middle::typeck::infer::{InferCtxt, can_mk_subty};
use middle::typeck::infer::{new_infer_ctxt, resolve_ivar}; use middle::typeck::infer::{new_infer_ctxt, resolve_ivar};
@ -71,7 +71,7 @@ pub struct UniversalQuantificationResult {
bounds: @~[param_bounds] bounds: @~[param_bounds]
} }
pub fn get_base_type(inference_context: @InferCtxt, pub fn get_base_type(inference_context: @mut InferCtxt,
span: span, span: span,
original_type: t) original_type: t)
-> Option<t> { -> Option<t> {
@ -119,7 +119,7 @@ pub fn get_base_type(inference_context: @InferCtxt,
} }
// Returns the def ID of the base type, if there is one. // Returns the def ID of the base type, if there is one.
pub fn get_base_type_def_id(inference_context: @InferCtxt, pub fn get_base_type_def_id(inference_context: @mut InferCtxt,
span: span, span: span,
original_type: t) original_type: t)
-> Option<def_id> { -> Option<def_id> {
@ -171,7 +171,7 @@ pub fn CoherenceInfo() -> CoherenceInfo {
} }
} }
pub fn CoherenceChecker(crate_context: @crate_ctxt) -> CoherenceChecker { pub fn CoherenceChecker(crate_context: @mut CrateCtxt) -> CoherenceChecker {
CoherenceChecker { CoherenceChecker {
crate_context: crate_context, crate_context: crate_context,
inference_context: new_infer_ctxt(crate_context.tcx), inference_context: new_infer_ctxt(crate_context.tcx),
@ -182,8 +182,8 @@ pub fn CoherenceChecker(crate_context: @crate_ctxt) -> CoherenceChecker {
} }
pub struct CoherenceChecker { pub struct CoherenceChecker {
crate_context: @crate_ctxt, crate_context: @mut CrateCtxt,
inference_context: @InferCtxt, inference_context: @mut InferCtxt,
// A mapping from implementations to the corresponding base type // A mapping from implementations to the corresponding base type
// definition ID. // definition ID.
@ -415,7 +415,7 @@ pub impl CoherenceChecker {
} }
fn check_implementation_coherence() { fn check_implementation_coherence() {
let coherence_info = &self.crate_context.coherence_info; let coherence_info = &mut self.crate_context.coherence_info;
let extension_methods = &coherence_info.extension_methods; let extension_methods = &coherence_info.extension_methods;
for extension_methods.each_key_ref |&trait_id| { for extension_methods.each_key_ref |&trait_id| {
@ -478,7 +478,7 @@ pub impl CoherenceChecker {
fn iter_impls_of_trait(trait_def_id: def_id, fn iter_impls_of_trait(trait_def_id: def_id,
f: &fn(@Impl)) { f: &fn(@Impl)) {
let coherence_info = &self.crate_context.coherence_info; let coherence_info = &mut self.crate_context.coherence_info;
let extension_methods = &coherence_info.extension_methods; let extension_methods = &coherence_info.extension_methods;
match extension_methods.find(&trait_def_id) { match extension_methods.find(&trait_def_id) {
@ -824,9 +824,8 @@ pub impl CoherenceChecker {
// External crate handling // External crate handling
fn add_impls_for_module(impls_seen: HashMap<def_id,()>, fn add_impls_for_module(impls_seen: HashMap<def_id,()>,
crate_store: CStore, crate_store: @mut CStore,
module_def_id: def_id) { module_def_id: def_id) {
let implementations = get_impls_for_mod(crate_store, let implementations = get_impls_for_mod(crate_store,
module_def_id, module_def_id,
None); None);
@ -986,7 +985,7 @@ pub impl CoherenceChecker {
// //
fn populate_destructor_table() { fn populate_destructor_table() {
let coherence_info = &self.crate_context.coherence_info; let coherence_info = &mut self.crate_context.coherence_info;
let tcx = self.crate_context.tcx; let tcx = self.crate_context.tcx;
let drop_trait = tcx.lang_items.drop_trait(); let drop_trait = tcx.lang_items.drop_trait();
let impls_opt = coherence_info.extension_methods.find(&drop_trait); let impls_opt = coherence_info.extension_methods.find(&drop_trait);
@ -1035,8 +1034,8 @@ pub impl CoherenceChecker {
} }
} }
pub fn check_coherence(crate_context: @crate_ctxt, crate: @crate) { pub fn check_coherence(crate_context: @mut CrateCtxt, crate: @crate) {
let coherence_checker = @CoherenceChecker(crate_context); let coherence_checker = @CoherenceChecker(crate_context);
(*coherence_checker).check_coherence(crate); coherence_checker.check_coherence(crate);
} }
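The change running through coherence.rs is the one the commit message names: instead of `@crate_ctxt` boxes whose fields are individually `mut`, the whole context becomes `@mut CrateCtxt` and the fields stay plain, so mutability belongs to the handle rather than to each field. Below is a rough modern-Rust analogue of that shape, offered only as an illustration: the `Rc<RefCell<_>>` stands in for `@mut`, and the `CrateCtxt`/`CoherenceInfo` fields are simplified stand-ins, not the real types.

use std::cell::RefCell;
use std::rc::Rc;

// Hypothetical, simplified stand-ins for the typeck context types.
struct CoherenceInfo {
    extension_methods: Vec<u32>,
}

struct CrateCtxt {
    coherence_info: CoherenceInfo,
}

fn check_implementation_coherence(ccx: &Rc<RefCell<CrateCtxt>>) {
    // Mutability comes from the RefCell around the whole context,
    // not from per-field markers inside CrateCtxt.
    let mut ctxt = ccx.borrow_mut();
    ctxt.coherence_info.extension_methods.push(42);
}

fn main() {
    let ccx = Rc::new(RefCell::new(CrateCtxt {
        coherence_info: CoherenceInfo { extension_methods: Vec::new() },
    }));
    check_implementation_coherence(&ccx);
    assert_eq!(ccx.borrow().coherence_info.extension_methods.len(), 1);
}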

View file

@ -36,13 +36,13 @@ use metadata::csearch;
use middle::ty::{InstantiatedTraitRef, arg}; use middle::ty::{InstantiatedTraitRef, arg};
use middle::ty::{substs, ty_param_substs_and_ty}; use middle::ty::{substs, ty_param_substs_and_ty};
use middle::ty; use middle::ty;
use middle::typeck::astconv::{ast_conv, ty_of_arg}; use middle::typeck::astconv::{AstConv, ty_of_arg};
use middle::typeck::astconv::{ast_ty_to_ty}; use middle::typeck::astconv::{ast_ty_to_ty};
use middle::typeck::astconv; use middle::typeck::astconv;
use middle::typeck::infer; use middle::typeck::infer;
use middle::typeck::rscope::*; use middle::typeck::rscope::*;
use middle::typeck::rscope; use middle::typeck::rscope;
use middle::typeck::{crate_ctxt, lookup_def_tcx, no_params, write_ty_to_tcx}; use middle::typeck::{CrateCtxt, lookup_def_tcx, no_params, write_ty_to_tcx};
use util::common::{indenter, pluralize}; use util::common::{indenter, pluralize};
use util::ppaux; use util::ppaux;
use util::ppaux::bound_to_str; use util::ppaux::bound_to_str;
@ -61,7 +61,7 @@ use syntax::codemap;
use syntax::print::pprust::path_to_str; use syntax::print::pprust::path_to_str;
use syntax::visit; use syntax::visit;
pub fn collect_item_types(ccx: @crate_ctxt, crate: @ast::crate) { pub fn collect_item_types(ccx: @mut CrateCtxt, crate: @ast::crate) {
// FIXME (#2592): hooking into the "intrinsic" root module is crude. // FIXME (#2592): hooking into the "intrinsic" root module is crude.
// There ought to be a better approach. Attributes? // There ought to be a better approach. Attributes?
@ -114,19 +114,18 @@ pub fn collect_item_types(ccx: @crate_ctxt, crate: @ast::crate) {
})); }));
} }
pub impl @crate_ctxt { pub impl @mut CrateCtxt {
fn to_ty<RS: region_scope Copy Durable>( fn to_ty<RS: region_scope Copy Durable>(rs: RS, ast_ty: @ast::Ty)
rs: RS, ast_ty: @ast::Ty) -> ty::t { -> ty::t {
ast_ty_to_ty(self, rs, ast_ty) ast_ty_to_ty(self, rs, ast_ty)
} }
} }
pub impl @crate_ctxt: ast_conv { pub impl CrateCtxt: AstConv {
fn tcx() -> ty::ctxt { self.tcx } fn tcx(@mut self) -> ty::ctxt { self.tcx }
fn ccx() -> @crate_ctxt { self } fn ccx(@mut self) -> @mut CrateCtxt { self }
fn get_item_ty(id: ast::def_id) -> ty::ty_param_bounds_and_ty { fn get_item_ty(@mut self, id: ast::def_id) -> ty::ty_param_bounds_and_ty {
if id.crate != ast::local_crate { if id.crate != ast::local_crate {
csearch::get_type(self.tcx, id) csearch::get_type(self.tcx, id)
} else { } else {
@ -145,18 +144,17 @@ pub impl @crate_ctxt: ast_conv {
} }
} }
fn ty_infer(span: span) -> ty::t { fn ty_infer(@mut self, span: span) -> ty::t {
self.tcx.sess.span_bug(span, self.tcx.sess.span_bug(span,
~"found `ty_infer` in unexpected place"); ~"found `ty_infer` in unexpected place");
} }
} }
pub fn get_enum_variant_types(ccx: @crate_ctxt, pub fn get_enum_variant_types(ccx: @mut CrateCtxt,
enum_ty: ty::t, enum_ty: ty::t,
variants: &[ast::variant], variants: &[ast::variant],
ty_params: &[ast::ty_param], +ty_params: ~[ast::ty_param],
rp: Option<ty::region_variance>) rp: Option<ty::region_variance>) {
{
let tcx = ccx.tcx; let tcx = ccx.tcx;
// Create a set of parameter types shared among all the variants. // Create a set of parameter types shared among all the variants.
@ -193,9 +191,11 @@ pub fn get_enum_variant_types(ccx: @crate_ctxt,
} }
ast::enum_variant_kind(ref enum_definition) => { ast::enum_variant_kind(ref enum_definition) => {
get_enum_variant_types(ccx, enum_ty, get_enum_variant_types(ccx,
enum_ty,
enum_definition.variants, enum_definition.variants,
ty_params, rp); copy ty_params,
rp);
result_ty = None; result_ty = None;
} }
}; };
@ -213,15 +213,17 @@ pub fn get_enum_variant_types(ccx: @crate_ctxt,
} }
} }
pub fn ensure_trait_methods(ccx: @crate_ctxt, pub fn ensure_trait_methods(ccx: @mut CrateCtxt,
id: ast::node_id, id: ast::node_id,
trait_ty: ty::t) { trait_ty: ty::t) {
fn store_methods<T>(ccx: @crate_ctxt, id: ast::node_id, fn store_methods<T>(ccx: @mut CrateCtxt,
stuff: ~[T], f: &fn(v: &T) -> ty::method) { id: ast::node_id,
stuff: ~[T],
f: &fn(v: &T) -> ty::method) {
ty::store_trait_methods(ccx.tcx, id, @vec::map(stuff, f)); ty::store_trait_methods(ccx.tcx, id, @vec::map(stuff, f));
} }
fn make_static_method_ty(ccx: @crate_ctxt, fn make_static_method_ty(ccx: @mut CrateCtxt,
am: ast::ty_method, am: ast::ty_method,
rp: Option<ty::region_variance>, rp: Option<ty::region_variance>,
m: ty::method, m: ty::method,
@ -296,7 +298,7 @@ pub fn ensure_trait_methods(ccx: @crate_ctxt,
} }
} }
pub fn ensure_supertraits(ccx: @crate_ctxt, pub fn ensure_supertraits(ccx: @mut CrateCtxt,
id: ast::node_id, id: ast::node_id,
sp: codemap::span, sp: codemap::span,
rp: Option<ty::region_variance>, rp: Option<ty::region_variance>,
@ -480,7 +482,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
} }
} }
pub fn check_methods_against_trait(ccx: @crate_ctxt, pub fn check_methods_against_trait(ccx: @mut CrateCtxt,
tps: ~[ast::ty_param], tps: ~[ast::ty_param],
rp: Option<ty::region_variance>, rp: Option<ty::region_variance>,
selfty: ty::t, selfty: ty::t,
@ -531,7 +533,7 @@ pub fn check_methods_against_trait(ccx: @crate_ctxt,
} }
} // fn } // fn
pub fn convert_field(ccx: @crate_ctxt, pub fn convert_field(ccx: @mut CrateCtxt,
rp: Option<ty::region_variance>, rp: Option<ty::region_variance>,
bounds: @~[ty::param_bounds], bounds: @~[ty::param_bounds],
v: @ast::struct_field) { v: @ast::struct_field) {
@ -551,7 +553,7 @@ pub struct ConvertedMethod {
body_id: ast::node_id body_id: ast::node_id
} }
pub fn convert_methods(ccx: @crate_ctxt, pub fn convert_methods(ccx: @mut CrateCtxt,
ms: ~[@ast::method], ms: ~[@ast::method],
rp: Option<ty::region_variance>, rp: Option<ty::region_variance>,
rcvr_bounds: @~[ty::param_bounds]) rcvr_bounds: @~[ty::param_bounds])
@ -576,7 +578,7 @@ pub fn convert_methods(ccx: @crate_ctxt,
} }
} }
pub fn ensure_no_ty_param_bounds(ccx: @crate_ctxt, pub fn ensure_no_ty_param_bounds(ccx: @mut CrateCtxt,
span: span, span: span,
ty_params: &[ast::ty_param], ty_params: &[ast::ty_param],
thing: &static/str) { thing: &static/str) {
@ -590,7 +592,7 @@ pub fn ensure_no_ty_param_bounds(ccx: @crate_ctxt,
} }
} }
pub fn convert(ccx: @crate_ctxt, it: @ast::item) { pub fn convert(ccx: @mut CrateCtxt, it: @ast::item) {
let tcx = ccx.tcx; let tcx = ccx.tcx;
let rp = tcx.region_paramd_items.find(&it.id); let rp = tcx.region_paramd_items.find(&it.id);
debug!("convert: item %s with id %d rp %?", debug!("convert: item %s with id %d rp %?",
@ -602,8 +604,11 @@ pub fn convert(ccx: @crate_ctxt, it: @ast::item) {
ensure_no_ty_param_bounds(ccx, it.span, *ty_params, "enumeration"); ensure_no_ty_param_bounds(ccx, it.span, *ty_params, "enumeration");
let tpt = ty_of_item(ccx, it); let tpt = ty_of_item(ccx, it);
write_ty_to_tcx(tcx, it.id, tpt.ty); write_ty_to_tcx(tcx, it.id, tpt.ty);
get_enum_variant_types(ccx, tpt.ty, enum_definition.variants, get_enum_variant_types(ccx,
*ty_params, rp); tpt.ty,
enum_definition.variants,
copy *ty_params,
rp);
} }
ast::item_impl(ref tps, trait_ref, selfty, ref ms) => { ast::item_impl(ref tps, trait_ref, selfty, ref ms) => {
let i_bounds = ty_param_bounds(ccx, *tps); let i_bounds = ty_param_bounds(ccx, *tps);
@ -659,7 +664,7 @@ pub fn convert(ccx: @crate_ctxt, it: @ast::item) {
} }
} }
pub fn convert_struct(ccx: @crate_ctxt, pub fn convert_struct(ccx: @mut CrateCtxt,
rp: Option<ty::region_variance>, rp: Option<ty::region_variance>,
struct_def: @ast::struct_def, struct_def: @ast::struct_def,
+tps: ~[ast::ty_param], +tps: ~[ast::ty_param],
@ -716,7 +721,7 @@ pub fn convert_struct(ccx: @crate_ctxt,
} }
} }
pub fn convert_foreign(ccx: @crate_ctxt, i: @ast::foreign_item) { pub fn convert_foreign(ccx: @mut CrateCtxt, i: @ast::foreign_item) {
// As above, this call populates the type table with the converted // As above, this call populates the type table with the converted
// type of the foreign item. We simply write it into the node type // type of the foreign item. We simply write it into the node type
// table. // table.
@ -725,7 +730,7 @@ pub fn convert_foreign(ccx: @crate_ctxt, i: @ast::foreign_item) {
ccx.tcx.tcache.insert(local_def(i.id), tpt); ccx.tcx.tcache.insert(local_def(i.id), tpt);
} }
pub fn ty_of_method(ccx: @crate_ctxt, pub fn ty_of_method(ccx: @mut CrateCtxt,
m: @ast::method, m: @ast::method,
rp: Option<ty::region_variance>) -> ty::method { rp: Option<ty::region_variance>) -> ty::method {
{ident: m.ident, {ident: m.ident,
@ -737,7 +742,7 @@ pub fn ty_of_method(ccx: @crate_ctxt,
def_id: local_def(m.id)} def_id: local_def(m.id)}
} }
pub fn ty_of_ty_method(self: @crate_ctxt, pub fn ty_of_ty_method(self: @mut CrateCtxt,
m: ast::ty_method, m: ast::ty_method,
rp: Option<ty::region_variance>, rp: Option<ty::region_variance>,
id: ast::def_id) -> ty::method { id: ast::def_id) -> ty::method {
@ -756,7 +761,7 @@ pub fn ty_of_ty_method(self: @crate_ctxt,
it's bound to a valid trait type. Returns the def_id for the defining it's bound to a valid trait type. Returns the def_id for the defining
trait. Fails if the type is a type other than an trait type. trait. Fails if the type is a type other than an trait type.
*/ */
pub fn instantiate_trait_ref(ccx: @crate_ctxt, t: @ast::trait_ref, pub fn instantiate_trait_ref(ccx: @mut CrateCtxt, t: @ast::trait_ref,
rp: Option<ty::region_variance>) rp: Option<ty::region_variance>)
-> (ast::def_id, ty_param_substs_and_ty) { -> (ast::def_id, ty_param_substs_and_ty) {
@ -780,9 +785,8 @@ pub fn instantiate_trait_ref(ccx: @crate_ctxt, t: @ast::trait_ref,
} }
} }
pub fn ty_of_item(ccx: @crate_ctxt, it: @ast::item) pub fn ty_of_item(ccx: @mut CrateCtxt, it: @ast::item)
-> ty::ty_param_bounds_and_ty { -> ty::ty_param_bounds_and_ty {
let def_id = local_def(it.id); let def_id = local_def(it.id);
let tcx = ccx.tcx; let tcx = ccx.tcx;
match tcx.tcache.find(&def_id) { match tcx.tcache.find(&def_id) {
@ -871,7 +875,7 @@ pub fn ty_of_item(ccx: @crate_ctxt, it: @ast::item)
} }
} }
pub fn ty_of_foreign_item(ccx: @crate_ctxt, it: @ast::foreign_item) pub fn ty_of_foreign_item(ccx: @mut CrateCtxt, it: @ast::foreign_item)
-> ty::ty_param_bounds_and_ty { -> ty::ty_param_bounds_and_ty {
match /*bad*/copy it.node { match /*bad*/copy it.node {
ast::foreign_item_fn(fn_decl, _, params) => { ast::foreign_item_fn(fn_decl, _, params) => {
@ -892,7 +896,7 @@ pub fn ty_of_foreign_item(ccx: @crate_ctxt, it: @ast::foreign_item)
// of a newtyped Ty or a region) to ty's notion of ty param bounds, which can // of a newtyped Ty or a region) to ty's notion of ty param bounds, which can
// either be user-defined traits, or one of the four built-in traits (formerly // either be user-defined traits, or one of the four built-in traits (formerly
// known as kinds): Const, Copy, Durable, and Send. // known as kinds): Const, Copy, Durable, and Send.
pub fn compute_bounds(ccx: @crate_ctxt, pub fn compute_bounds(ccx: @mut CrateCtxt,
ast_bounds: @~[ast::ty_param_bound]) ast_bounds: @~[ast::ty_param_bound])
-> ty::param_bounds { -> ty::param_bounds {
@do vec::flat_map(*ast_bounds) |b| { @do vec::flat_map(*ast_bounds) |b| {
@ -928,8 +932,9 @@ pub fn compute_bounds(ccx: @crate_ctxt,
} }
} }
pub fn ty_param_bounds(ccx: @crate_ctxt, pub fn ty_param_bounds(ccx: @mut CrateCtxt,
params: &[ast::ty_param]) -> @~[ty::param_bounds] { params: ~[ast::ty_param])
-> @~[ty::param_bounds] {
@do params.map |param| { @do params.map |param| {
match ccx.tcx.ty_param_bounds.find(&param.id) { match ccx.tcx.ty_param_bounds.find(&param.id) {
Some(bs) => bs, Some(bs) => bs,
@ -942,7 +947,7 @@ pub fn ty_param_bounds(ccx: @crate_ctxt,
} }
} }
pub fn ty_of_foreign_fn_decl(ccx: @crate_ctxt, pub fn ty_of_foreign_fn_decl(ccx: @mut CrateCtxt,
decl: ast::fn_decl, decl: ast::fn_decl,
+ty_params: ~[ast::ty_param], +ty_params: ~[ast::ty_param],
def_id: ast::def_id) def_id: ast::def_id)
@ -964,7 +969,7 @@ pub fn ty_of_foreign_fn_decl(ccx: @crate_ctxt,
return tpt; return tpt;
} }
pub fn mk_ty_params(ccx: @crate_ctxt, atps: ~[ast::ty_param]) pub fn mk_ty_params(ccx: @mut CrateCtxt, atps: ~[ast::ty_param])
-> {bounds: @~[ty::param_bounds], params: ~[ty::t]} { -> {bounds: @~[ty::param_bounds], params: ~[ty::t]} {
let mut i = 0u; let mut i = 0u;
@ -977,7 +982,7 @@ pub fn mk_ty_params(ccx: @crate_ctxt, atps: ~[ast::ty_param])
})} })}
} }
pub fn mk_substs(ccx: @crate_ctxt, pub fn mk_substs(ccx: @mut CrateCtxt,
+atps: ~[ast::ty_param], +atps: ~[ast::ty_param],
rp: Option<ty::region_variance>) rp: Option<ty::region_variance>)
-> {bounds: @~[ty::param_bounds], substs: ty::substs} { -> {bounds: @~[ty::param_bounds], substs: ty::substs} {
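A second recurring edit in collect.rs switches `ty_params` from a borrowed slice to an owned vector taken by value, with an explicit `copy` at the recursive call inside `get_enum_variant_types`. A minimal modern-Rust sketch of that calling convention, assuming made-up `TyParam`/`Variant` types and with `clone` playing the role of `copy`:

#[derive(Clone)]
struct TyParam { id: u32 }

struct Variant { nested: Option<Vec<Variant>> }

// Takes the parameter list by value; the recursive call clones it
// explicitly because this frame still needs its own copy afterwards.
fn get_enum_variant_types(variants: &[Variant], ty_params: Vec<TyParam>) {
    for v in variants {
        if let Some(nested) = &v.nested {
            get_enum_variant_types(nested, ty_params.clone());
        }
    }
    // ... use `ty_params` for this enum's own variants ...
    let _ = ty_params.len();
}

fn main() {
    let variants = vec![Variant { nested: None }];
    get_enum_variant_types(&variants, vec![TyParam { id: 0 }]);
}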

View file

@ -78,7 +78,7 @@ pub fn macros() {
} }
pub trait Combine { pub trait Combine {
fn infcx() -> @InferCtxt; fn infcx() -> @mut InferCtxt;
fn tag() -> ~str; fn tag() -> ~str;
fn a_is_expected() -> bool; fn a_is_expected() -> bool;
fn span() -> span; fn span() -> span;
@ -113,7 +113,7 @@ pub trait Combine {
} }
pub struct CombineFields { pub struct CombineFields {
infcx: @InferCtxt, infcx: @mut InferCtxt,
a_is_expected: bool, a_is_expected: bool,
span: span, span: span,
} }

View file

@ -28,7 +28,7 @@ use std::list;
pub enum Glb = CombineFields; // "greatest lower bound" (common subtype) pub enum Glb = CombineFields; // "greatest lower bound" (common subtype)
pub impl Glb: Combine { pub impl Glb: Combine {
fn infcx() -> @InferCtxt { self.infcx } fn infcx() -> @mut InferCtxt { self.infcx }
fn tag() -> ~str { ~"glb" } fn tag() -> ~str { ~"glb" }
fn a_is_expected() -> bool { self.a_is_expected } fn a_is_expected() -> bool { self.a_is_expected }
fn span() -> span { self.span } fn span() -> span { self.span }

View file

@ -37,7 +37,7 @@ pub impl Lub {
} }
pub impl Lub: Combine { pub impl Lub: Combine {
fn infcx() -> @InferCtxt { self.infcx } fn infcx() -> @mut InferCtxt { self.infcx }
fn tag() -> ~str { ~"lub" } fn tag() -> ~str { ~"lub" }
fn a_is_expected() -> bool { self.a_is_expected } fn a_is_expected() -> bool { self.a_is_expected }
fn span() -> span { self.span } fn span() -> span { self.span }

View file

@ -313,15 +313,15 @@ pub struct InferCtxt {
// types that might instantiate a general type variable have an // types that might instantiate a general type variable have an
// order, represented by its upper and lower bounds. // order, represented by its upper and lower bounds.
ty_var_bindings: ValsAndBindings<ty::TyVid, Bounds<ty::t>>, ty_var_bindings: ValsAndBindings<ty::TyVid, Bounds<ty::t>>,
mut ty_var_counter: uint, ty_var_counter: uint,
// Map from integral variable to the kind of integer it represents // Map from integral variable to the kind of integer it represents
int_var_bindings: ValsAndBindings<ty::IntVid, Option<IntVarValue>>, int_var_bindings: ValsAndBindings<ty::IntVid, Option<IntVarValue>>,
mut int_var_counter: uint, int_var_counter: uint,
// Map from floating variable to the kind of float it represents // Map from floating variable to the kind of float it represents
float_var_bindings: ValsAndBindings<ty::FloatVid, Option<ast::float_ty>>, float_var_bindings: ValsAndBindings<ty::FloatVid, Option<ast::float_ty>>,
mut float_var_counter: uint, float_var_counter: uint,
// For region variables. // For region variables.
region_vars: RegionVarBindings, region_vars: RegionVarBindings,
@ -351,12 +351,12 @@ pub fn fixup_err_to_str(f: fixup_err) -> ~str {
fn new_ValsAndBindings<V:Copy, T:Copy>() -> ValsAndBindings<V, T> { fn new_ValsAndBindings<V:Copy, T:Copy>() -> ValsAndBindings<V, T> {
ValsAndBindings { ValsAndBindings {
vals: oldsmallintmap::mk(), vals: oldsmallintmap::mk(),
mut bindings: ~[] bindings: ~[]
} }
} }
pub fn new_infer_ctxt(tcx: ty::ctxt) -> @InferCtxt { pub fn new_infer_ctxt(tcx: ty::ctxt) -> @mut InferCtxt {
@InferCtxt { @mut InferCtxt {
tcx: tcx, tcx: tcx,
ty_var_bindings: new_ValsAndBindings(), ty_var_bindings: new_ValsAndBindings(),
@ -372,8 +372,12 @@ pub fn new_infer_ctxt(tcx: ty::ctxt) -> @InferCtxt {
} }
} }
pub fn mk_subty(cx: @InferCtxt, a_is_expected: bool, span: span, pub fn mk_subty(cx: @mut InferCtxt,
a: ty::t, b: ty::t) -> ures { a_is_expected: bool,
span: span,
a: ty::t,
b: ty::t)
-> ures {
debug!("mk_subty(%s <: %s)", a.inf_str(cx), b.inf_str(cx)); debug!("mk_subty(%s <: %s)", a.inf_str(cx), b.inf_str(cx));
do indent { do indent {
do cx.commit { do cx.commit {
@ -382,7 +386,7 @@ pub fn mk_subty(cx: @InferCtxt, a_is_expected: bool, span: span,
}.to_ures() }.to_ures()
} }
pub fn can_mk_subty(cx: @InferCtxt, a: ty::t, b: ty::t) -> ures { pub fn can_mk_subty(cx: @mut InferCtxt, a: ty::t, b: ty::t) -> ures {
debug!("can_mk_subty(%s <: %s)", a.inf_str(cx), b.inf_str(cx)); debug!("can_mk_subty(%s <: %s)", a.inf_str(cx), b.inf_str(cx));
do indent { do indent {
do cx.probe { do cx.probe {
@ -391,8 +395,12 @@ pub fn can_mk_subty(cx: @InferCtxt, a: ty::t, b: ty::t) -> ures {
}.to_ures() }.to_ures()
} }
pub fn mk_subr(cx: @InferCtxt, a_is_expected: bool, span: span, pub fn mk_subr(cx: @mut InferCtxt,
a: ty::Region, b: ty::Region) -> ures { a_is_expected: bool,
span: span,
a: ty::Region,
b: ty::Region)
-> ures {
debug!("mk_subr(%s <: %s)", a.inf_str(cx), b.inf_str(cx)); debug!("mk_subr(%s <: %s)", a.inf_str(cx), b.inf_str(cx));
do indent { do indent {
do cx.commit { do cx.commit {
@ -401,8 +409,12 @@ pub fn mk_subr(cx: @InferCtxt, a_is_expected: bool, span: span,
}.to_ures() }.to_ures()
} }
pub fn mk_eqty(cx: @InferCtxt, a_is_expected: bool, span: span, pub fn mk_eqty(cx: @mut InferCtxt,
a: ty::t, b: ty::t) -> ures { a_is_expected: bool,
span: span,
a: ty::t,
b: ty::t)
-> ures {
debug!("mk_eqty(%s <: %s)", a.inf_str(cx), b.inf_str(cx)); debug!("mk_eqty(%s <: %s)", a.inf_str(cx), b.inf_str(cx));
do indent { do indent {
do cx.commit { do cx.commit {
@ -412,8 +424,12 @@ pub fn mk_eqty(cx: @InferCtxt, a_is_expected: bool, span: span,
}.to_ures() }.to_ures()
} }
pub fn mk_coercety(cx: @InferCtxt, a_is_expected: bool, span: span, pub fn mk_coercety(cx: @mut InferCtxt,
a: ty::t, b: ty::t) -> CoerceResult { a_is_expected: bool,
span: span,
a: ty::t,
b: ty::t)
-> CoerceResult {
debug!("mk_coercety(%s -> %s)", a.inf_str(cx), b.inf_str(cx)); debug!("mk_coercety(%s -> %s)", a.inf_str(cx), b.inf_str(cx));
do indent { do indent {
do cx.commit { do cx.commit {
@ -422,7 +438,7 @@ pub fn mk_coercety(cx: @InferCtxt, a_is_expected: bool, span: span,
} }
} }
pub fn can_mk_coercety(cx: @InferCtxt, a: ty::t, b: ty::t) -> ures { pub fn can_mk_coercety(cx: @mut InferCtxt, a: ty::t, b: ty::t) -> ures {
debug!("can_mk_coercety(%s -> %s)", a.inf_str(cx), b.inf_str(cx)); debug!("can_mk_coercety(%s -> %s)", a.inf_str(cx), b.inf_str(cx));
do indent { do indent {
do cx.probe { do cx.probe {
@ -433,17 +449,22 @@ pub fn can_mk_coercety(cx: @InferCtxt, a: ty::t, b: ty::t) -> ures {
} }
// See comment on the type `resolve_state` below // See comment on the type `resolve_state` below
pub fn resolve_type(cx: @InferCtxt, a: ty::t, modes: uint) -> fres<ty::t> { pub fn resolve_type(cx: @mut InferCtxt,
resolver(cx, modes).resolve_type_chk(a) a: ty::t,
modes: uint)
-> fres<ty::t> {
let mut resolver = resolver(cx, modes);
resolver.resolve_type_chk(a)
} }
pub fn resolve_region(cx: @InferCtxt, r: ty::Region, modes: uint) pub fn resolve_region(cx: @mut InferCtxt, r: ty::Region, modes: uint)
-> fres<ty::Region> { -> fres<ty::Region> {
resolver(cx, modes).resolve_region_chk(r) let mut resolver = resolver(cx, modes);
resolver.resolve_region_chk(r)
} }
/* /*
fn resolve_borrowings(cx: @InferCtxt) { fn resolve_borrowings(cx: @mut InferCtxt) {
for cx.borrowings.each |item| { for cx.borrowings.each |item| {
match resolve_region(cx, item.scope, resolve_all|force_all) { match resolve_region(cx, item.scope, resolve_all|force_all) {
Ok(region) => { Ok(region) => {
@ -510,7 +531,7 @@ pub fn uok() -> ures {
} }
fn rollback_to<V:Copy Vid, T:Copy>( fn rollback_to<V:Copy Vid, T:Copy>(
vb: &ValsAndBindings<V, T>, vb: &mut ValsAndBindings<V, T>,
len: uint) len: uint)
{ {
while vb.bindings.len() != len { while vb.bindings.len() != len {
@ -526,7 +547,7 @@ struct Snapshot {
region_vars_snapshot: uint, region_vars_snapshot: uint,
} }
impl @InferCtxt { impl @mut InferCtxt {
fn combine_fields(a_is_expected: bool, fn combine_fields(a_is_expected: bool,
span: span) -> CombineFields { span: span) -> CombineFields {
CombineFields {infcx: self, CombineFields {infcx: self,
@ -557,11 +578,11 @@ impl @InferCtxt {
fn rollback_to(snapshot: &Snapshot) { fn rollback_to(snapshot: &Snapshot) {
debug!("rollback!"); debug!("rollback!");
rollback_to(&self.ty_var_bindings, snapshot.ty_var_bindings_len); rollback_to(&mut self.ty_var_bindings, snapshot.ty_var_bindings_len);
rollback_to(&self.int_var_bindings, rollback_to(&mut self.int_var_bindings,
snapshot.int_var_bindings_len); snapshot.int_var_bindings_len);
rollback_to(&self.float_var_bindings, rollback_to(&mut self.float_var_bindings,
snapshot.float_var_bindings_len); snapshot.float_var_bindings_len);
self.region_vars.rollback_to(snapshot.region_vars_snapshot); self.region_vars.rollback_to(snapshot.region_vars_snapshot);
@ -610,20 +631,20 @@ impl @InferCtxt {
fn next_simple_var<V: Copy,T: Copy>( fn next_simple_var<V: Copy,T: Copy>(
+counter: &mut uint, +counter: &mut uint,
+bindings: &ValsAndBindings<V,Option<T>>) -> uint +bindings: &mut ValsAndBindings<V,Option<T>>)
{ -> uint {
let id = *counter; let id = *counter;
*counter += 1; *counter += 1;
bindings.vals.insert(id, Root(None, 0)); bindings.vals.insert(id, Root(None, 0));
return id; return id;
} }
impl @InferCtxt { impl @mut InferCtxt {
fn next_ty_var_id() -> TyVid { fn next_ty_var_id() -> TyVid {
let id = self.ty_var_counter; let id = self.ty_var_counter;
self.ty_var_counter += 1; self.ty_var_counter += 1;
self.ty_var_bindings.vals.insert(id, let vals = self.ty_var_bindings.vals;
Root({lb: None, ub: None}, 0u)); vals.insert(id, Root({lb: None, ub: None}, 0u));
return TyVid(id); return TyVid(id);
} }
@ -637,7 +658,7 @@ impl @InferCtxt {
fn next_int_var_id() -> IntVid { fn next_int_var_id() -> IntVid {
IntVid(next_simple_var(&mut self.int_var_counter, IntVid(next_simple_var(&mut self.int_var_counter,
&self.int_var_bindings)) &mut self.int_var_bindings))
} }
fn next_int_var() -> ty::t { fn next_int_var() -> ty::t {
@ -646,7 +667,7 @@ impl @InferCtxt {
fn next_float_var_id() -> FloatVid { fn next_float_var_id() -> FloatVid {
FloatVid(next_simple_var(&mut self.float_var_counter, FloatVid(next_simple_var(&mut self.float_var_counter,
&self.float_var_bindings)) &mut self.float_var_bindings))
} }
fn next_float_var() -> ty::t { fn next_float_var() -> ty::t {
@ -726,9 +747,9 @@ impl @InferCtxt {
} }
fn replace_bound_regions_with_fresh_regions( fn replace_bound_regions_with_fresh_regions(
&self, span: span, span: span,
fsig: &ty::FnSig) -> (ty::FnSig, isr_alist) fsig: &ty::FnSig)
{ -> (ty::FnSig, isr_alist) {
let {fn_sig: fn_sig, isr: isr, _} = let {fn_sig: fn_sig, isr: isr, _} =
replace_bound_regions_in_fn_sig(self.tcx, @Nil, None, fsig, |br| { replace_bound_regions_in_fn_sig(self.tcx, @Nil, None, fsig, |br| {
// N.B.: The name of the bound region doesn't have anything to // N.B.: The name of the bound region doesn't have anything to
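The infer module's snapshot machinery records the length of each binding log when a snapshot starts and rolls back by truncating the logs to those lengths; once the fields are no longer `mut`, the helpers take `&mut` instead of `&`. The sketch below shows that undo-log shape in modern Rust with a single hypothetical binding table standing in for the real `ValsAndBindings`:

struct Snapshot { bindings_len: usize }

struct Bindings {
    // Log of (variable index, previous value) entries made since the
    // last committed state, in the order they were applied.
    log: Vec<(usize, i32)>,
    values: Vec<i32>,
}

impl Bindings {
    fn start_snapshot(&self) -> Snapshot {
        Snapshot { bindings_len: self.log.len() }
    }

    fn set(&mut self, var: usize, new: i32) {
        self.log.push((var, self.values[var]));
        self.values[var] = new;
    }

    fn rollback_to(&mut self, snapshot: &Snapshot) {
        // Undo entries newer than the snapshot, restoring old values.
        while self.log.len() > snapshot.bindings_len {
            let (var, old) = self.log.pop().unwrap();
            self.values[var] = old;
        }
    }

    fn commit(&mut self) {
        self.log.clear();
    }
}

fn main() {
    let mut b = Bindings { log: Vec::new(), values: vec![0, 0] };
    let snap = b.start_snapshot();
    b.set(0, 7);
    b.rollback_to(&snap);
    assert_eq!(b.values[0], 0);
    b.commit();
}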

View file

@ -636,8 +636,8 @@ pub struct RegionVarBindings {
constraints: HashMap<Constraint, span>, constraints: HashMap<Constraint, span>,
lubs: CombineMap, lubs: CombineMap,
glbs: CombineMap, glbs: CombineMap,
mut skolemization_count: uint, skolemization_count: uint,
mut bound_count: uint, bound_count: uint,
// The undo log records actions that might later be undone. // The undo log records actions that might later be undone.
// //
@ -647,7 +647,7 @@ pub struct RegionVarBindings {
// actively snapshotting. The reason for this is that otherwise // actively snapshotting. The reason for this is that otherwise
// we end up adding entries for things like the lower bound on // we end up adding entries for things like the lower bound on
// a variable and so forth, which can never be rolled back. // a variable and so forth, which can never be rolled back.
mut undo_log: ~[UndoLogEntry], undo_log: ~[UndoLogEntry],
// This contains the results of inference. It begins as an empty // This contains the results of inference. It begins as an empty
// cell and only acquires a value after inference is complete. // cell and only acquires a value after inference is complete.
@ -677,11 +677,11 @@ fn CombineMap() -> CombineMap {
} }
pub impl RegionVarBindings { pub impl RegionVarBindings {
fn in_snapshot(&self) -> bool { fn in_snapshot(&mut self) -> bool {
self.undo_log.len() > 0 self.undo_log.len() > 0
} }
fn start_snapshot(&self) -> uint { fn start_snapshot(&mut self) -> uint {
debug!("RegionVarBindings: snapshot()=%u", self.undo_log.len()); debug!("RegionVarBindings: snapshot()=%u", self.undo_log.len());
if self.in_snapshot() { if self.in_snapshot() {
self.undo_log.len() self.undo_log.len()
@ -691,14 +691,14 @@ pub impl RegionVarBindings {
} }
} }
fn commit(&self) { fn commit(&mut self) {
debug!("RegionVarBindings: commit()"); debug!("RegionVarBindings: commit()");
while self.undo_log.len() > 0 { while self.undo_log.len() > 0 {
self.undo_log.pop(); self.undo_log.pop();
} }
} }
fn rollback_to(&self, snapshot: uint) { fn rollback_to(&mut self, snapshot: uint) {
debug!("RegionVarBindings: rollback_to(%u)", snapshot); debug!("RegionVarBindings: rollback_to(%u)", snapshot);
while self.undo_log.len() > snapshot { while self.undo_log.len() > snapshot {
let undo_item = self.undo_log.pop(); let undo_item = self.undo_log.pop();
@ -719,11 +719,11 @@ pub impl RegionVarBindings {
} }
} }
fn num_vars(&self) -> uint { fn num_vars(&mut self) -> uint {
self.var_spans.len() self.var_spans.len()
} }
fn new_region_var(&self, span: span) -> RegionVid { fn new_region_var(&mut self, span: span) -> RegionVid {
let id = self.num_vars(); let id = self.num_vars();
self.var_spans.push(span); self.var_spans.push(span);
let vid = RegionVid(id); let vid = RegionVid(id);
@ -735,13 +735,13 @@ pub impl RegionVarBindings {
return vid; return vid;
} }
fn new_skolemized(&self, br: ty::bound_region) -> Region { fn new_skolemized(&mut self, br: ty::bound_region) -> Region {
let sc = self.skolemization_count; let sc = self.skolemization_count;
self.skolemization_count += 1; self.skolemization_count += 1;
re_infer(ReSkolemized(sc, br)) re_infer(ReSkolemized(sc, br))
} }
fn new_bound(&self) -> Region { fn new_bound(&mut self) -> Region {
// Creates a fresh bound variable for use in GLB computations. // Creates a fresh bound variable for use in GLB computations.
// See discussion of GLB computation in the large comment at // See discussion of GLB computation in the large comment at
// the top of this file for more details. // the top of this file for more details.
@ -761,7 +761,7 @@ pub impl RegionVarBindings {
re_bound(br_fresh(sc)) re_bound(br_fresh(sc))
} }
fn add_constraint(&self, +constraint: Constraint, span: span) { fn add_constraint(&mut self, +constraint: Constraint, span: span) {
// cannot add constraints once regions are resolved // cannot add constraints once regions are resolved
assert self.values.is_empty(); assert self.values.is_empty();
@ -774,8 +774,10 @@ pub impl RegionVarBindings {
} }
} }
fn make_subregion(&self, span: span, fn make_subregion(&mut self,
sub: Region, sup: Region) -> cres<()> { span: span,
sub: Region,
sup: Region) -> cres<()> {
// cannot add constraints once regions are resolved // cannot add constraints once regions are resolved
assert self.values.is_empty(); assert self.values.is_empty();
@ -813,7 +815,11 @@ pub impl RegionVarBindings {
} }
} }
fn lub_regions(&self, span: span, a: Region, b: Region) -> cres<Region> { fn lub_regions(&mut self,
span: span,
a: Region,
b: Region)
-> cres<Region> {
// cannot add constraints once regions are resolved // cannot add constraints once regions are resolved
assert self.values.is_empty(); assert self.values.is_empty();
@ -835,7 +841,11 @@ pub impl RegionVarBindings {
} }
} }
fn glb_regions(&self, span: span, a: Region, b: Region) -> cres<Region> { fn glb_regions(&mut self,
span: span,
a: Region,
b: Region)
-> cres<Region> {
// cannot add constraints once regions are resolved // cannot add constraints once regions are resolved
assert self.values.is_empty(); assert self.values.is_empty();
@ -858,7 +868,7 @@ pub impl RegionVarBindings {
} }
} }
fn resolve_var(&self, rid: RegionVid) -> ty::Region { fn resolve_var(&mut self, rid: RegionVid) -> ty::Region {
debug!("RegionVarBindings: resolve_var(%?=%u)", rid, *rid); debug!("RegionVarBindings: resolve_var(%?=%u)", rid, *rid);
if self.values.is_empty() { if self.values.is_empty() {
self.tcx.sess.span_bug( self.tcx.sess.span_bug(
@ -900,14 +910,13 @@ pub impl RegionVarBindings {
} }
} }
fn combine_vars(&self, fn combine_vars(&mut self,
combines: CombineMap, combines: CombineMap,
a: Region, a: Region,
b: Region, b: Region,
span: span, span: span,
relate: fn(old_r: Region, new_r: Region) -> cres<()>) relate: &fn(old_r: Region, new_r: Region) -> cres<()>)
-> cres<Region> { -> cres<Region> {
let vars = TwoRegions { a: a, b: b }; let vars = TwoRegions { a: a, b: b };
match combines.find(&vars) { match combines.find(&vars) {
Some(c) => Ok(re_infer(ReVar(c))), Some(c) => Ok(re_infer(ReVar(c))),
@ -927,7 +936,9 @@ pub impl RegionVarBindings {
} }
} }
fn vars_created_since_snapshot(&self, snapshot: uint) -> ~[RegionVid] { fn vars_created_since_snapshot(&mut self,
snapshot: uint)
-> ~[RegionVid] {
do vec::build |push| { do vec::build |push| {
for uint::range(snapshot, self.undo_log.len()) |i| { for uint::range(snapshot, self.undo_log.len()) |i| {
match self.undo_log[i] { match self.undo_log[i] {
@ -938,7 +949,7 @@ pub impl RegionVarBindings {
} }
} }
fn tainted(&self, snapshot: uint, r0: Region) -> ~[Region] { fn tainted(&mut self, snapshot: uint, r0: Region) -> ~[Region] {
/*! /*!
* *
* Computes all regions that have been related to `r0` in any * Computes all regions that have been related to `r0` in any
@ -1022,18 +1033,18 @@ pub impl RegionVarBindings {
constraints, assuming such values can be found; if they cannot, constraints, assuming such values can be found; if they cannot,
errors are reported. errors are reported.
*/ */
fn resolve_regions(&self) { fn resolve_regions(&mut self) {
debug!("RegionVarBindings: resolve_regions()"); debug!("RegionVarBindings: resolve_regions()");
self.values.put_back(self.infer_variable_values()); self.values.put_back(self.infer_variable_values());
} }
} }
priv impl RegionVarBindings { priv impl RegionVarBindings {
fn is_subregion_of(&self, sub: Region, sup: Region) -> bool { fn is_subregion_of(&mut self, sub: Region, sup: Region) -> bool {
is_subregion_of(self.tcx.region_map, sub, sup) is_subregion_of(self.tcx.region_map, sub, sup)
} }
fn lub_concrete_regions(&self, +a: Region, +b: Region) -> Region { fn lub_concrete_regions(&mut self, +a: Region, +b: Region) -> Region {
match (a, b) { match (a, b) {
(re_static, _) | (_, re_static) => { (re_static, _) | (_, re_static) => {
re_static // nothing lives longer than static re_static // nothing lives longer than static
@ -1090,7 +1101,10 @@ priv impl RegionVarBindings {
} }
} }
fn glb_concrete_regions(&self, +a: Region, +b: Region) -> cres<Region> { fn glb_concrete_regions(&mut self,
+a: Region,
+b: Region)
-> cres<Region> {
match (a, b) { match (a, b) {
(re_static, r) | (r, re_static) => { (re_static, r) | (r, re_static) => {
// static lives longer than everything else // static lives longer than everything else
@ -1156,7 +1170,7 @@ priv impl RegionVarBindings {
} }
} }
fn report_type_error(&self, span: span, terr: &ty::type_err) { fn report_type_error(&mut self, span: span, terr: &ty::type_err) {
let terr_str = ty::type_err_to_str(self.tcx, terr); let terr_str = ty::type_err_to_str(self.tcx, terr);
self.tcx.sess.span_err(span, terr_str); self.tcx.sess.span_err(span, terr_str);
} }
@ -1164,35 +1178,23 @@ priv impl RegionVarBindings {
// ______________________________________________________________________ // ______________________________________________________________________
#[deriving_eq]
enum Direction { Incoming = 0, Outgoing = 1 } enum Direction { Incoming = 0, Outgoing = 1 }
impl Direction : cmp::Eq { #[deriving_eq]
pure fn eq(&self, other: &Direction) -> bool {
((*self) as uint) == ((*other) as uint)
}
pure fn ne(&self, other: &Direction) -> bool { !(*self).eq(other) }
}
enum Classification { Expanding, Contracting } enum Classification { Expanding, Contracting }
impl Classification : cmp::Eq {
pure fn eq(&self, other: &Classification) -> bool {
((*self) as uint) == ((*other) as uint)
}
pure fn ne(&self, other: &Classification) -> bool { !(*self).eq(other) }
}
enum GraphNodeValue { NoValue, Value(Region), ErrorValue } enum GraphNodeValue { NoValue, Value(Region), ErrorValue }
struct GraphNode { struct GraphNode {
span: span, span: span,
mut classification: Classification, classification: Classification,
mut value: GraphNodeValue, value: GraphNodeValue,
head_edge: [mut uint * 2], // FIXME(#3226)--should not need mut head_edge: [uint * 2],
} }
struct GraphEdge { struct GraphEdge {
next_edge: [mut uint * 2], // FIXME(#3226)--should not need mut next_edge: [uint * 2],
constraint: Constraint, constraint: Constraint,
span: span, span: span,
} }
@ -1214,14 +1216,14 @@ fn TwoRegionsMap() -> TwoRegionsMap {
} }
impl RegionVarBindings { impl RegionVarBindings {
fn infer_variable_values(&self) -> ~[GraphNodeValue] { fn infer_variable_values(&mut self) -> ~[GraphNodeValue] {
let graph = self.construct_graph(); let mut graph = self.construct_graph();
self.expansion(&graph); self.expansion(&mut graph);
self.contraction(&graph); self.contraction(&mut graph);
self.extract_values_and_report_conflicts(&graph) self.extract_values_and_report_conflicts(&mut graph)
} }
fn construct_graph(&self) -> Graph { fn construct_graph(&mut self) -> Graph {
let num_vars = self.num_vars(); let num_vars = self.num_vars();
let num_edges = self.constraints.len(); let num_edges = self.constraints.len();
@ -1234,7 +1236,7 @@ impl RegionVarBindings {
classification: Contracting, classification: Contracting,
span: self.var_spans[var_idx], span: self.var_spans[var_idx],
value: NoValue, value: NoValue,
head_edge: [mut uint::max_value, uint::max_value] head_edge: [uint::max_value, uint::max_value]
} }
}); });
@ -1242,7 +1244,7 @@ impl RegionVarBindings {
let mut edges = vec::with_capacity(num_edges); let mut edges = vec::with_capacity(num_edges);
for self.constraints.each_ref |constraint, span| { for self.constraints.each_ref |constraint, span| {
edges.push(GraphEdge { edges.push(GraphEdge {
next_edge: [mut uint::max_value, uint::max_value], next_edge: [uint::max_value, uint::max_value],
constraint: *constraint, constraint: *constraint,
span: *span span: *span
}); });
@ -1282,18 +1284,18 @@ impl RegionVarBindings {
} }
} }
fn expansion(&self, graph: &Graph) { fn expansion(&mut self, graph: &mut Graph) {
do self.iterate_until_fixed_point(~"Expansion", graph) |edge| { do self.iterate_until_fixed_point(~"Expansion", graph) |edge| {
match edge.constraint { match edge.constraint {
ConstrainRegSubVar(copy a_region, copy b_vid) => { ConstrainRegSubVar(copy a_region, copy b_vid) => {
let b_node = &graph.nodes[*b_vid]; let b_node = &mut graph.nodes[*b_vid];
self.expand_node(a_region, b_vid, b_node) self.expand_node(a_region, b_vid, b_node)
} }
ConstrainVarSubVar(copy a_vid, copy b_vid) => { ConstrainVarSubVar(copy a_vid, copy b_vid) => {
match graph.nodes[*a_vid].value { match graph.nodes[*a_vid].value {
NoValue | ErrorValue => false, NoValue | ErrorValue => false,
Value(copy a_region) => { Value(copy a_region) => {
let b_node = &graph.nodes[*b_vid]; let b_node = &mut graph.nodes[*b_vid];
self.expand_node(a_region, b_vid, b_node) self.expand_node(a_region, b_vid, b_node)
} }
} }
@ -1306,10 +1308,11 @@ impl RegionVarBindings {
} }
} }
fn expand_node(&self, fn expand_node(&mut self,
a_region: Region, a_region: Region,
b_vid: RegionVid, b_vid: RegionVid,
b_node: &GraphNode) -> bool { b_node: &mut GraphNode)
-> bool {
debug!("expand_node(%?, %? == %?)", debug!("expand_node(%?, %? == %?)",
a_region, b_vid, b_node.value); a_region, b_vid, b_node.value);
@ -1341,7 +1344,7 @@ impl RegionVarBindings {
} }
} }
fn contraction(&self, graph: &Graph) { fn contraction(&mut self, graph: &mut Graph) {
do self.iterate_until_fixed_point(~"Contraction", graph) |edge| { do self.iterate_until_fixed_point(~"Contraction", graph) |edge| {
match edge.constraint { match edge.constraint {
ConstrainRegSubVar(*) => { ConstrainRegSubVar(*) => {
@ -1352,23 +1355,24 @@ impl RegionVarBindings {
match graph.nodes[*b_vid].value { match graph.nodes[*b_vid].value {
NoValue | ErrorValue => false, NoValue | ErrorValue => false,
Value(copy b_region) => { Value(copy b_region) => {
let a_node = &graph.nodes[*a_vid]; let a_node = &mut graph.nodes[*a_vid];
self.contract_node(a_vid, a_node, b_region) self.contract_node(a_vid, a_node, b_region)
} }
} }
} }
ConstrainVarSubReg(copy a_vid, copy b_region) => { ConstrainVarSubReg(copy a_vid, copy b_region) => {
let a_node = &graph.nodes[*a_vid]; let a_node = &mut graph.nodes[*a_vid];
self.contract_node(a_vid, a_node, b_region) self.contract_node(a_vid, a_node, b_region)
} }
} }
} }
} }
fn contract_node(&self, fn contract_node(&mut self,
a_vid: RegionVid, a_vid: RegionVid,
a_node: &GraphNode, a_node: &mut GraphNode,
b_region: Region) -> bool { b_region: Region)
-> bool {
debug!("contract_node(%? == %?/%?, %?)", debug!("contract_node(%? == %?/%?, %?)",
a_vid, a_node.value, a_node.classification, b_region); a_vid, a_node.value, a_node.classification, b_region);
@ -1395,11 +1399,12 @@ impl RegionVarBindings {
} }
}; };
fn check_node(self: &RegionVarBindings, fn check_node(+self: &mut RegionVarBindings,
a_vid: RegionVid, a_vid: RegionVid,
a_node: &GraphNode, +a_node: &mut GraphNode,
a_region: Region, a_region: Region,
b_region: Region) -> bool { b_region: Region)
-> bool {
if !self.is_subregion_of(a_region, b_region) { if !self.is_subregion_of(a_region, b_region) {
debug!("Setting %? to ErrorValue: %? not subregion of %?", debug!("Setting %? to ErrorValue: %? not subregion of %?",
a_vid, a_region, b_region); a_vid, a_region, b_region);
@ -1408,11 +1413,12 @@ impl RegionVarBindings {
false false
} }
fn adjust_node(self: &RegionVarBindings, fn adjust_node(+self: &mut RegionVarBindings,
a_vid: RegionVid, a_vid: RegionVid,
a_node: &GraphNode, +a_node: &mut GraphNode,
a_region: Region, a_region: Region,
b_region: Region) -> bool { b_region: Region)
-> bool {
match self.glb_concrete_regions(a_region, b_region) { match self.glb_concrete_regions(a_region, b_region) {
Ok(glb) => { Ok(glb) => {
if glb == a_region { if glb == a_region {
@ -1432,12 +1438,10 @@ impl RegionVarBindings {
} }
} }
fn iterate_until_fixed_point( fn iterate_until_fixed_point(&mut self,
&self,
tag: ~str, tag: ~str,
graph: &Graph, graph: &mut Graph,
body: fn(edge: &GraphEdge) -> bool) body: &fn(edge: &GraphEdge) -> bool) {
{
let mut iteration = 0; let mut iteration = 0;
let mut changed = true; let mut changed = true;
let num_edges = graph.edges.len(); let num_edges = graph.edges.len();
@ -1454,10 +1458,9 @@ impl RegionVarBindings {
debug!("---- %s Complete after %u iteration(s)", tag, iteration); debug!("---- %s Complete after %u iteration(s)", tag, iteration);
} }
fn extract_values_and_report_conflicts( fn extract_values_and_report_conflicts(&mut self,
&self, graph: &mut Graph)
graph: &Graph) -> ~[GraphNodeValue] -> ~[GraphNodeValue] {
{
let dup_map = TwoRegionsMap(); let dup_map = TwoRegionsMap();
graph.nodes.mapi(|idx, node| { graph.nodes.mapi(|idx, node| {
match node.value { match node.value {
@ -1512,16 +1515,17 @@ impl RegionVarBindings {
} }
// Used to suppress reporting the same basic error over and over // Used to suppress reporting the same basic error over and over
fn is_reported(&self, fn is_reported(&mut self,
dup_map: TwoRegionsMap, dup_map: TwoRegionsMap,
r_a: Region, r_a: Region,
r_b: Region) -> bool { r_b: Region)
-> bool {
let key = TwoRegions { a: r_a, b: r_b }; let key = TwoRegions { a: r_a, b: r_b };
!dup_map.insert(key, ()) !dup_map.insert(key, ())
} }
fn report_error_for_expanding_node(&self, fn report_error_for_expanding_node(&mut self,
graph: &Graph, graph: &mut Graph,
dup_map: TwoRegionsMap, dup_map: TwoRegionsMap,
node_idx: RegionVid) { node_idx: RegionVid) {
// Errors in expanding nodes result from a lower-bound that is // Errors in expanding nodes result from a lower-bound that is
@ -1573,8 +1577,8 @@ impl RegionVarBindings {
} }
} }
fn report_error_for_contracting_node(&self, fn report_error_for_contracting_node(&mut self,
graph: &Graph, graph: &mut Graph,
dup_map: TwoRegionsMap, dup_map: TwoRegionsMap,
node_idx: RegionVid) { node_idx: RegionVid) {
// Errors in contracting nodes result from two upper-bounds // Errors in contracting nodes result from two upper-bounds
@ -1627,10 +1631,11 @@ impl RegionVarBindings {
} }
} }
fn collect_concrete_regions(&self, fn collect_concrete_regions(&mut self,
graph: &Graph, graph: &mut Graph,
orig_node_idx: RegionVid, orig_node_idx: RegionVid,
dir: Direction) -> ~[SpannedRegion] { dir: Direction)
-> ~[SpannedRegion] {
let set = HashMap(); let set = HashMap();
let mut stack = ~[orig_node_idx]; let mut stack = ~[orig_node_idx];
set.insert(*orig_node_idx, ()); set.insert(*orig_node_idx, ());
@ -1670,8 +1675,8 @@ impl RegionVarBindings {
return result; return result;
} }
fn each_edge(&self, fn each_edge(&mut self,
graph: &Graph, graph: &mut Graph,
node_idx: RegionVid, node_idx: RegionVid,
dir: Direction, dir: Direction,
op: fn(edge: &GraphEdge) -> bool) { op: fn(edge: &GraphEdge) -> bool) {
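Region inference builds a constraint graph and runs `expansion` and `contraction` passes through `iterate_until_fixed_point`, which re-applies a per-edge body until a full sweep changes nothing. A small modern-Rust sketch of that loop, with a simplified graph and a value-propagation body standing in for the real region lattice:

struct GraphEdge { from: usize, to: usize }

struct Graph {
    edges: Vec<GraphEdge>,
    values: Vec<u32>,
}

// Repeatedly applies `body` to every edge until a full pass over the
// edge list makes no change; `body` returns true when it changed a node.
fn iterate_until_fixed_point(
    tag: &str,
    graph: &mut Graph,
    mut body: impl FnMut(&mut Graph, usize) -> bool,
) {
    let mut iteration = 0;
    let mut changed = true;
    while changed {
        changed = false;
        iteration += 1;
        for edge_idx in 0..graph.edges.len() {
            if body(graph, edge_idx) {
                changed = true;
            }
        }
    }
    println!("{} complete after {} iteration(s)", tag, iteration);
}

fn main() {
    let mut graph = Graph {
        edges: vec![GraphEdge { from: 0, to: 1 }, GraphEdge { from: 1, to: 2 }],
        values: vec![5, 0, 0],
    };
    // Expansion-style pass: propagate the larger value along each edge.
    iterate_until_fixed_point("Expansion", &mut graph, |g, i| {
        let (from, to) = (g.edges[i].from, g.edges[i].to);
        if g.values[from] > g.values[to] {
            g.values[to] = g.values[from];
            true
        } else {
            false
        }
    });
    assert_eq!(graph.values, vec![5, 5, 5]);
}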

View file

@ -83,14 +83,14 @@ pub const resolve_and_force_all_but_regions: uint =
(resolve_all | force_all) & not_regions; (resolve_all | force_all) & not_regions;
pub struct ResolveState { pub struct ResolveState {
infcx: @InferCtxt, infcx: @mut InferCtxt,
modes: uint, modes: uint,
mut err: Option<fixup_err>, err: Option<fixup_err>,
mut v_seen: ~[TyVid], v_seen: ~[TyVid],
mut type_depth: uint type_depth: uint
} }
pub fn resolver(infcx: @InferCtxt, modes: uint) -> ResolveState { pub fn resolver(infcx: @mut InferCtxt, modes: uint) -> ResolveState {
ResolveState { ResolveState {
infcx: infcx, infcx: infcx,
modes: modes, modes: modes,
@ -101,11 +101,11 @@ pub fn resolver(infcx: @InferCtxt, modes: uint) -> ResolveState {
} }
pub impl ResolveState { pub impl ResolveState {
fn should(&self, mode: uint) -> bool { fn should(&mut self, mode: uint) -> bool {
(self.modes & mode) == mode (self.modes & mode) == mode
} }
fn resolve_type_chk(&self, typ: ty::t) -> fres<ty::t> { fn resolve_type_chk(&mut self, typ: ty::t) -> fres<ty::t> {
self.err = None; self.err = None;
debug!("Resolving %s (modes=%x)", debug!("Resolving %s (modes=%x)",
@ -130,7 +130,7 @@ pub impl ResolveState {
} }
} }
fn resolve_region_chk(&self, orig: ty::Region) -> fres<ty::Region> { fn resolve_region_chk(&mut self, orig: ty::Region) -> fres<ty::Region> {
self.err = None; self.err = None;
let resolved = indent(|| self.resolve_region(orig) ); let resolved = indent(|| self.resolve_region(orig) );
match self.err { match self.err {
@ -139,7 +139,7 @@ pub impl ResolveState {
} }
} }
fn resolve_type(&self, typ: ty::t) -> ty::t { fn resolve_type(&mut self, typ: ty::t) -> ty::t {
debug!("resolve_type(%s)", typ.inf_str(self.infcx)); debug!("resolve_type(%s)", typ.inf_str(self.infcx));
let _i = indenter(); let _i = indenter();
@ -181,7 +181,7 @@ pub impl ResolveState {
} }
} }
fn resolve_region(&self, orig: ty::Region) -> ty::Region { fn resolve_region(&mut self, orig: ty::Region) -> ty::Region {
debug!("Resolve_region(%s)", orig.inf_str(self.infcx)); debug!("Resolve_region(%s)", orig.inf_str(self.infcx));
match orig { match orig {
ty::re_infer(ty::ReVar(rid)) => self.resolve_region_var(rid), ty::re_infer(ty::ReVar(rid)) => self.resolve_region_var(rid),
@ -189,14 +189,14 @@ pub impl ResolveState {
} }
} }
fn resolve_region_var(&self, rid: RegionVid) -> ty::Region { fn resolve_region_var(&mut self, rid: RegionVid) -> ty::Region {
if !self.should(resolve_rvar) { if !self.should(resolve_rvar) {
return ty::re_infer(ty::ReVar(rid)); return ty::re_infer(ty::ReVar(rid));
} }
self.infcx.region_vars.resolve_var(rid) self.infcx.region_vars.resolve_var(rid)
} }
fn assert_not_rvar(&self, rid: RegionVid, r: ty::Region) { fn assert_not_rvar(&mut self, rid: RegionVid, r: ty::Region) {
match r { match r {
ty::re_infer(ty::ReVar(rid2)) => { ty::re_infer(ty::ReVar(rid2)) => {
self.err = Some(region_var_bound_by_region_var(rid, rid2)); self.err = Some(region_var_bound_by_region_var(rid, rid2));
@ -205,7 +205,7 @@ pub impl ResolveState {
} }
} }
fn resolve_ty_var(&self, vid: TyVid) -> ty::t { fn resolve_ty_var(&mut self, vid: TyVid) -> ty::t {
if vec::contains(self.v_seen, &vid) { if vec::contains(self.v_seen, &vid) {
self.err = Some(cyclic_ty(vid)); self.err = Some(cyclic_ty(vid));
return ty::mk_var(self.infcx.tcx, vid); return ty::mk_var(self.infcx.tcx, vid);
@ -238,7 +238,7 @@ pub impl ResolveState {
} }
} }
fn resolve_int_var(&self, vid: IntVid) -> ty::t { fn resolve_int_var(&mut self, vid: IntVid) -> ty::t {
if !self.should(resolve_ivar) { if !self.should(resolve_ivar) {
return ty::mk_int_var(self.infcx.tcx, vid); return ty::mk_int_var(self.infcx.tcx, vid);
} }
@ -261,7 +261,7 @@ pub impl ResolveState {
} }
} }
fn resolve_float_var(&self, vid: FloatVid) -> ty::t { fn resolve_float_var(&mut self, vid: FloatVid) -> ty::t {
if !self.should(resolve_fvar) { if !self.should(resolve_fvar) {
return ty::mk_float_var(self.infcx.tcx, vid); return ty::mk_float_var(self.infcx.tcx, vid);
} }
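ResolveState drives what gets resolved with a bitmask of mode flags; `should` simply checks that every bit of the requested mode is set, as in `(self.modes & mode) == mode`. A minimal sketch of that check, with illustrative flag values rather than the crate's real constants:

const RESOLVE_TVAR: u32 = 0b0001;
const RESOLVE_IVAR: u32 = 0b0010;
const RESOLVE_FVAR: u32 = 0b0100;
const RESOLVE_RVAR: u32 = 0b1000;
const RESOLVE_ALL: u32 = RESOLVE_TVAR | RESOLVE_IVAR | RESOLVE_FVAR | RESOLVE_RVAR;

struct ResolveState { modes: u32 }

impl ResolveState {
    // True only if every bit in `mode` is also set in `self.modes`.
    fn should(&self, mode: u32) -> bool {
        (self.modes & mode) == mode
    }
}

fn main() {
    let rs = ResolveState { modes: RESOLVE_TVAR | RESOLVE_IVAR };
    assert!(rs.should(RESOLVE_IVAR));
    assert!(!rs.should(RESOLVE_ALL));
}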

View file

@ -33,7 +33,7 @@ pub fn macros() {
pub enum Sub = CombineFields; // "subtype", "subregion" etc pub enum Sub = CombineFields; // "subtype", "subregion" etc
pub impl Sub: Combine { pub impl Sub: Combine {
fn infcx() -> @InferCtxt { self.infcx } fn infcx() -> @mut InferCtxt { self.infcx }
fn tag() -> ~str { ~"sub" } fn tag() -> ~str { ~"sub" }
fn a_is_expected() -> bool { self.a_is_expected } fn a_is_expected() -> bool { self.a_is_expected }
fn span() -> span { self.span } fn span() -> span { self.span }

View file

@ -28,7 +28,7 @@ pub enum VarValue<V, T> {
pub struct ValsAndBindings<V, T> { pub struct ValsAndBindings<V, T> {
vals: SmallIntMap<VarValue<V, T>>, vals: SmallIntMap<VarValue<V, T>>,
mut bindings: ~[(V, VarValue<V, T>)], bindings: ~[(V, VarValue<V, T>)],
} }
pub struct Node<V, T> { pub struct Node<V, T> {
@ -38,15 +38,14 @@ pub struct Node<V, T> {
} }
pub trait UnifyVid<T> { pub trait UnifyVid<T> {
static fn appropriate_vals_and_bindings(infcx: &v/InferCtxt) static fn appropriate_vals_and_bindings(infcx: &v/mut InferCtxt)
-> &v/ValsAndBindings<Self, T>; -> &v/mut ValsAndBindings<Self, T>;
} }
pub impl InferCtxt { pub impl InferCtxt {
fn get<T:Copy, V:Copy Eq Vid UnifyVid<T>>( fn get<T:Copy, V:Copy Eq Vid UnifyVid<T>>(&mut self,
&self, +vid: V)
+vid: V) -> Node<V, T> -> Node<V, T> {
{
/*! /*!
* *
* Find the root node for `vid`. This uses the standard * Find the root node for `vid`. This uses the standard
@ -79,10 +78,9 @@ pub impl InferCtxt {
} }
fn set<T:Copy InferStr, V:Copy Vid ToStr UnifyVid<T>>( fn set<T:Copy InferStr, V:Copy Vid ToStr UnifyVid<T>>(
&self, &mut self,
+vid: V, +vid: V,
+new_v: VarValue<V, T>) +new_v: VarValue<V, T>) {
{
/*! /*!
* *
* Sets the value for `vid` to `new_v`. `vid` MUST be a root node! * Sets the value for `vid` to `new_v`. `vid` MUST be a root node!
@ -98,7 +96,7 @@ pub impl InferCtxt {
} }
fn unify<T:Copy InferStr, V:Copy Vid ToStr UnifyVid<T>, R>( fn unify<T:Copy InferStr, V:Copy Vid ToStr UnifyVid<T>, R>(
&self, &mut self,
node_a: &Node<V, T>, node_a: &Node<V, T>,
node_b: &Node<V, T>, node_b: &Node<V, T>,
op: &fn(new_root: V, new_rank: uint) -> R op: &fn(new_root: V, new_rank: uint) -> R
@ -154,11 +152,11 @@ pub fn mk_err<T: SimplyUnifiable>(+a_is_expected: bool,
pub impl InferCtxt { pub impl InferCtxt {
fn simple_vars<T:Copy Eq InferStr SimplyUnifiable, fn simple_vars<T:Copy Eq InferStr SimplyUnifiable,
V:Copy Eq Vid ToStr UnifyVid<Option<T>>>( V:Copy Eq Vid ToStr UnifyVid<Option<T>>>(
&self, &mut self,
+a_is_expected: bool, +a_is_expected: bool,
+a_id: V, +a_id: V,
+b_id: V) -> ures +b_id: V)
{ -> ures {
/*! /*!
* *
* Unifies two simple variables. Because simple variables do * Unifies two simple variables. Because simple variables do
@ -193,10 +191,11 @@ pub impl InferCtxt {
fn simple_var_t<T:Copy Eq InferStr SimplyUnifiable, fn simple_var_t<T:Copy Eq InferStr SimplyUnifiable,
V:Copy Eq Vid ToStr UnifyVid<Option<T>>>( V:Copy Eq Vid ToStr UnifyVid<Option<T>>>(
&mut self,
+a_is_expected: bool, +a_is_expected: bool,
+a_id: V, +a_id: V,
+b: T) -> ures +b: T)
{ -> ures {
/*! /*!
* *
* Sets the value of the variable `a_id` to `b`. Because * Sets the value of the variable `a_id` to `b`. Because
@ -227,41 +226,36 @@ pub impl InferCtxt {
// ______________________________________________________________________ // ______________________________________________________________________
pub impl ty::TyVid : UnifyVid<Bounds<ty::t>> { pub impl ty::TyVid : UnifyVid<Bounds<ty::t>> {
static fn appropriate_vals_and_bindings(infcx: &v/InferCtxt) static fn appropriate_vals_and_bindings(infcx: &v/mut InferCtxt)
-> &v/ValsAndBindings<ty::TyVid, Bounds<ty::t>> -> &v/mut ValsAndBindings<ty::TyVid, Bounds<ty::t>> {
{ return &mut infcx.ty_var_bindings;
return &infcx.ty_var_bindings;
} }
} }
pub impl ty::IntVid : UnifyVid<Option<IntVarValue>> { pub impl ty::IntVid : UnifyVid<Option<IntVarValue>> {
static fn appropriate_vals_and_bindings(infcx: &v/InferCtxt) static fn appropriate_vals_and_bindings(infcx: &v/mut InferCtxt)
-> &v/ValsAndBindings<ty::IntVid, Option<IntVarValue>> -> &v/mut ValsAndBindings<ty::IntVid, Option<IntVarValue>> {
{ return &mut infcx.int_var_bindings;
return &infcx.int_var_bindings;
} }
} }
pub impl IntVarValue : SimplyUnifiable { pub impl IntVarValue : SimplyUnifiable {
static fn to_type_err(err: expected_found<IntVarValue>) static fn to_type_err(err: expected_found<IntVarValue>)
-> ty::type_err -> ty::type_err {
{
return ty::terr_int_mismatch(err); return ty::terr_int_mismatch(err);
} }
} }
pub impl ty::FloatVid : UnifyVid<Option<ast::float_ty>> { pub impl ty::FloatVid : UnifyVid<Option<ast::float_ty>> {
static fn appropriate_vals_and_bindings(infcx: &v/InferCtxt) static fn appropriate_vals_and_bindings(infcx: &v/mut InferCtxt)
-> &v/ValsAndBindings<ty::FloatVid, Option<ast::float_ty>> -> &v/mut ValsAndBindings<ty::FloatVid, Option<ast::float_ty>> {
{ return &mut infcx.float_var_bindings;
return &infcx.float_var_bindings;
} }
} }
pub impl ast::float_ty : SimplyUnifiable { pub impl ast::float_ty : SimplyUnifiable {
static fn to_type_err(err: expected_found<ast::float_ty>) static fn to_type_err(err: expected_found<ast::float_ty>)
-> ty::type_err -> ty::type_err {
{
return ty::terr_float_mismatch(err); return ty::terr_float_mismatch(err);
} }
} }
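unify.rs stores each inference variable as either a redirect to another variable or a root carrying a value and a rank, and `get` chases redirects until it reaches the root. A compact sketch of that lookup, with union-by-rank and path compression left out and an `i32` payload standing in for the real bounds:

enum VarValue {
    Redirect(usize),
    Root(Option<i32>, u32), // (current value, rank)
}

struct ValsAndBindings {
    vals: Vec<VarValue>,
}

impl ValsAndBindings {
    // Follow Redirect links until a Root is found; returns the root
    // index together with a copy of its value and rank.
    fn get(&self, mut vid: usize) -> (usize, Option<i32>, u32) {
        loop {
            match &self.vals[vid] {
                VarValue::Redirect(next) => vid = *next,
                VarValue::Root(value, rank) => return (vid, *value, *rank),
            }
        }
    }
}

fn main() {
    let vb = ValsAndBindings {
        vals: vec![
            VarValue::Redirect(1),
            VarValue::Redirect(2),
            VarValue::Root(Some(7), 1),
        ],
    };
    assert_eq!(vb.get(0), (2, Some(7), 1));
}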

View file

@ -196,7 +196,7 @@ pub impl vtable_origin {
pub type vtable_map = HashMap<ast::node_id, vtable_res>; pub type vtable_map = HashMap<ast::node_id, vtable_res>;
struct crate_ctxt__ { pub struct CrateCtxt {
// A mapping from method call sites to traits that have that method. // A mapping from method call sites to traits that have that method.
trait_map: resolve::TraitMap, trait_map: resolve::TraitMap,
method_map: method_map, method_map: method_map,
@ -205,10 +205,6 @@ struct crate_ctxt__ {
tcx: ty::ctxt tcx: ty::ctxt
} }
pub enum crate_ctxt {
crate_ctxt_(crate_ctxt__)
}
// Functions that write types into the node type table // Functions that write types into the node type table
pub fn write_ty_to_tcx(tcx: ty::ctxt, node_id: ast::node_id, ty: ty::t) { pub fn write_ty_to_tcx(tcx: ty::ctxt, node_id: ast::node_id, ty: ty::t) {
debug!("write_ty_to_tcx(%d, %s)", node_id, ppaux::ty_to_str(tcx, ty)); debug!("write_ty_to_tcx(%d, %s)", node_id, ppaux::ty_to_str(tcx, ty));
@ -233,7 +229,7 @@ pub fn lookup_def_tcx(tcx: ty::ctxt, sp: span, id: ast::node_id) -> ast::def {
} }
} }
pub fn lookup_def_ccx(ccx: @crate_ctxt, sp: span, id: ast::node_id) pub fn lookup_def_ccx(ccx: @mut CrateCtxt, sp: span, id: ast::node_id)
-> ast::def { -> ast::def {
lookup_def_tcx(ccx.tcx, sp, id) lookup_def_tcx(ccx.tcx, sp, id)
} }
@ -244,7 +240,7 @@ pub fn no_params(t: ty::t) -> ty::ty_param_bounds_and_ty {
pub fn require_same_types( pub fn require_same_types(
tcx: ty::ctxt, tcx: ty::ctxt,
maybe_infcx: Option<@infer::InferCtxt>, maybe_infcx: Option<@mut infer::InferCtxt>,
t1_is_expected: bool, t1_is_expected: bool,
span: span, span: span,
t1: ty::t, t1: ty::t,
@ -317,10 +313,9 @@ fn arg_is_argv_ty(tcx: ty::ctxt, a: ty::arg) -> bool {
} }
} }
fn check_main_fn_ty(ccx: @crate_ctxt, fn check_main_fn_ty(ccx: @mut CrateCtxt,
main_id: ast::node_id, main_id: ast::node_id,
main_span: span) { main_span: span) {
let tcx = ccx.tcx; let tcx = ccx.tcx;
let main_t = ty::node_id_to_type(tcx, main_id); let main_t = ty::node_id_to_type(tcx, main_id);
match ty::get(main_t).sty { match ty::get(main_t).sty {
@ -360,10 +355,10 @@ fn check_main_fn_ty(ccx: @crate_ctxt,
} }
} }
fn check_for_main_fn(ccx: @crate_ctxt) { fn check_for_main_fn(ccx: @mut CrateCtxt) {
let tcx = ccx.tcx; let tcx = ccx.tcx;
if !tcx.sess.building_library { if !*tcx.sess.building_library {
match copy tcx.sess.main_fn { match *tcx.sess.main_fn {
Some((id, sp)) => check_main_fn_ty(ccx, id, sp), Some((id, sp)) => check_main_fn_ty(ccx, id, sp),
None => tcx.sess.err(~"main function not found") None => tcx.sess.err(~"main function not found")
} }
@ -374,14 +369,13 @@ pub fn check_crate(tcx: ty::ctxt,
trait_map: resolve::TraitMap, trait_map: resolve::TraitMap,
crate: @ast::crate) crate: @ast::crate)
-> (method_map, vtable_map) { -> (method_map, vtable_map) {
let ccx = @mut CrateCtxt {
let ccx = @crate_ctxt_(crate_ctxt__ {
trait_map: trait_map, trait_map: trait_map,
method_map: oldmap::HashMap(), method_map: oldmap::HashMap(),
vtable_map: oldmap::HashMap(), vtable_map: oldmap::HashMap(),
coherence_info: @coherence::CoherenceInfo(), coherence_info: @coherence::CoherenceInfo(),
tcx: tcx tcx: tcx
}); };
collect::collect_item_types(ccx, crate); collect::collect_item_types(ccx, crate);
coherence::check_coherence(ccx, crate); coherence::check_coherence(ccx, crate);

View file

@ -19,39 +19,40 @@ use syntax::codemap::span;
use syntax::parse::token::special_idents; use syntax::parse::token::special_idents;
pub trait region_scope { pub trait region_scope {
fn anon_region(span: span) -> Result<ty::Region, ~str>; pure fn anon_region(span: span) -> Result<ty::Region, ~str>;
fn self_region(span: span) -> Result<ty::Region, ~str>; pure fn self_region(span: span) -> Result<ty::Region, ~str>;
fn named_region(span: span, id: ast::ident) -> Result<ty::Region, ~str>; pure fn named_region(span: span, id: ast::ident)
-> Result<ty::Region, ~str>;
} }
pub enum empty_rscope { empty_rscope } pub enum empty_rscope { empty_rscope }
pub impl empty_rscope: region_scope { pub impl empty_rscope: region_scope {
fn anon_region(_span: span) -> Result<ty::Region, ~str> { pure fn anon_region(_span: span) -> Result<ty::Region, ~str> {
result::Ok(ty::re_static) result::Ok(ty::re_static)
} }
fn self_region(_span: span) -> Result<ty::Region, ~str> { pure fn self_region(_span: span) -> Result<ty::Region, ~str> {
result::Err(~"only the static region is allowed here") result::Err(~"only the static region is allowed here")
} }
fn named_region(_span: span, _id: ast::ident) pure fn named_region(_span: span, _id: ast::ident)
-> Result<ty::Region, ~str> -> Result<ty::Region, ~str> {
{
result::Err(~"only the static region is allowed here") result::Err(~"only the static region is allowed here")
} }
} }
pub enum type_rscope = Option<ty::region_variance>; pub enum type_rscope = Option<ty::region_variance>;
pub impl type_rscope: region_scope { pub impl type_rscope: region_scope {
fn anon_region(_span: span) -> Result<ty::Region, ~str> { pure fn anon_region(_span: span) -> Result<ty::Region, ~str> {
match *self { match *self {
Some(_) => result::Ok(ty::re_bound(ty::br_self)), Some(_) => result::Ok(ty::re_bound(ty::br_self)),
None => result::Err(~"to use region types here, the containing \ None => result::Err(~"to use region types here, the containing \
type must be declared with a region bound") type must be declared with a region bound")
} }
} }
fn self_region(span: span) -> Result<ty::Region, ~str> { pure fn self_region(span: span) -> Result<ty::Region, ~str> {
self.anon_region(span) self.anon_region(span)
} }
fn named_region(span: span, id: ast::ident) -> Result<ty::Region, ~str> { pure fn named_region(span: span, id: ast::ident)
-> Result<ty::Region, ~str> {
do empty_rscope.named_region(span, id).chain_err |_e| { do empty_rscope.named_region(span, id).chain_err |_e| {
result::Err(~"named regions other than `self` are not \ result::Err(~"named regions other than `self` are not \
allowed as part of a type declaration") allowed as part of a type declaration")
@ -73,36 +74,42 @@ pub fn in_anon_rscope<RS: region_scope Copy Durable>(self: RS, r: ty::Region)
@anon_rscope({anon: r, base: self as region_scope}) @anon_rscope({anon: r, base: self as region_scope})
} }
pub impl @anon_rscope: region_scope { pub impl @anon_rscope: region_scope {
fn anon_region(_span: span) -> Result<ty::Region, ~str> { pure fn anon_region(_span: span) -> Result<ty::Region, ~str> {
result::Ok(self.anon) result::Ok(self.anon)
} }
fn self_region(span: span) -> Result<ty::Region, ~str> { pure fn self_region(span: span) -> Result<ty::Region, ~str> {
self.base.self_region(span) self.base.self_region(span)
} }
fn named_region(span: span, id: ast::ident) -> Result<ty::Region, ~str> { pure fn named_region(span: span, id: ast::ident)
-> Result<ty::Region, ~str> {
self.base.named_region(span, id) self.base.named_region(span, id)
} }
} }
pub struct binding_rscope { pub struct binding_rscope {
base: region_scope, base: region_scope,
mut anon_bindings: uint, anon_bindings: uint,
} }
pub fn in_binding_rscope<RS: region_scope Copy Durable>(self: RS) pub fn in_binding_rscope<RS: region_scope Copy Durable>(self: RS)
-> @binding_rscope { -> @mut binding_rscope {
let base = self as region_scope; let base = self as region_scope;
@binding_rscope { base: base, anon_bindings: 0 } @mut binding_rscope { base: base, anon_bindings: 0 }
} }
pub impl @binding_rscope: region_scope { pub impl @mut binding_rscope: region_scope {
fn anon_region(_span: span) -> Result<ty::Region, ~str> { pure fn anon_region(_span: span) -> Result<ty::Region, ~str> {
// XXX: Unsafe to work around purity
unsafe {
let idx = self.anon_bindings; let idx = self.anon_bindings;
self.anon_bindings += 1; self.anon_bindings += 1;
result::Ok(ty::re_bound(ty::br_anon(idx))) result::Ok(ty::re_bound(ty::br_anon(idx)))
} }
fn self_region(span: span) -> Result<ty::Region, ~str> { }
pure fn self_region(span: span) -> Result<ty::Region, ~str> {
self.base.self_region(span) self.base.self_region(span)
} }
fn named_region(span: span, id: ast::ident) -> Result<ty::Region, ~str> { pure fn named_region(span: span, id: ast::ident)
-> Result<ty::Region, ~str> {
do self.base.named_region(span, id).chain_err |_e| { do self.base.named_region(span, id).chain_err |_e| {
result::Ok(ty::re_bound(ty::br_named(id))) result::Ok(ty::re_bound(ty::br_named(id)))
} }
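Editorial note (not part of the diff): the `unsafe` block that `anon_region` gains above exists only so a `pure fn` can bump the `anon_bindings` counter now that the field is no longer `mut`. Purity is long gone from Rust; the closest modern analogue of that pattern is interior mutability through `Cell`, sketched below with invented names.

use std::cell::Cell;

// Hypothetical stand-in for binding_rscope: hands out fresh anonymous
// region indices without needing `&mut self`.
struct BindingScope {
    anon_bindings: Cell<usize>,
}

impl BindingScope {
    fn anon_region(&self) -> usize {
        // Bump the counter behind a shared reference -- the modern
        // counterpart of mutating a field inside a `pure fn`.
        let idx = self.anon_bindings.get();
        self.anon_bindings.set(idx + 1);
        idx
    }
}

fn main() {
    let scope = BindingScope { anon_bindings: Cell::new(0) };
    assert_eq!(scope.anon_region(), 0);
    assert_eq!(scope.anon_region(), 1);
}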

View file

@ -209,7 +209,7 @@ pub fn describe_debug_flags() {
} }
} }
pub fn run_compiler(args: &~[~str], demitter: diagnostic::emitter) { pub fn run_compiler(args: &~[~str], demitter: diagnostic::Emitter) {
// Don't display log spew by default. Can override with RUST_LOG. // Don't display log spew by default. Can override with RUST_LOG.
logging::console_off(); logging::console_off();
@ -320,7 +320,7 @@ diagnostic emitter which records when we hit a fatal error. If the task
fails without recording a fatal error then we've encountered a compiler fails without recording a fatal error then we've encountered a compiler
bug and need to present an error. bug and need to present an error.
*/ */
pub fn monitor(+f: fn~(diagnostic::emitter)) { pub fn monitor(+f: fn~(diagnostic::Emitter)) {
use core::pipes::*; use core::pipes::*;
use std::cell::Cell; use std::cell::Cell;
let (p, ch) = stream(); let (p, ch) = stream();
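Editorial note (not part of the diff): the doc comment above describes the `monitor` idea — run the compiler body with a wrapped emitter that records whether a fatal diagnostic was ever issued, and treat a failure without one as a compiler bug. A rough modern-Rust sketch of that shape, with every name invented for illustration:

use std::panic;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

fn monitor<F>(f: F)
where
    F: FnOnce(&dyn Fn(&str)) + panic::UnwindSafe,
{
    // The wrapped emitter records whether a fatal error was reported.
    let saw_fatal = Arc::new(AtomicBool::new(false));
    let flag = saw_fatal.clone();
    let emitter = move |msg: &str| {
        eprintln!("error: {}", msg);
        flag.store(true, Ordering::SeqCst);
    };

    // A panic with no recorded fatal error is an internal compiler error,
    // not an ordinary compilation failure.
    let result = panic::catch_unwind(move || f(&emitter));
    if result.is_err() && !saw_fatal.load(Ordering::SeqCst) {
        eprintln!("internal compiler error: unexpected panic");
    }
}

fn main() {
    monitor(|emit| emit("example fatal error"));
}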

View file

@ -64,7 +64,7 @@ enum CmdAction {
/// A utility function that hands off a pretty printer to a callback. /// A utility function that hands off a pretty printer to a callback.
fn with_pp(intr: @token::ident_interner, fn with_pp(intr: @token::ident_interner,
cb: fn(pprust::ps, io::Writer)) -> ~str { cb: fn(@pprust::ps, io::Writer)) -> ~str {
do io::with_str_writer |writer| { do io::with_str_writer |writer| {
let pp = pprust::rust_printer(writer, intr); let pp = pprust::rust_printer(writer, intr);
@ -203,7 +203,7 @@ fn compile_crate(src_filename: ~str, binary: ~str) -> Option<bool> {
}; };
let input = driver::file_input(src_path); let input = driver::file_input(src_path);
let sess = driver::build_session(options, diagnostic::emit); let sess = driver::build_session(options, diagnostic::emit);
sess.building_library = true; *sess.building_library = true;
let cfg = driver::build_configuration(sess, binary, input); let cfg = driver::build_configuration(sess, binary, input);
let outputs = driver::build_output_filenames( let outputs = driver::build_output_filenames(
input, &None, &None, sess); input, &None, &None, sess);

View file

@ -72,7 +72,7 @@ pub struct uv_handle_fields {
loop_handle: *libc::c_void, loop_handle: *libc::c_void,
type_: handle_type, type_: handle_type,
close_cb: *u8, close_cb: *u8,
mut data: *libc::c_void, data: *libc::c_void,
} }
// unix size: 8 // unix size: 8
@ -240,10 +240,10 @@ pub struct uv_timer_t {
// unix size: 16 // unix size: 16
pub struct sockaddr_in { pub struct sockaddr_in {
mut sin_family: u16, sin_family: u16,
mut sin_port: u16, sin_port: u16,
mut sin_addr: u32, // in_addr: this is an opaque, per-platform struct sin_addr: u32, // in_addr: this is an opaque, per-platform struct
mut sin_zero: (u8, u8, u8, u8, u8, u8, u8, u8), sin_zero: (u8, u8, u8, u8, u8, u8, u8, u8),
} }
// unix size: 28 .. FIXME #1645 // unix size: 28 .. FIXME #1645

View file

@ -107,15 +107,17 @@ pub enum ast_node {
} }
pub type map = std::oldmap::HashMap<node_id, ast_node>; pub type map = std::oldmap::HashMap<node_id, ast_node>;
pub struct ctx {
map: map, pub struct Ctx {
mut path: path, map: @map,
mut local_id: uint, path: path,
local_id: uint,
diag: span_handler, diag: span_handler,
} }
pub type vt = visit::vt<ctx>;
pub fn extend(cx: ctx, +elt: ident) -> @path { pub type vt = visit::vt<@mut Ctx>;
pub fn extend(cx: @mut Ctx, +elt: ident) -> @path {
@(vec::append(cx.path, ~[path_name(elt)])) @(vec::append(cx.path, ~[path_name(elt)]))
} }
@ -133,31 +135,33 @@ pub fn mk_ast_map_visitor() -> vt {
} }
pub fn map_crate(diag: span_handler, c: crate) -> map { pub fn map_crate(diag: span_handler, c: crate) -> map {
let cx = ctx { let cx = @mut Ctx {
map: std::oldmap::HashMap(), map: @std::oldmap::HashMap(),
mut path: ~[], path: ~[],
mut local_id: 0u, local_id: 0u,
diag: diag, diag: diag,
}; };
visit::visit_crate(c, cx, mk_ast_map_visitor()); visit::visit_crate(c, cx, mk_ast_map_visitor());
cx.map *cx.map
} }
// Used for items loaded from external crate that are being inlined into this // Used for items loaded from external crate that are being inlined into this
// crate. The `path` should be the path to the item but should not include // crate. The `path` should be the path to the item but should not include
// the item itself. // the item itself.
pub fn map_decoded_item(diag: span_handler, pub fn map_decoded_item(diag: span_handler,
map: map, path: path, ii: inlined_item) { map: map,
path: path,
ii: inlined_item) {
// I believe it is ok for the local IDs of inlined items from other crates // I believe it is ok for the local IDs of inlined items from other crates
// to overlap with the local ids from this crate, so just generate the ids // to overlap with the local ids from this crate, so just generate the ids
// starting from 0. (In particular, I think these ids are only used in // starting from 0. (In particular, I think these ids are only used in
// alias analysis, which we will not be running on the inlined items, and // alias analysis, which we will not be running on the inlined items, and
// even if we did I think it only needs an ordering between local // even if we did I think it only needs an ordering between local
// variables that are simultaneously in scope). // variables that are simultaneously in scope).
let cx = ctx { let cx = @mut Ctx {
map: map, map: @map,
mut path: path, path: path,
mut local_id: 0, local_id: 0,
diag: diag, diag: diag,
}; };
let v = mk_ast_map_visitor(); let v = mk_ast_map_visitor();
@ -181,7 +185,7 @@ pub fn map_decoded_item(diag: span_handler,
} }
pub fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk, pub fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
sp: codemap::span, id: node_id, cx: ctx, v: vt) { sp: codemap::span, id: node_id, &&cx: @mut Ctx, v: vt) {
for decl.inputs.each |a| { for decl.inputs.each |a| {
cx.map.insert(a.id, cx.map.insert(a.id,
node_arg(/* FIXME (#2543) */ node_arg(/* FIXME (#2543) */
@ -208,12 +212,12 @@ pub fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
visit::visit_fn(fk, decl, body, sp, id, cx, v); visit::visit_fn(fk, decl, body, sp, id, cx, v);
} }
pub fn map_block(b: blk, cx: ctx, v: vt) { pub fn map_block(b: blk, &&cx: @mut Ctx, v: vt) {
cx.map.insert(b.node.id, node_block(/* FIXME (#2543) */ copy b)); cx.map.insert(b.node.id, node_block(/* FIXME (#2543) */ copy b));
visit::visit_block(b, cx, v); visit::visit_block(b, cx, v);
} }
pub fn number_pat(cx: ctx, pat: @pat) { pub fn number_pat(cx: @mut Ctx, pat: @pat) {
do ast_util::walk_pat(pat) |p| { do ast_util::walk_pat(pat) |p| {
match p.node { match p.node {
pat_ident(*) => { pat_ident(*) => {
@ -225,24 +229,24 @@ pub fn number_pat(cx: ctx, pat: @pat) {
}; };
} }
pub fn map_local(loc: @local, cx: ctx, v: vt) { pub fn map_local(loc: @local, &&cx: @mut Ctx, v: vt) {
number_pat(cx, loc.node.pat); number_pat(cx, loc.node.pat);
visit::visit_local(loc, cx, v); visit::visit_local(loc, cx, v);
} }
pub fn map_arm(arm: arm, cx: ctx, v: vt) { pub fn map_arm(arm: arm, &&cx: @mut Ctx, v: vt) {
number_pat(cx, arm.pats[0]); number_pat(cx, arm.pats[0]);
visit::visit_arm(arm, cx, v); visit::visit_arm(arm, cx, v);
} }
pub fn map_method(impl_did: def_id, impl_path: @path, pub fn map_method(impl_did: def_id, impl_path: @path,
m: @method, cx: ctx) { m: @method, &&cx: @mut Ctx) {
cx.map.insert(m.id, node_method(m, impl_did, impl_path)); cx.map.insert(m.id, node_method(m, impl_did, impl_path));
cx.map.insert(m.self_id, node_local(cx.local_id)); cx.map.insert(m.self_id, node_local(cx.local_id));
cx.local_id += 1u; cx.local_id += 1u;
} }
pub fn map_item(i: @item, cx: ctx, v: vt) { pub fn map_item(i: @item, &&cx: @mut Ctx, v: vt) {
let item_path = @/* FIXME (#2543) */ copy cx.path; let item_path = @/* FIXME (#2543) */ copy cx.path;
cx.map.insert(i.id, node_item(i, item_path)); cx.map.insert(i.id, node_item(i, item_path));
match i.node { match i.node {
@ -305,7 +309,7 @@ pub fn map_item(i: @item, cx: ctx, v: vt) {
} }
pub fn map_struct_def(struct_def: @ast::struct_def, parent_node: ast_node, pub fn map_struct_def(struct_def: @ast::struct_def, parent_node: ast_node,
ident: ast::ident, cx: ctx, _v: vt) { ident: ast::ident, cx: @mut Ctx, _v: vt) {
let p = extend(cx, ident); let p = extend(cx, ident);
// If this is a tuple-like struct, register the constructor. // If this is a tuple-like struct, register the constructor.
match struct_def.ctor_id { match struct_def.ctor_id {
@ -322,12 +326,12 @@ pub fn map_struct_def(struct_def: @ast::struct_def, parent_node: ast_node,
} }
} }
pub fn map_expr(ex: @expr, cx: ctx, v: vt) { pub fn map_expr(ex: @expr, &&cx: @mut Ctx, v: vt) {
cx.map.insert(ex.id, node_expr(ex)); cx.map.insert(ex.id, node_expr(ex));
visit::visit_expr(ex, cx, v); visit::visit_expr(ex, cx, v);
} }
pub fn map_stmt(stmt: @stmt, cx: ctx, v: vt) { pub fn map_stmt(stmt: @stmt, &&cx: @mut Ctx, v: vt) {
cx.map.insert(stmt_id(*stmt), node_stmt(stmt)); cx.map.insert(stmt_id(*stmt), node_stmt(stmt));
visit::visit_stmt(stmt, cx, v); visit::visit_stmt(stmt, cx, v);
} }
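Editorial note (not part of the diff): the `Ctx` rewrite above is the shape this commit applies throughout librustc — instead of a context value with individually `mut` fields, the visitor callbacks all share one `@mut` box. In today's Rust the nearest equivalent is an `Rc<RefCell<...>>` context; a small illustrative sketch with hypothetical names:

use std::cell::RefCell;
use std::rc::Rc;

// One shared, mutable visitor context handed to every callback.
struct Ctx {
    path: Vec<String>,
    local_id: usize,
}

type SharedCtx = Rc<RefCell<Ctx>>;

fn map_item(name: &str, cx: &SharedCtx) {
    let mut cx = cx.borrow_mut();
    cx.path.push(name.to_string());
    cx.local_id += 1;
}

fn main() {
    let cx: SharedCtx = Rc::new(RefCell::new(Ctx { path: vec![], local_id: 0 }));
    map_item("foo", &cx);
    map_item("bar", &cx);
    assert_eq!(cx.borrow().local_id, 2);
}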

View file

@ -23,83 +23,86 @@ use core::dvec::DVec;
use std::term; use std::term;
pub type emitter = fn@(cmsp: Option<(@codemap::CodeMap, span)>, pub type Emitter = fn@(cmsp: Option<(@codemap::CodeMap, span)>,
msg: &str, lvl: level); msg: &str, lvl: level);
pub trait span_handler { pub trait span_handler {
fn span_fatal(sp: span, msg: &str) -> !; fn span_fatal(@mut self, sp: span, msg: &str) -> !;
fn span_err(sp: span, msg: &str); fn span_err(@mut self, sp: span, msg: &str);
fn span_warn(sp: span, msg: &str); fn span_warn(@mut self, sp: span, msg: &str);
fn span_note(sp: span, msg: &str); fn span_note(@mut self, sp: span, msg: &str);
fn span_bug(sp: span, msg: &str) -> !; fn span_bug(@mut self, sp: span, msg: &str) -> !;
fn span_unimpl(sp: span, msg: &str) -> !; fn span_unimpl(@mut self, sp: span, msg: &str) -> !;
fn handler() -> handler; fn handler(@mut self) -> handler;
} }
pub trait handler { pub trait handler {
fn fatal(msg: &str) -> !; fn fatal(@mut self, msg: &str) -> !;
fn err(msg: &str); fn err(@mut self, msg: &str);
fn bump_err_count(); fn bump_err_count(@mut self);
fn has_errors() -> bool; fn has_errors(@mut self) -> bool;
fn abort_if_errors(); fn abort_if_errors(@mut self);
fn warn(msg: &str); fn warn(@mut self, msg: &str);
fn note(msg: &str); fn note(@mut self, msg: &str);
fn bug(msg: &str) -> !; fn bug(@mut self, msg: &str) -> !;
fn unimpl(msg: &str) -> !; fn unimpl(@mut self, msg: &str) -> !;
fn emit(cmsp: Option<(@codemap::CodeMap, span)>, msg: &str, lvl: level); fn emit(@mut self,
cmsp: Option<(@codemap::CodeMap, span)>,
msg: &str,
lvl: level);
} }
struct handler_t { struct HandlerT {
mut err_count: uint, err_count: uint,
emit: emitter, emit: Emitter,
} }
struct codemap_t { struct CodemapT {
handler: handler, handler: handler,
cm: @codemap::CodeMap, cm: @codemap::CodeMap,
} }
impl codemap_t: span_handler { impl CodemapT: span_handler {
fn span_fatal(sp: span, msg: &str) -> ! { fn span_fatal(@mut self, sp: span, msg: &str) -> ! {
self.handler.emit(Some((self.cm, sp)), msg, fatal); self.handler.emit(Some((self.cm, sp)), msg, fatal);
die!(); die!();
} }
fn span_err(sp: span, msg: &str) { fn span_err(@mut self, sp: span, msg: &str) {
self.handler.emit(Some((self.cm, sp)), msg, error); self.handler.emit(Some((self.cm, sp)), msg, error);
self.handler.bump_err_count(); self.handler.bump_err_count();
} }
fn span_warn(sp: span, msg: &str) { fn span_warn(@mut self, sp: span, msg: &str) {
self.handler.emit(Some((self.cm, sp)), msg, warning); self.handler.emit(Some((self.cm, sp)), msg, warning);
} }
fn span_note(sp: span, msg: &str) { fn span_note(@mut self, sp: span, msg: &str) {
self.handler.emit(Some((self.cm, sp)), msg, note); self.handler.emit(Some((self.cm, sp)), msg, note);
} }
fn span_bug(sp: span, msg: &str) -> ! { fn span_bug(@mut self, sp: span, msg: &str) -> ! {
self.span_fatal(sp, ice_msg(msg)); self.span_fatal(sp, ice_msg(msg));
} }
fn span_unimpl(sp: span, msg: &str) -> ! { fn span_unimpl(@mut self, sp: span, msg: &str) -> ! {
self.span_bug(sp, ~"unimplemented " + msg); self.span_bug(sp, ~"unimplemented " + msg);
} }
fn handler() -> handler { fn handler(@mut self) -> handler {
self.handler self.handler
} }
} }
impl handler_t: handler { impl HandlerT: handler {
fn fatal(msg: &str) -> ! { fn fatal(@mut self, msg: &str) -> ! {
(self.emit)(None, msg, fatal); (self.emit)(None, msg, fatal);
die!(); die!();
} }
fn err(msg: &str) { fn err(@mut self, msg: &str) {
(self.emit)(None, msg, error); (self.emit)(None, msg, error);
self.bump_err_count(); self.bump_err_count();
} }
fn bump_err_count() { fn bump_err_count(@mut self) {
self.err_count += 1u; self.err_count += 1u;
} }
fn has_errors() -> bool { self.err_count > 0u } fn has_errors(@mut self) -> bool { self.err_count > 0u }
fn abort_if_errors() { fn abort_if_errors(@mut self) {
let s; let s;
match self.err_count { match self.err_count {
0u => return, 0u => return,
@ -111,17 +114,22 @@ impl handler_t: handler {
} }
self.fatal(s); self.fatal(s);
} }
fn warn(msg: &str) { fn warn(@mut self, msg: &str) {
(self.emit)(None, msg, warning); (self.emit)(None, msg, warning);
} }
fn note(msg: &str) { fn note(@mut self, msg: &str) {
(self.emit)(None, msg, note); (self.emit)(None, msg, note);
} }
fn bug(msg: &str) -> ! { fn bug(@mut self, msg: &str) -> ! {
self.fatal(ice_msg(msg)); self.fatal(ice_msg(msg));
} }
fn unimpl(msg: &str) -> ! { self.bug(~"unimplemented " + msg); } fn unimpl(@mut self, msg: &str) -> ! {
fn emit(cmsp: Option<(@codemap::CodeMap, span)>, msg: &str, lvl: level) { self.bug(~"unimplemented " + msg);
}
fn emit(@mut self,
cmsp: Option<(@codemap::CodeMap, span)>,
msg: &str,
lvl: level) {
(self.emit)(cmsp, msg, lvl); (self.emit)(cmsp, msg, lvl);
} }
} }
@ -132,25 +140,22 @@ pub fn ice_msg(msg: &str) -> ~str {
pub fn mk_span_handler(handler: handler, cm: @codemap::CodeMap) pub fn mk_span_handler(handler: handler, cm: @codemap::CodeMap)
-> span_handler { -> span_handler {
@codemap_t { handler: handler, cm: cm } as span_handler @mut CodemapT { handler: handler, cm: cm } as @span_handler
} }
pub fn mk_handler(emitter: Option<emitter>) -> handler { pub fn mk_handler(emitter: Option<Emitter>) -> @handler {
let emit: Emitter = match emitter {
let emit = match emitter {
Some(e) => e, Some(e) => e,
None => { None => {
let f = fn@(cmsp: Option<(@codemap::CodeMap, span)>, let emit: Emitter = |cmsp, msg, t| emit(cmsp, msg, t);
msg: &str, t: level) { emit
emit(cmsp, msg, t);
};
f
} }
}; };
@handler_t { mut err_count: 0, emit: emit } as handler @mut HandlerT { mut err_count: 0, emit: emit } as @handler
} }
#[deriving_eq]
pub enum level { pub enum level {
fatal, fatal,
error, error,
@ -158,13 +163,6 @@ pub enum level {
note, note,
} }
impl level : cmp::Eq {
pure fn eq(&self, other: &level) -> bool {
((*self) as uint) == ((*other) as uint)
}
pure fn ne(&self, other: &level) -> bool { !(*self).eq(other) }
}
fn diagnosticstr(lvl: level) -> ~str { fn diagnosticstr(lvl: level) -> ~str {
match lvl { match lvl {
fatal => ~"error", fatal => ~"error",
@ -223,9 +221,9 @@ pub fn emit(cmsp: Option<(@codemap::CodeMap, span)>, msg: &str, lvl: level) {
} }
} }
fn highlight_lines(cm: @codemap::CodeMap, sp: span, fn highlight_lines(cm: @codemap::CodeMap,
sp: span,
lines: @codemap::FileLines) { lines: @codemap::FileLines) {
let fm = lines.file; let fm = lines.file;
// arbitrarily only print up to six lines of the error // arbitrarily only print up to six lines of the error
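Editorial note (not part of the diff): `HandlerT` above is essentially an error counter wrapped around an `Emitter` callback — `err` emits and bumps `err_count`, and `abort_if_errors` turns a nonzero count into a fatal stop. A minimal modern-Rust sketch of that shape (names invented):

type Emitter = Box<dyn Fn(&str)>;

struct Handler {
    err_count: usize,
    emit: Emitter,
}

impl Handler {
    fn err(&mut self, msg: &str) {
        (self.emit)(msg);    // report the diagnostic
        self.err_count += 1; // remember that we saw an error
    }

    fn has_errors(&self) -> bool {
        self.err_count > 0
    }

    fn abort_if_errors(&self) {
        if self.has_errors() {
            panic!("aborting due to {} previous error(s)", self.err_count);
        }
    }
}

fn main() {
    let mut h = Handler { err_count: 0, emit: Box::new(|m| eprintln!("error: {}", m)) };
    h.err("main function not found");
    assert!(h.has_errors());
}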

View file

@ -161,54 +161,56 @@ pub fn syntax_expander_table() -> HashMap<~str, SyntaxExtension> {
// when a macro expansion occurs, the resulting nodes have the backtrace() // when a macro expansion occurs, the resulting nodes have the backtrace()
// -> expn_info of their expansion context stored into their span. // -> expn_info of their expansion context stored into their span.
pub trait ext_ctxt { pub trait ext_ctxt {
fn codemap() -> @CodeMap; fn codemap(@mut self) -> @CodeMap;
fn parse_sess() -> parse::parse_sess; fn parse_sess(@mut self) -> parse::parse_sess;
fn cfg() -> ast::crate_cfg; fn cfg(@mut self) -> ast::crate_cfg;
fn call_site() -> span; fn call_site(@mut self) -> span;
fn print_backtrace(); fn print_backtrace(@mut self);
fn backtrace() -> Option<@ExpnInfo>; fn backtrace(@mut self) -> Option<@ExpnInfo>;
fn mod_push(mod_name: ast::ident); fn mod_push(@mut self, mod_name: ast::ident);
fn mod_pop(); fn mod_pop(@mut self);
fn mod_path() -> ~[ast::ident]; fn mod_path(@mut self) -> ~[ast::ident];
fn bt_push(ei: codemap::ExpnInfo); fn bt_push(@mut self, ei: codemap::ExpnInfo);
fn bt_pop(); fn bt_pop(@mut self);
fn span_fatal(sp: span, msg: &str) -> !; fn span_fatal(@mut self, sp: span, msg: &str) -> !;
fn span_err(sp: span, msg: &str); fn span_err(@mut self, sp: span, msg: &str);
fn span_warn(sp: span, msg: &str); fn span_warn(@mut self, sp: span, msg: &str);
fn span_unimpl(sp: span, msg: &str) -> !; fn span_unimpl(@mut self, sp: span, msg: &str) -> !;
fn span_bug(sp: span, msg: &str) -> !; fn span_bug(@mut self, sp: span, msg: &str) -> !;
fn bug(msg: &str) -> !; fn bug(@mut self, msg: &str) -> !;
fn next_id() -> ast::node_id; fn next_id(@mut self) -> ast::node_id;
pure fn trace_macros() -> bool; pure fn trace_macros(@mut self) -> bool;
fn set_trace_macros(x: bool); fn set_trace_macros(@mut self, x: bool);
/* for unhygienic identifier transformation */ /* for unhygienic identifier transformation */
fn str_of(id: ast::ident) -> ~str; fn str_of(@mut self, id: ast::ident) -> ~str;
fn ident_of(st: ~str) -> ast::ident; fn ident_of(@mut self, st: ~str) -> ast::ident;
} }
pub fn mk_ctxt(parse_sess: parse::parse_sess, pub fn mk_ctxt(parse_sess: parse::parse_sess,
cfg: ast::crate_cfg) -> ext_ctxt { cfg: ast::crate_cfg) -> ext_ctxt {
type ctxt_repr = {parse_sess: parse::parse_sess, struct CtxtRepr {
parse_sess: parse::parse_sess,
cfg: ast::crate_cfg, cfg: ast::crate_cfg,
mut backtrace: Option<@ExpnInfo>, backtrace: Option<@ExpnInfo>,
mut mod_path: ~[ast::ident], mod_path: ~[ast::ident],
mut trace_mac: bool}; trace_mac: bool
impl ctxt_repr: ext_ctxt { }
fn codemap() -> @CodeMap { self.parse_sess.cm } impl CtxtRepr: ext_ctxt {
fn parse_sess() -> parse::parse_sess { self.parse_sess } fn codemap(@mut self) -> @CodeMap { self.parse_sess.cm }
fn cfg() -> ast::crate_cfg { self.cfg } fn parse_sess(@mut self) -> parse::parse_sess { self.parse_sess }
fn call_site() -> span { fn cfg(@mut self) -> ast::crate_cfg { self.cfg }
fn call_site(@mut self) -> span {
match self.backtrace { match self.backtrace {
Some(@ExpandedFrom({call_site: cs, _})) => cs, Some(@ExpandedFrom({call_site: cs, _})) => cs,
None => self.bug(~"missing top span") None => self.bug(~"missing top span")
} }
} }
fn print_backtrace() { } fn print_backtrace(@mut self) { }
fn backtrace() -> Option<@ExpnInfo> { self.backtrace } fn backtrace(@mut self) -> Option<@ExpnInfo> { self.backtrace }
fn mod_push(i: ast::ident) { self.mod_path.push(i); } fn mod_push(@mut self, i: ast::ident) { self.mod_path.push(i); }
fn mod_pop() { self.mod_path.pop(); } fn mod_pop(@mut self) { self.mod_path.pop(); }
fn mod_path() -> ~[ast::ident] { return self.mod_path; } fn mod_path(@mut self) -> ~[ast::ident] { return self.mod_path; }
fn bt_push(ei: codemap::ExpnInfo) { fn bt_push(@mut self, ei: codemap::ExpnInfo) {
match ei { match ei {
ExpandedFrom({call_site: cs, callie: ref callie}) => { ExpandedFrom({call_site: cs, callie: ref callie}) => {
self.backtrace = self.backtrace =
@ -219,7 +221,7 @@ pub fn mk_ctxt(parse_sess: parse::parse_sess,
} }
} }
} }
fn bt_pop() { fn bt_pop(@mut self) {
match self.backtrace { match self.backtrace {
Some(@ExpandedFrom({ Some(@ExpandedFrom({
call_site: span {expn_info: prev, _}, _ call_site: span {expn_info: prev, _}, _
@ -229,55 +231,55 @@ pub fn mk_ctxt(parse_sess: parse::parse_sess,
_ => self.bug(~"tried to pop without a push") _ => self.bug(~"tried to pop without a push")
} }
} }
fn span_fatal(sp: span, msg: &str) -> ! { fn span_fatal(@mut self, sp: span, msg: &str) -> ! {
self.print_backtrace(); self.print_backtrace();
self.parse_sess.span_diagnostic.span_fatal(sp, msg); self.parse_sess.span_diagnostic.span_fatal(sp, msg);
} }
fn span_err(sp: span, msg: &str) { fn span_err(@mut self, sp: span, msg: &str) {
self.print_backtrace(); self.print_backtrace();
self.parse_sess.span_diagnostic.span_err(sp, msg); self.parse_sess.span_diagnostic.span_err(sp, msg);
} }
fn span_warn(sp: span, msg: &str) { fn span_warn(@mut self, sp: span, msg: &str) {
self.print_backtrace(); self.print_backtrace();
self.parse_sess.span_diagnostic.span_warn(sp, msg); self.parse_sess.span_diagnostic.span_warn(sp, msg);
} }
fn span_unimpl(sp: span, msg: &str) -> ! { fn span_unimpl(@mut self, sp: span, msg: &str) -> ! {
self.print_backtrace(); self.print_backtrace();
self.parse_sess.span_diagnostic.span_unimpl(sp, msg); self.parse_sess.span_diagnostic.span_unimpl(sp, msg);
} }
fn span_bug(sp: span, msg: &str) -> ! { fn span_bug(@mut self, sp: span, msg: &str) -> ! {
self.print_backtrace(); self.print_backtrace();
self.parse_sess.span_diagnostic.span_bug(sp, msg); self.parse_sess.span_diagnostic.span_bug(sp, msg);
} }
fn bug(msg: &str) -> ! { fn bug(@mut self, msg: &str) -> ! {
self.print_backtrace(); self.print_backtrace();
self.parse_sess.span_diagnostic.handler().bug(msg); self.parse_sess.span_diagnostic.handler().bug(msg);
} }
fn next_id() -> ast::node_id { fn next_id(@mut self) -> ast::node_id {
return parse::next_node_id(self.parse_sess); return parse::next_node_id(self.parse_sess);
} }
pure fn trace_macros() -> bool { pure fn trace_macros(@mut self) -> bool {
self.trace_mac self.trace_mac
} }
fn set_trace_macros(x: bool) { fn set_trace_macros(@mut self, x: bool) {
self.trace_mac = x self.trace_mac = x
} }
fn str_of(id: ast::ident) -> ~str { fn str_of(@mut self, id: ast::ident) -> ~str {
*self.parse_sess.interner.get(id) *self.parse_sess.interner.get(id)
} }
fn ident_of(st: ~str) -> ast::ident { fn ident_of(@mut self, st: ~str) -> ast::ident {
self.parse_sess.interner.intern(@st) self.parse_sess.interner.intern(@st)
} }
} }
let imp: ctxt_repr = { let imp: @mut CtxtRepr = @mut CtxtRepr {
parse_sess: parse_sess, parse_sess: parse_sess,
cfg: cfg, cfg: cfg,
mut backtrace: None, backtrace: None,
mut mod_path: ~[], mod_path: ~[],
mut trace_mac: false trace_mac: false
}; };
move ((move imp) as ext_ctxt) move ((move imp) as @ext_ctxt)
} }
pub fn expr_to_str(cx: ext_ctxt, expr: @ast::expr, err_msg: ~str) -> ~str { pub fn expr_to_str(cx: ext_ctxt, expr: @ast::expr, err_msg: ~str) -> ~str {
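Editorial note (not part of the diff): the `ext_ctxt` methods above maintain two small stacks — the module path via `mod_push`/`mod_pop` and the expansion backtrace via `bt_push`/`bt_pop` — so diagnostics raised inside a macro expansion can point back at the call site. The sketch below flattens the `Option<@ExpnInfo>` chain into a `Vec` for brevity; all names are illustrative modern Rust:

// One recorded expansion: where the macro was invoked, and which macro.
struct ExpnInfo {
    call_site: usize, // stand-in for a span
    macro_name: String,
}

#[derive(Default)]
struct ExtCtxt {
    mod_path: Vec<String>,
    backtrace: Vec<ExpnInfo>,
}

impl ExtCtxt {
    fn mod_push(&mut self, name: &str) { self.mod_path.push(name.to_string()); }
    fn mod_pop(&mut self) { self.mod_path.pop(); }

    fn bt_push(&mut self, ei: ExpnInfo) { self.backtrace.push(ei); }
    fn bt_pop(&mut self) {
        // Popping without a matching push is a bug in the expander.
        assert!(self.backtrace.pop().is_some(), "tried to pop without a push");
    }

    fn call_site(&self) -> usize {
        self.backtrace.last().expect("missing top span").call_site
    }
}

fn main() {
    let mut cx = ExtCtxt::default();
    cx.bt_push(ExpnInfo { call_site: 42, macro_name: "fmt!".into() });
    assert_eq!(cx.call_site(), 42);
    cx.bt_pop();
}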

View file

@ -24,80 +24,90 @@ use core::vec;
use std; use std;
use std::oldmap::HashMap; use std::oldmap::HashMap;
enum tt_frame_up { /* to break a circularity */
tt_frame_up(Option<tt_frame>)
}
/* FIXME #2811: figure out how to have a uniquely linked stack, and change to /* FIXME #2811: figure out how to have a uniquely linked stack, and change to
`~` */ `~` */
///an unzipping of `token_tree`s ///an unzipping of `token_tree`s
type tt_frame = @{ struct TtFrame {
readme: ~[ast::token_tree], readme: ~[ast::token_tree],
mut idx: uint, idx: uint,
dotdotdoted: bool, dotdotdoted: bool,
sep: Option<Token>, sep: Option<Token>,
up: tt_frame_up, up: Option<@mut TtFrame>,
}; }
pub type tt_reader = @tt_reader_; pub struct TtReader {
pub type tt_reader_ = {
sp_diag: span_handler, sp_diag: span_handler,
interner: @ident_interner, interner: @ident_interner,
mut cur: tt_frame, cur: @mut TtFrame,
/* for MBE-style macro transcription */ /* for MBE-style macro transcription */
interpolations: std::oldmap::HashMap<ident, @named_match>, interpolations: std::oldmap::HashMap<ident, @named_match>,
mut repeat_idx: ~[uint], repeat_idx: ~[uint],
mut repeat_len: ~[uint], repeat_len: ~[uint],
/* cached: */ /* cached: */
mut cur_tok: Token, cur_tok: Token,
mut cur_span: span cur_span: span
}; }
/** This can do Macro-By-Example transcription. On the other hand, if /** This can do Macro-By-Example transcription. On the other hand, if
* `src` contains no `tt_seq`s and `tt_nonterminal`s, `interp` can (and * `src` contains no `tt_seq`s and `tt_nonterminal`s, `interp` can (and
* should) be none. */ * should) be none. */
pub fn new_tt_reader(sp_diag: span_handler, itr: @ident_interner, pub fn new_tt_reader(sp_diag: span_handler,
itr: @ident_interner,
interp: Option<std::oldmap::HashMap<ident,@named_match>>, interp: Option<std::oldmap::HashMap<ident,@named_match>>,
src: ~[ast::token_tree]) src: ~[ast::token_tree])
-> tt_reader { -> @mut TtReader {
let r = @{sp_diag: sp_diag, interner: itr, let r = @mut TtReader {
mut cur: @{readme: src, mut idx: 0u, dotdotdoted: false, sp_diag: sp_diag,
sep: None, up: tt_frame_up(option::None)}, interner: itr,
mut cur: @mut TtFrame {
readme: src,
idx: 0u,
dotdotdoted: false,
sep: None,
up: option::None
},
interpolations: match interp { /* just a convenience */ interpolations: match interp { /* just a convenience */

None => std::oldmap::HashMap(), None => std::oldmap::HashMap(),
Some(x) => x Some(x) => x
}, },
mut repeat_idx: ~[], repeat_idx: ~[],
mut repeat_len: ~[], repeat_len: ~[],
/* dummy values, never read: */ /* dummy values, never read: */
mut cur_tok: EOF, cur_tok: EOF,
mut cur_span: dummy_sp() cur_span: dummy_sp()
}; };
tt_next_token(r); /* get cur_tok and cur_span set up */ tt_next_token(r); /* get cur_tok and cur_span set up */
return r; return r;
} }
pure fn dup_tt_frame(&&f: tt_frame) -> tt_frame { pure fn dup_tt_frame(f: @mut TtFrame) -> @mut TtFrame {
@{readme: f.readme, mut idx: f.idx, dotdotdoted: f.dotdotdoted, @mut TtFrame {
sep: f.sep, up: match f.up { readme: f.readme,
tt_frame_up(Some(up_frame)) => { idx: f.idx,
tt_frame_up(Some(dup_tt_frame(up_frame))) dotdotdoted: f.dotdotdoted,
} sep: f.sep,
tt_frame_up(none) => tt_frame_up(none) up: match f.up {
Some(up_frame) => Some(dup_tt_frame(up_frame)),
None => None
} }
} }
} }
pub pure fn dup_tt_reader(r: &tt_reader_) -> tt_reader { pub pure fn dup_tt_reader(r: @mut TtReader) -> @mut TtReader {
@{sp_diag: r.sp_diag, interner: r.interner, @mut TtReader {
mut cur: dup_tt_frame(r.cur), sp_diag: r.sp_diag,
interner: r.interner,
cur: dup_tt_frame(r.cur),
interpolations: r.interpolations, interpolations: r.interpolations,
mut repeat_idx: copy r.repeat_idx, mut repeat_len: copy r.repeat_len, repeat_idx: copy r.repeat_idx,
mut cur_tok: r.cur_tok, mut cur_span: r.cur_span} repeat_len: copy r.repeat_len,
cur_tok: r.cur_tok,
cur_span: r.cur_span
}
} }
pure fn lookup_cur_matched_by_matched(r: &tt_reader_, pure fn lookup_cur_matched_by_matched(r: @mut TtReader,
start: @named_match) -> @named_match { start: @named_match) -> @named_match {
pure fn red(+ad: @named_match, idx: &uint) -> @named_match { pure fn red(+ad: @named_match, idx: &uint) -> @named_match {
match *ad { match *ad {
@ -111,15 +121,15 @@ pure fn lookup_cur_matched_by_matched(r: &tt_reader_,
vec::foldl(start, r.repeat_idx, red) vec::foldl(start, r.repeat_idx, red)
} }
fn lookup_cur_matched(r: &tt_reader_, name: ident) -> @named_match { fn lookup_cur_matched(r: @mut TtReader, name: ident) -> @named_match {
lookup_cur_matched_by_matched(r, r.interpolations.get(&name)) lookup_cur_matched_by_matched(r, r.interpolations.get(&name))
} }
enum lis { enum lis {
lis_unconstrained, lis_constraint(uint, ident), lis_contradiction(~str) lis_unconstrained, lis_constraint(uint, ident), lis_contradiction(~str)
} }
fn lockstep_iter_size(t: token_tree, r: &tt_reader_) -> lis { fn lockstep_iter_size(t: token_tree, r: @mut TtReader) -> lis {
fn lis_merge(lhs: lis, rhs: lis, r: &tt_reader_) -> lis { fn lis_merge(lhs: lis, rhs: lis, r: @mut TtReader) -> lis {
match lhs { match lhs {
lis_unconstrained => rhs, lis_unconstrained => rhs,
lis_contradiction(_) => lhs, lis_contradiction(_) => lhs,
@ -151,7 +161,7 @@ fn lockstep_iter_size(t: token_tree, r: &tt_reader_) -> lis {
} }
pub fn tt_next_token(r: &tt_reader_) -> TokenAndSpan { pub fn tt_next_token(r: @mut TtReader) -> TokenAndSpan {
let ret_val = TokenAndSpan { tok: r.cur_tok, sp: r.cur_span }; let ret_val = TokenAndSpan { tok: r.cur_tok, sp: r.cur_span };
while r.cur.idx >= r.cur.readme.len() { while r.cur.idx >= r.cur.readme.len() {
/* done with this set; pop or repeat? */ /* done with this set; pop or repeat? */
@ -159,11 +169,11 @@ pub fn tt_next_token(r: &tt_reader_) -> TokenAndSpan {
|| r.repeat_idx.last() == r.repeat_len.last() - 1 { || r.repeat_idx.last() == r.repeat_len.last() - 1 {
match r.cur.up { match r.cur.up {
tt_frame_up(None) => { None => {
r.cur_tok = EOF; r.cur_tok = EOF;
return ret_val; return ret_val;
} }
tt_frame_up(Some(tt_f)) => { Some(tt_f) => {
if r.cur.dotdotdoted { if r.cur.dotdotdoted {
r.repeat_idx.pop(); r.repeat_idx.pop();
r.repeat_len.pop(); r.repeat_len.pop();
@ -178,8 +188,8 @@ pub fn tt_next_token(r: &tt_reader_) -> TokenAndSpan {
r.cur.idx = 0u; r.cur.idx = 0u;
r.repeat_idx[r.repeat_idx.len() - 1u] += 1u; r.repeat_idx[r.repeat_idx.len() - 1u] += 1u;
match r.cur.sep { match r.cur.sep {
Some(ref tk) => { Some(tk) => {
r.cur_tok = (*tk); /* repeat same span, I guess */ r.cur_tok = tk; /* repeat same span, I guess */
return ret_val; return ret_val;
} }
None => () None => ()
@ -189,18 +199,23 @@ pub fn tt_next_token(r: &tt_reader_) -> TokenAndSpan {
loop { /* because it's easiest, this handles `tt_delim` not starting loop { /* because it's easiest, this handles `tt_delim` not starting
with a `tt_tok`, even though it won't happen */ with a `tt_tok`, even though it won't happen */
match r.cur.readme[r.cur.idx] { match r.cur.readme[r.cur.idx] {
tt_delim(ref tts) => { tt_delim(copy tts) => {
r.cur = @{readme: (*tts), mut idx: 0u, dotdotdoted: false, r.cur = @mut TtFrame {
sep: None, up: tt_frame_up(option::Some(r.cur)) }; readme: tts,
idx: 0u,
dotdotdoted: false,
sep: None,
up: option::Some(r.cur)
};
// if this could be 0-length, we'd need to potentially recur here // if this could be 0-length, we'd need to potentially recur here
} }
tt_tok(sp, ref tok) => { tt_tok(sp, copy tok) => {
r.cur_span = sp; r.cur_tok = (*tok); r.cur_span = sp; r.cur_tok = tok;
r.cur.idx += 1u; r.cur.idx += 1u;
return ret_val; return ret_val;
} }
tt_seq(sp, ref tts, ref sep, zerok) => { tt_seq(sp, copy tts, copy sep, zerok) => {
match lockstep_iter_size(tt_seq(sp, (*tts), (*sep), zerok), r) { match lockstep_iter_size(tt_seq(sp, tts, sep, zerok), r) {
lis_unconstrained => { lis_unconstrained => {
r.sp_diag.span_fatal( r.sp_diag.span_fatal(
sp, /* blame macro writer */ sp, /* blame macro writer */
@ -226,12 +241,12 @@ pub fn tt_next_token(r: &tt_reader_) -> TokenAndSpan {
} else { } else {
r.repeat_len.push(len); r.repeat_len.push(len);
r.repeat_idx.push(0u); r.repeat_idx.push(0u);
r.cur = @{ r.cur = @mut TtFrame {
readme: (*tts), readme: tts,
mut idx: 0u, idx: 0u,
dotdotdoted: true, dotdotdoted: true,
sep: (*sep), sep: sep,
up: tt_frame_up(option::Some(r.cur)) up: option::Some(r.cur)
}; };
} }
} }
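Editorial note (not part of the diff): `TtFrame` above is a parent-linked stack of token-tree cursors — `up` points at the enclosing frame — and `dup_tt_frame` copies the whole chain so a duplicated reader can advance without disturbing the original. A compact modern-Rust sketch of that structure, with `Box` standing in for `@mut` and all names illustrative:

// One level of the "unzipped" token tree being walked.
struct TtFrame {
    readme: Vec<String>,      // stand-in for ~[ast::token_tree]
    idx: usize,               // cursor within `readme`
    up: Option<Box<TtFrame>>, // enclosing frame, if any
}

// Deep-copy this frame and every frame above it.
fn dup_tt_frame(f: &TtFrame) -> TtFrame {
    TtFrame {
        readme: f.readme.clone(),
        idx: f.idx,
        up: f.up.as_ref().map(|up| Box::new(dup_tt_frame(up))),
    }
}

fn main() {
    let outer = TtFrame { readme: vec!["outer".into()], idx: 0, up: None };
    let inner = TtFrame {
        readme: vec!["a".into(), "b".into()],
        idx: 1,
        up: Some(Box::new(outer)),
    };
    let copy = dup_tt_frame(&inner);
    assert_eq!(copy.idx, 1);
    assert!(copy.up.is_some());
}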

View file

@ -14,7 +14,7 @@ use ast;
use codemap::{BytePos, CharPos, CodeMap, FileMap, Pos}; use codemap::{BytePos, CharPos, CodeMap, FileMap, Pos};
use diagnostic; use diagnostic;
use parse::lexer::{is_whitespace, get_str_from, reader}; use parse::lexer::{is_whitespace, get_str_from, reader};
use parse::lexer::{string_reader, bump, is_eof, nextch, TokenAndSpan}; use parse::lexer::{StringReader, bump, is_eof, nextch, TokenAndSpan};
use parse::lexer; use parse::lexer;
use parse::token; use parse::token;
use parse; use parse;
@ -120,7 +120,7 @@ pub fn strip_doc_comment_decoration(comment: ~str) -> ~str {
die!(~"not a doc-comment: " + comment); die!(~"not a doc-comment: " + comment);
} }
fn read_to_eol(rdr: string_reader) -> ~str { fn read_to_eol(rdr: @mut StringReader) -> ~str {
let mut val = ~""; let mut val = ~"";
while rdr.curr != '\n' && !is_eof(rdr) { while rdr.curr != '\n' && !is_eof(rdr) {
str::push_char(&mut val, rdr.curr); str::push_char(&mut val, rdr.curr);
@ -130,26 +130,26 @@ fn read_to_eol(rdr: string_reader) -> ~str {
return val; return val;
} }
fn read_one_line_comment(rdr: string_reader) -> ~str { fn read_one_line_comment(rdr: @mut StringReader) -> ~str {
let val = read_to_eol(rdr); let val = read_to_eol(rdr);
assert ((val[0] == '/' as u8 && val[1] == '/' as u8) || assert ((val[0] == '/' as u8 && val[1] == '/' as u8) ||
(val[0] == '#' as u8 && val[1] == '!' as u8)); (val[0] == '#' as u8 && val[1] == '!' as u8));
return val; return val;
} }
fn consume_non_eol_whitespace(rdr: string_reader) { fn consume_non_eol_whitespace(rdr: @mut StringReader) {
while is_whitespace(rdr.curr) && rdr.curr != '\n' && !is_eof(rdr) { while is_whitespace(rdr.curr) && rdr.curr != '\n' && !is_eof(rdr) {
bump(rdr); bump(rdr);
} }
} }
fn push_blank_line_comment(rdr: string_reader, comments: &mut ~[cmnt]) { fn push_blank_line_comment(rdr: @mut StringReader, comments: &mut ~[cmnt]) {
debug!(">>> blank-line comment"); debug!(">>> blank-line comment");
let v: ~[~str] = ~[]; let v: ~[~str] = ~[];
comments.push({style: blank_line, lines: v, pos: rdr.last_pos}); comments.push({style: blank_line, lines: v, pos: rdr.last_pos});
} }
fn consume_whitespace_counting_blank_lines(rdr: string_reader, fn consume_whitespace_counting_blank_lines(rdr: @mut StringReader,
comments: &mut ~[cmnt]) { comments: &mut ~[cmnt]) {
while is_whitespace(rdr.curr) && !is_eof(rdr) { while is_whitespace(rdr.curr) && !is_eof(rdr) {
if rdr.col == CharPos(0u) && rdr.curr == '\n' { if rdr.col == CharPos(0u) && rdr.curr == '\n' {
@ -160,7 +160,7 @@ fn consume_whitespace_counting_blank_lines(rdr: string_reader,
} }
fn read_shebang_comment(rdr: string_reader, code_to_the_left: bool, fn read_shebang_comment(rdr: @mut StringReader, code_to_the_left: bool,
comments: &mut ~[cmnt]) { comments: &mut ~[cmnt]) {
debug!(">>> shebang comment"); debug!(">>> shebang comment");
let p = rdr.last_pos; let p = rdr.last_pos;
@ -172,7 +172,7 @@ fn read_shebang_comment(rdr: string_reader, code_to_the_left: bool,
}); });
} }
fn read_line_comments(rdr: string_reader, code_to_the_left: bool, fn read_line_comments(rdr: @mut StringReader, code_to_the_left: bool,
comments: &mut ~[cmnt]) { comments: &mut ~[cmnt]) {
debug!(">>> line comments"); debug!(">>> line comments");
let p = rdr.last_pos; let p = rdr.last_pos;
@ -221,7 +221,8 @@ fn trim_whitespace_prefix_and_push_line(lines: &mut ~[~str],
lines.push(s1); lines.push(s1);
} }
fn read_block_comment(rdr: string_reader, code_to_the_left: bool, fn read_block_comment(rdr: @mut StringReader,
code_to_the_left: bool,
comments: &mut ~[cmnt]) { comments: &mut ~[cmnt]) {
debug!(">>> block comment"); debug!(">>> block comment");
let p = rdr.last_pos; let p = rdr.last_pos;
@ -280,13 +281,14 @@ fn read_block_comment(rdr: string_reader, code_to_the_left: bool,
comments.push({style: style, lines: lines, pos: p}); comments.push({style: style, lines: lines, pos: p});
} }
fn peeking_at_comment(rdr: string_reader) -> bool { fn peeking_at_comment(rdr: @mut StringReader) -> bool {
return ((rdr.curr == '/' && nextch(rdr) == '/') || return ((rdr.curr == '/' && nextch(rdr) == '/') ||
(rdr.curr == '/' && nextch(rdr) == '*')) || (rdr.curr == '/' && nextch(rdr) == '*')) ||
(rdr.curr == '#' && nextch(rdr) == '!'); (rdr.curr == '#' && nextch(rdr) == '!');
} }
fn consume_comment(rdr: string_reader, code_to_the_left: bool, fn consume_comment(rdr: @mut StringReader,
code_to_the_left: bool,
comments: &mut ~[cmnt]) { comments: &mut ~[cmnt]) {
debug!(">>> consume comment"); debug!(">>> consume comment");
if rdr.curr == '/' && nextch(rdr) == '/' { if rdr.curr == '/' && nextch(rdr) == '/' {
@ -309,8 +311,9 @@ pub fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
let itr = parse::token::mk_fake_ident_interner(); let itr = parse::token::mk_fake_ident_interner();
let cm = CodeMap::new(); let cm = CodeMap::new();
let filemap = cm.new_filemap(path, src); let filemap = cm.new_filemap(path, src);
let rdr = lexer::new_low_level_string_reader( let rdr = lexer::new_low_level_string_reader(span_diagnostic,
span_diagnostic, filemap, itr); filemap,
itr);
let mut comments: ~[cmnt] = ~[]; let mut comments: ~[cmnt] = ~[];
let mut literals: ~[lit] = ~[]; let mut literals: ~[lit] = ~[];

View file

@ -24,45 +24,45 @@ use core::either;
use core::str; use core::str;
use core::u64; use core::u64;
pub use ext::tt::transcribe::{tt_reader, tt_reader_, new_tt_reader}; pub use ext::tt::transcribe::{TtReader, new_tt_reader};
//use std; //use std;
pub trait reader { pub trait reader {
fn is_eof(&self) -> bool; fn is_eof(@mut self) -> bool;
fn next_token(&self) -> TokenAndSpan; fn next_token(@mut self) -> TokenAndSpan;
fn fatal(&self,~str) -> !; fn fatal(@mut self, ~str) -> !;
fn span_diag(&self) -> span_handler; fn span_diag(@mut self) -> span_handler;
pure fn interner(&self) -> @token::ident_interner; pure fn interner(@mut self) -> @token::ident_interner;
fn peek(&self) -> TokenAndSpan; fn peek(@mut self) -> TokenAndSpan;
fn dup(&self) -> reader; fn dup(@mut self) -> reader;
} }
#[deriving_eq] #[deriving_eq]
pub struct TokenAndSpan {tok: token::Token, sp: span} pub struct TokenAndSpan {tok: token::Token, sp: span}
pub type string_reader = @string_reader_; pub struct StringReader {
pub type string_reader_ = {
span_diagnostic: span_handler, span_diagnostic: span_handler,
src: @~str, src: @~str,
// The absolute offset within the codemap of the next character to read // The absolute offset within the codemap of the next character to read
mut pos: BytePos, pos: BytePos,
// The absolute offset within the codemap of the last character read (curr) // The absolute offset within the codemap of the last character read (curr)
mut last_pos: BytePos, last_pos: BytePos,
// The column of the next character to read // The column of the next character to read
mut col: CharPos, col: CharPos,
// The last character to be read // The last character to be read
mut curr: char, curr: char,
filemap: @codemap::FileMap, filemap: @codemap::FileMap,
interner: @token::ident_interner, interner: @token::ident_interner,
/* cached: */ /* cached: */
mut peek_tok: token::Token, peek_tok: token::Token,
mut peek_span: span peek_span: span
}; }
pub fn new_string_reader(span_diagnostic: span_handler, pub fn new_string_reader(span_diagnostic: span_handler,
filemap: @codemap::FileMap, filemap: @codemap::FileMap,
itr: @token::ident_interner) -> string_reader { itr: @token::ident_interner)
-> @mut StringReader {
let r = new_low_level_string_reader(span_diagnostic, filemap, itr); let r = new_low_level_string_reader(span_diagnostic, filemap, itr);
string_advance_token(r); /* fill in peek_* */ string_advance_token(r); /* fill in peek_* */
return r; return r;
@ -72,18 +72,20 @@ pub fn new_string_reader(span_diagnostic: span_handler,
pub fn new_low_level_string_reader(span_diagnostic: span_handler, pub fn new_low_level_string_reader(span_diagnostic: span_handler,
filemap: @codemap::FileMap, filemap: @codemap::FileMap,
itr: @token::ident_interner) itr: @token::ident_interner)
-> string_reader { -> @mut StringReader {
// Force the initial reader bump to start on a fresh line // Force the initial reader bump to start on a fresh line
let initial_char = '\n'; let initial_char = '\n';
let r = @{span_diagnostic: span_diagnostic, src: filemap.src, let r = @mut StringReader {
mut pos: filemap.start_pos, span_diagnostic: span_diagnostic, src: filemap.src,
mut last_pos: filemap.start_pos, pos: filemap.start_pos,
mut col: CharPos(0), last_pos: filemap.start_pos,
mut curr: initial_char, col: CharPos(0),
curr: initial_char,
filemap: filemap, interner: itr, filemap: filemap, interner: itr,
/* dummy values; not read */ /* dummy values; not read */
mut peek_tok: token::EOF, peek_tok: token::EOF,
mut peek_span: codemap::dummy_sp()}; peek_span: codemap::dummy_sp()
};
bump(r); bump(r);
return r; return r;
} }
@ -91,58 +93,56 @@ pub fn new_low_level_string_reader(span_diagnostic: span_handler,
// duplicating the string reader is probably a bad idea, in // duplicating the string reader is probably a bad idea, in
// that using them will cause interleaved pushes of line // that using them will cause interleaved pushes of line
// offsets to the underlying filemap... // offsets to the underlying filemap...
fn dup_string_reader(r: &string_reader_) -> string_reader { fn dup_string_reader(r: @mut StringReader) -> @mut StringReader {
@{span_diagnostic: r.span_diagnostic, src: r.src, @mut StringReader {
mut pos: r.pos, span_diagnostic: r.span_diagnostic,
mut last_pos: r.last_pos, src: r.src,
mut col: r.col, mut curr: r.curr, pos: r.pos,
filemap: r.filemap, interner: r.interner, last_pos: r.last_pos,
mut peek_tok: r.peek_tok, mut peek_span: r.peek_span} col: r.col,
curr: r.curr,
filemap: r.filemap,
interner: r.interner,
peek_tok: r.peek_tok,
peek_span: r.peek_span
}
} }
impl string_reader_: reader { impl StringReader: reader {
fn is_eof(&self) -> bool { is_eof(self) } fn is_eof(@mut self) -> bool { is_eof(self) }
// return the next token. EFFECT: advances the string_reader. // return the next token. EFFECT: advances the string_reader.
fn next_token(&self) -> TokenAndSpan { fn next_token(@mut self) -> TokenAndSpan {
let ret_val = TokenAndSpan {tok: self.peek_tok, sp: self.peek_span}; let ret_val = TokenAndSpan {tok: self.peek_tok, sp: self.peek_span};
string_advance_token(self); string_advance_token(self);
return ret_val; return ret_val;
} }
fn fatal(&self, m: ~str) -> ! { fn fatal(@mut self, m: ~str) -> ! {
self.span_diagnostic.span_fatal(copy self.peek_span, m) self.span_diagnostic.span_fatal(copy self.peek_span, m)
} }
fn span_diag(&self) -> span_handler { self.span_diagnostic } fn span_diag(@mut self) -> span_handler { self.span_diagnostic }
pure fn interner(&self) -> @token::ident_interner { self.interner } pure fn interner(@mut self) -> @token::ident_interner { self.interner }
fn peek(&self) -> TokenAndSpan { fn peek(@mut self) -> TokenAndSpan {
TokenAndSpan {tok: self.peek_tok, sp: self.peek_span} TokenAndSpan {tok: self.peek_tok, sp: self.peek_span}
} }
fn dup(&self) -> reader { dup_string_reader(self) as reader } fn dup(@mut self) -> reader { dup_string_reader(self) as reader }
} }
pub impl tt_reader_: reader { pub impl TtReader: reader {
fn is_eof(&self) -> bool { self.cur_tok == token::EOF } fn is_eof(@mut self) -> bool { self.cur_tok == token::EOF }
fn next_token(&self) -> TokenAndSpan { fn next_token(@mut self) -> TokenAndSpan { tt_next_token(self) }
/* weird resolve bug: if the following `if`, or any of its fn fatal(@mut self, m: ~str) -> ! {
statements are removed, we get resolution errors */
if false {
let _ignore_me = 0;
let _me_too = self.cur.readme[self.cur.idx];
}
tt_next_token(self)
}
fn fatal(&self, m: ~str) -> ! {
self.sp_diag.span_fatal(copy self.cur_span, m); self.sp_diag.span_fatal(copy self.cur_span, m);
} }
fn span_diag(&self) -> span_handler { self.sp_diag } fn span_diag(@mut self) -> span_handler { self.sp_diag }
pure fn interner(&self) -> @token::ident_interner { self.interner } pure fn interner(@mut self) -> @token::ident_interner { self.interner }
fn peek(&self) -> TokenAndSpan { fn peek(@mut self) -> TokenAndSpan {
TokenAndSpan { tok: self.cur_tok, sp: self.cur_span } TokenAndSpan { tok: self.cur_tok, sp: self.cur_span }
} }
fn dup(&self) -> reader { dup_tt_reader(self) as reader } fn dup(@mut self) -> reader { dup_tt_reader(self) as reader }
} }
// EFFECT: advance peek_tok and peek_span to refer to the next token. // EFFECT: advance peek_tok and peek_span to refer to the next token.
fn string_advance_token(r: &string_reader_) { fn string_advance_token(r: @mut StringReader) {
match (consume_whitespace_and_comments(r)) { match (consume_whitespace_and_comments(r)) {
Some(comment) => { Some(comment) => {
r.peek_tok = comment.tok; r.peek_tok = comment.tok;
@ -160,11 +160,11 @@ fn string_advance_token(r: &string_reader_) {
} }
} }
fn byte_offset(rdr: &string_reader_) -> BytePos { fn byte_offset(rdr: @mut StringReader) -> BytePos {
(rdr.pos - rdr.filemap.start_pos) (rdr.pos - rdr.filemap.start_pos)
} }
pub fn get_str_from(rdr: &string_reader_, start: BytePos) -> ~str { pub fn get_str_from(rdr: @mut StringReader, start: BytePos) -> ~str {
unsafe { unsafe {
// I'm pretty skeptical about this subtraction. What if there's a // I'm pretty skeptical about this subtraction. What if there's a
// multi-byte character before the mark? // multi-byte character before the mark?
@ -175,7 +175,7 @@ pub fn get_str_from(rdr: &string_reader_, start: BytePos) -> ~str {
// EFFECT: advance the StringReader by one character. If a newline is // EFFECT: advance the StringReader by one character. If a newline is
// discovered, add it to the FileMap's list of line start offsets. // discovered, add it to the FileMap's list of line start offsets.
pub fn bump(rdr: &string_reader_) { pub fn bump(rdr: @mut StringReader) {
rdr.last_pos = rdr.pos; rdr.last_pos = rdr.pos;
let current_byte_offset = byte_offset(rdr).to_uint(); let current_byte_offset = byte_offset(rdr).to_uint();
if current_byte_offset < (*rdr.src).len() { if current_byte_offset < (*rdr.src).len() {
@ -199,10 +199,10 @@ pub fn bump(rdr: &string_reader_) {
rdr.curr = -1 as char; rdr.curr = -1 as char;
} }
} }
pub fn is_eof(rdr: &string_reader_) -> bool { pub fn is_eof(rdr: @mut StringReader) -> bool {
rdr.curr == -1 as char rdr.curr == -1 as char
} }
pub fn nextch(rdr: &string_reader_) -> char { pub fn nextch(rdr: @mut StringReader) -> char {
let offset = byte_offset(rdr).to_uint(); let offset = byte_offset(rdr).to_uint();
if offset < (*rdr.src).len() { if offset < (*rdr.src).len() {
return str::char_at(*rdr.src, offset); return str::char_at(*rdr.src, offset);
@ -247,7 +247,7 @@ fn is_bin_digit(c: char) -> bool { return c == '0' || c == '1'; }
// EFFECT: eats whitespace and comments. // EFFECT: eats whitespace and comments.
// returns a Some(sugared-doc-attr) if one exists, None otherwise. // returns a Some(sugared-doc-attr) if one exists, None otherwise.
fn consume_whitespace_and_comments(rdr: &string_reader_) fn consume_whitespace_and_comments(rdr: @mut StringReader)
-> Option<TokenAndSpan> { -> Option<TokenAndSpan> {
while is_whitespace(rdr.curr) { bump(rdr); } while is_whitespace(rdr.curr) { bump(rdr); }
return consume_any_line_comment(rdr); return consume_any_line_comment(rdr);
@ -256,7 +256,7 @@ fn consume_whitespace_and_comments(rdr: &string_reader_)
// PRECONDITION: rdr.curr is not whitespace // PRECONDITION: rdr.curr is not whitespace
// EFFECT: eats any kind of comment. // EFFECT: eats any kind of comment.
// returns a Some(sugared-doc-attr) if one exists, None otherwise // returns a Some(sugared-doc-attr) if one exists, None otherwise
fn consume_any_line_comment(rdr: &string_reader_) fn consume_any_line_comment(rdr: @mut StringReader)
-> Option<TokenAndSpan> { -> Option<TokenAndSpan> {
if rdr.curr == '/' { if rdr.curr == '/' {
match nextch(rdr) { match nextch(rdr) {
@ -299,9 +299,8 @@ fn consume_any_line_comment(rdr: &string_reader_)
} }
// might return a sugared-doc-attr // might return a sugared-doc-attr
fn consume_block_comment(rdr: &string_reader_) fn consume_block_comment(rdr: @mut StringReader)
-> Option<TokenAndSpan> { -> Option<TokenAndSpan> {
// block comments starting with "/**" or "/*!" are doc-comments // block comments starting with "/**" or "/*!" are doc-comments
if rdr.curr == '*' || rdr.curr == '!' { if rdr.curr == '*' || rdr.curr == '!' {
let start_bpos = rdr.pos - BytePos(2u); let start_bpos = rdr.pos - BytePos(2u);
@ -338,7 +337,7 @@ fn consume_block_comment(rdr: &string_reader_)
return consume_whitespace_and_comments(rdr); return consume_whitespace_and_comments(rdr);
} }
fn scan_exponent(rdr: &string_reader_) -> Option<~str> { fn scan_exponent(rdr: @mut StringReader) -> Option<~str> {
let mut c = rdr.curr; let mut c = rdr.curr;
let mut rslt = ~""; let mut rslt = ~"";
if c == 'e' || c == 'E' { if c == 'e' || c == 'E' {
@ -356,7 +355,7 @@ fn scan_exponent(rdr: &string_reader_) -> Option<~str> {
} else { return None::<~str>; } } else { return None::<~str>; }
} }
fn scan_digits(rdr: &string_reader_, radix: uint) -> ~str { fn scan_digits(rdr: @mut StringReader, radix: uint) -> ~str {
let mut rslt = ~""; let mut rslt = ~"";
loop { loop {
let c = rdr.curr; let c = rdr.curr;
@ -371,7 +370,7 @@ fn scan_digits(rdr: &string_reader_, radix: uint) -> ~str {
}; };
} }
fn scan_number(c: char, rdr: &string_reader_) -> token::Token { fn scan_number(c: char, rdr: @mut StringReader) -> token::Token {
let mut num_str, base = 10u, c = c, n = nextch(rdr); let mut num_str, base = 10u, c = c, n = nextch(rdr);
if c == '0' && n == 'x' { if c == '0' && n == 'x' {
bump(rdr); bump(rdr);
@ -487,7 +486,7 @@ fn scan_number(c: char, rdr: &string_reader_) -> token::Token {
} }
} }
fn scan_numeric_escape(rdr: &string_reader_, n_hex_digits: uint) -> char { fn scan_numeric_escape(rdr: @mut StringReader, n_hex_digits: uint) -> char {
let mut accum_int = 0, i = n_hex_digits; let mut accum_int = 0, i = n_hex_digits;
while i != 0u { while i != 0u {
let n = rdr.curr; let n = rdr.curr;
@ -502,7 +501,7 @@ fn scan_numeric_escape(rdr: &string_reader_, n_hex_digits: uint) -> char {
return accum_int as char; return accum_int as char;
} }
fn next_token_inner(rdr: &string_reader_) -> token::Token { fn next_token_inner(rdr: @mut StringReader) -> token::Token {
let mut accum_str = ~""; let mut accum_str = ~"";
let mut c = rdr.curr; let mut c = rdr.curr;
if (c >= 'a' && c <= 'z') if (c >= 'a' && c <= 'z')
@ -527,7 +526,7 @@ fn next_token_inner(rdr: &string_reader_) -> token::Token {
if is_dec_digit(c) { if is_dec_digit(c) {
return scan_number(c, rdr); return scan_number(c, rdr);
} }
fn binop(rdr: &string_reader_, op: token::binop) -> token::Token { fn binop(rdr: @mut StringReader, op: token::binop) -> token::Token {
bump(rdr); bump(rdr);
if rdr.curr == '=' { if rdr.curr == '=' {
bump(rdr); bump(rdr);
@ -720,7 +719,7 @@ fn next_token_inner(rdr: &string_reader_) -> token::Token {
} }
} }
fn consume_whitespace(rdr: &string_reader_) { fn consume_whitespace(rdr: @mut StringReader) {
while is_whitespace(rdr.curr) && !is_eof(rdr) { bump(rdr); } while is_whitespace(rdr.curr) && !is_eof(rdr) { bump(rdr); }
} }
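Editorial note (not part of the diff): `StringReader` above is a plain character cursor — `pos` and `last_pos` track byte offsets, `curr` caches the last character read, and `bump`/`nextch`/`is_eof` advance and peek. A minimal modern-Rust sketch of that cursor (illustrative only):

struct StringReader {
    src: String,
    pos: usize,         // byte offset of the next char to read
    curr: Option<char>, // last char read; None once we hit end of input
}

impl StringReader {
    fn new(src: &str) -> Self {
        let mut r = StringReader { src: src.to_string(), pos: 0, curr: None };
        r.bump(); // prime `curr`, as new_low_level_string_reader does
        r
    }

    fn bump(&mut self) {
        if let Some(c) = self.src[self.pos..].chars().next() {
            self.curr = Some(c);
            self.pos += c.len_utf8();
        } else {
            self.curr = None;
        }
    }

    fn nextch(&self) -> Option<char> {
        self.src[self.pos..].chars().next()
    }

    fn is_eof(&self) -> bool {
        self.curr.is_none()
    }
}

fn main() {
    let mut rdr = StringReader::new("fn");
    assert_eq!(rdr.curr, Some('f'));
    assert_eq!(rdr.nextch(), Some('n'));
    rdr.bump();
    rdr.bump();
    assert!(rdr.is_eof());
}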

View file

@ -15,9 +15,9 @@ use ast::node_id;
use ast; use ast;
use codemap::{span, CodeMap, FileMap, CharPos, BytePos}; use codemap::{span, CodeMap, FileMap, CharPos, BytePos};
use codemap; use codemap;
use diagnostic::{span_handler, mk_span_handler, mk_handler, emitter}; use diagnostic::{span_handler, mk_span_handler, mk_handler, Emitter};
use parse::attr::parser_attr; use parse::attr::parser_attr;
use parse::lexer::{reader, string_reader}; use parse::lexer::{reader, StringReader};
use parse::parser::Parser; use parse::parser::Parser;
use parse::token::{ident_interner, mk_ident_interner}; use parse::token::{ident_interner, mk_ident_interner};
use util::interner; use util::interner;
@ -54,7 +54,7 @@ pub type parse_sess = @{
interner: @ident_interner, interner: @ident_interner,
}; };
pub fn new_parse_sess(demitter: Option<emitter>) -> parse_sess { pub fn new_parse_sess(demitter: Option<Emitter>) -> parse_sess {
let cm = @CodeMap::new(); let cm = @CodeMap::new();
return @{cm: cm, return @{cm: cm,
mut next_id: 1, mut next_id: 1,
@ -166,18 +166,22 @@ pub fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
+name: ~str, +ss: codemap::FileSubstr, +name: ~str, +ss: codemap::FileSubstr,
source: @~str) -> Parser { source: @~str) -> Parser {
let filemap = sess.cm.new_filemap_w_substr(name, ss, source); let filemap = sess.cm.new_filemap_w_substr(name, ss, source);
let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap, let srdr = lexer::new_string_reader(sess.span_diagnostic,
filemap,
sess.interner); sess.interner);
return Parser(sess, cfg, srdr as reader); return Parser(sess, cfg, srdr as reader);
} }
pub fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, pub fn new_parser_from_file(sess: parse_sess,
path: &Path) -> Result<Parser, ~str> { cfg: ast::crate_cfg,
path: &Path)
-> Result<Parser, ~str> {
match io::read_whole_file_str(path) { match io::read_whole_file_str(path) {
result::Ok(move src) => { result::Ok(move src) => {
let filemap = sess.cm.new_filemap(path.to_str(), @move src); let filemap = sess.cm.new_filemap(path.to_str(), @move src);
let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap, let srdr = lexer::new_string_reader(sess.span_diagnostic,
filemap,
sess.interner); sess.interner);
Ok(Parser(sess, cfg, srdr as reader)) Ok(Parser(sess, cfg, srdr as reader))

View file

@ -32,7 +32,7 @@ use core::vec;
* I am implementing this algorithm because it comes with 20 pages of * I am implementing this algorithm because it comes with 20 pages of
* documentation explaining its theory, and because it addresses the set of * documentation explaining its theory, and because it addresses the set of
* concerns I've seen other pretty-printers fall down on. Weirdly. Even though * concerns I've seen other pretty-printers fall down on. Weirdly. Even though
* it's 32 years old and not written in Haskell. What can I say? * it's 32 years old. What can I say?
* *
* Despite some redundancies and quirks in the way it's implemented in that * Despite some redundancies and quirks in the way it's implemented in that
* paper, I've opted to keep the implementation here as similar as I can, * paper, I've opted to keep the implementation here as similar as I can,
@ -69,20 +69,9 @@ use core::vec;
* line (which it can't) and so naturally place the content on its own line to * line (which it can't) and so naturally place the content on its own line to
* avoid combining it with other lines and making matters even worse. * avoid combining it with other lines and making matters even worse.
*/ */
#[deriving_eq]
pub enum breaks { consistent, inconsistent, } pub enum breaks { consistent, inconsistent, }
pub impl breaks : cmp::Eq {
pure fn eq(&self, other: &breaks) -> bool {
match ((*self), (*other)) {
(consistent, consistent) => true,
(inconsistent, inconsistent) => true,
(consistent, _) => false,
(inconsistent, _) => false,
}
}
pure fn ne(&self, other: &breaks) -> bool { !(*self).eq(other) }
}
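(Aside: the hand-written Eq impl deleted above is exactly what the new attribute generates. A minimal sketch of the same change in present-day Rust, where #[deriving_eq] became #[derive(PartialEq)] and the enum would be spelled in CamelCase; the names here are illustrative, not from this commit.)

    // Derived equality replaces the hand-rolled eq/ne pair.
    #[derive(PartialEq, Eq, Debug)]
    pub enum Breaks {
        Consistent,
        Inconsistent,
    }

    fn main() {
        assert_eq!(Breaks::Consistent, Breaks::Consistent);
        assert!(Breaks::Consistent != Breaks::Inconsistent);
    }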
pub type break_t = {offset: int, blank_space: int}; pub type break_t = {offset: int, blank_space: int};
pub type begin_t = {offset: int, breaks: breaks}; pub type begin_t = {offset: int, breaks: breaks};
@ -96,11 +85,11 @@ pub enum token {
} }
pub impl token { pub impl token {
fn is_eof() -> bool { fn is_eof(&self) -> bool {
match self { EOF => true, _ => false } match *self { EOF => true, _ => false }
} }
fn is_hardbreak_tok() -> bool { fn is_hardbreak_tok(&self) -> bool {
match self { match *self {
BREAK({offset: 0, blank_space: bs }) if bs == size_infinity => BREAK({offset: 0, blank_space: bs }) if bs == size_infinity =>
true, true,
_ => _ =>
@ -143,7 +132,7 @@ pub type print_stack_elt = {offset: int, pbreak: print_stack_break};
pub const size_infinity: int = 0xffff; pub const size_infinity: int = 0xffff;
pub fn mk_printer(out: io::Writer, linewidth: uint) -> printer { pub fn mk_printer(out: @io::Writer, linewidth: uint) -> @mut Printer {
// Yes 3, it makes the ring buffers big enough to never // Yes 3, it makes the ring buffers big enough to never
// fall behind. // fall behind.
let n: uint = 3 * linewidth; let n: uint = 3 * linewidth;
@ -151,22 +140,24 @@ pub fn mk_printer(out: io::Writer, linewidth: uint) -> printer {
let mut token: ~[token] = vec::from_elem(n, EOF); let mut token: ~[token] = vec::from_elem(n, EOF);
let mut size: ~[int] = vec::from_elem(n, 0); let mut size: ~[int] = vec::from_elem(n, 0);
let mut scan_stack: ~[uint] = vec::from_elem(n, 0u); let mut scan_stack: ~[uint] = vec::from_elem(n, 0u);
printer_(@{out: out, @mut Printer {
out: @out,
buf_len: n, buf_len: n,
mut margin: linewidth as int, margin: linewidth as int,
mut space: linewidth as int, space: linewidth as int,
mut left: 0, left: 0,
mut right: 0, right: 0,
mut token: move token, token: move token,
mut size: move size, size: move size,
mut left_total: 0, left_total: 0,
mut right_total: 0, right_total: 0,
mut scan_stack: move scan_stack, scan_stack: move scan_stack,
mut scan_stack_empty: true, scan_stack_empty: true,
mut top: 0, top: 0,
mut bottom: 0, bottom: 0,
print_stack: DVec(), print_stack: @mut ~[],
mut pending_indentation: 0 }) pending_indentation: 0
}
} }
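The constructor change above is the commit's core pattern: a record full of `mut` fields wrapped in an enum becomes a plain struct allocated behind one shared mutable box (`@mut Printer`). A rough sketch of that shape in present-day Rust, using `Rc<RefCell<T>>` as a stand-in for `@mut T` (which no longer exists) and keeping only a few of the fields the hunk shows:

    use std::cell::RefCell;
    use std::rc::Rc;

    // Illustrative only: a cut-down analog of the new `@mut Printer` layout.
    struct Printer {
        buf_len: usize,     // ring-buffer capacity: 3 * linewidth
        margin: isize,      // width of lines we're constrained to
        space: isize,       // spaces left on the current line
        token: Vec<String>, // ring buffer the token stream goes through
        size: Vec<isize>,   // ring buffer of calculated sizes
    }

    fn mk_printer(linewidth: usize) -> Rc<RefCell<Printer>> {
        // "Yes 3": big enough that the scanner never falls behind the printer.
        let n = 3 * linewidth;
        Rc::new(RefCell::new(Printer {
            buf_len: n,
            margin: linewidth as isize,
            space: linewidth as isize,
            token: vec![String::new(); n],
            size: vec![0; n],
        }))
    }

    fn main() {
        let p = mk_printer(80);
        p.borrow_mut().space -= 5; // mutate through the shared handle
        assert_eq!(p.borrow().buf_len, 240);
    }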
@ -247,42 +238,38 @@ pub fn mk_printer(out: io::Writer, linewidth: uint) -> printer {
* the method called 'pretty_print', and the 'PRINT' process is the method * the method called 'pretty_print', and the 'PRINT' process is the method
* called 'print'. * called 'print'.
*/ */
pub type printer_ = { pub struct Printer {
out: io::Writer, out: @@io::Writer,
buf_len: uint, buf_len: uint,
mut margin: int, // width of lines we're constrained to margin: int, // width of lines we're constrained to
mut space: int, // number of spaces left on line space: int, // number of spaces left on line
mut left: uint, // index of left side of input stream left: uint, // index of left side of input stream
mut right: uint, // index of right side of input stream right: uint, // index of right side of input stream
mut token: ~[token], // ring-buffer stream goes through token: ~[token], // ring-buffer stream goes through
mut size: ~[int], // ring-buffer of calculated sizes size: ~[int], // ring-buffer of calculated sizes
mut left_total: int, // running size of stream "...left" left_total: int, // running size of stream "...left"
mut right_total: int, // running size of stream "...right" right_total: int, // running size of stream "...right"
// pseudo-stack, really a ring too. Holds the // pseudo-stack, really a ring too. Holds the
// primary-ring-buffers index of the BEGIN that started the // primary-ring-buffers index of the BEGIN that started the
// current block, possibly with the most recent BREAK after that // current block, possibly with the most recent BREAK after that
// BEGIN (if there is any) on top of it. Stuff is flushed off the // BEGIN (if there is any) on top of it. Stuff is flushed off the
// bottom as it becomes irrelevant due to the primary ring-buffer // bottom as it becomes irrelevant due to the primary ring-buffer
// advancing. // advancing.
mut scan_stack: ~[uint], scan_stack: ~[uint],
mut scan_stack_empty: bool, // top==bottom disambiguator scan_stack_empty: bool, // top==bottom disambiguator
mut top: uint, // index of top of scan_stack top: uint, // index of top of scan_stack
mut bottom: uint, // index of bottom of scan_stack bottom: uint, // index of bottom of scan_stack
// stack of blocks-in-progress being flushed by print // stack of blocks-in-progress being flushed by print
print_stack: DVec<print_stack_elt>, print_stack: @mut ~[print_stack_elt],
// buffered indentation to avoid writing trailing whitespace // buffered indentation to avoid writing trailing whitespace
mut pending_indentation: int, pending_indentation: int,
};
pub enum printer {
printer_(@printer_)
} }
pub impl printer { pub impl Printer {
fn last_token() -> token { self.token[self.right] } fn last_token(&mut self) -> token { self.token[self.right] }
// be very careful with this! // be very careful with this!
fn replace_last_token(t: token) { self.token[self.right] = t; } fn replace_last_token(&mut self, t: token) { self.token[self.right] = t; }
fn pretty_print(t: token) { fn pretty_print(&mut self, t: token) {
debug!("pp ~[%u,%u]", self.left, self.right); debug!("pp ~[%u,%u]", self.left, self.right);
match t { match t {
EOF => { EOF => {
@ -350,7 +337,7 @@ pub impl printer {
} }
} }
} }
fn check_stream() { fn check_stream(&mut self) {
debug!("check_stream ~[%u, %u] with left_total=%d, right_total=%d", debug!("check_stream ~[%u, %u] with left_total=%d, right_total=%d",
self.left, self.right, self.left_total, self.right_total); self.left, self.right, self.left_total, self.right_total);
if self.right_total - self.left_total > self.space { if self.right_total - self.left_total > self.space {
@ -366,7 +353,7 @@ pub impl printer {
if self.left != self.right { self.check_stream(); } if self.left != self.right { self.check_stream(); }
} }
} }
fn scan_push(x: uint) { fn scan_push(&mut self, x: uint) {
debug!("scan_push %u", x); debug!("scan_push %u", x);
if self.scan_stack_empty { if self.scan_stack_empty {
self.scan_stack_empty = false; self.scan_stack_empty = false;
@ -377,7 +364,7 @@ pub impl printer {
} }
self.scan_stack[self.top] = x; self.scan_stack[self.top] = x;
} }
fn scan_pop() -> uint { fn scan_pop(&mut self) -> uint {
assert (!self.scan_stack_empty); assert (!self.scan_stack_empty);
let x = self.scan_stack[self.top]; let x = self.scan_stack[self.top];
if self.top == self.bottom { if self.top == self.bottom {
@ -385,11 +372,11 @@ pub impl printer {
} else { self.top += self.buf_len - 1u; self.top %= self.buf_len; } } else { self.top += self.buf_len - 1u; self.top %= self.buf_len; }
return x; return x;
} }
fn scan_top() -> uint { fn scan_top(&mut self) -> uint {
assert (!self.scan_stack_empty); assert (!self.scan_stack_empty);
return self.scan_stack[self.top]; return self.scan_stack[self.top];
} }
fn scan_pop_bottom() -> uint { fn scan_pop_bottom(&mut self) -> uint {
assert (!self.scan_stack_empty); assert (!self.scan_stack_empty);
let x = self.scan_stack[self.bottom]; let x = self.scan_stack[self.bottom];
if self.top == self.bottom { if self.top == self.bottom {
@ -397,12 +384,12 @@ pub impl printer {
} else { self.bottom += 1u; self.bottom %= self.buf_len; } } else { self.bottom += 1u; self.bottom %= self.buf_len; }
return x; return x;
} }
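scan_push, scan_pop and scan_pop_bottom above implement the "pseudo-stack, really a ring" described in the struct comment: pushes and pops happen at `top`, stale entries are discarded from `bottom`, and `scan_stack_empty` disambiguates `top == bottom`. A self-contained sketch of that discipline in present-day Rust (the type and method names are illustrative, not from this commit):

    // Fixed-size ring used as a stack that can also be drained from the bottom.
    struct RingStack {
        buf: Vec<usize>,
        top: usize,
        bottom: usize,
        empty: bool, // disambiguates top == bottom
    }

    impl RingStack {
        fn new(n: usize) -> RingStack {
            RingStack { buf: vec![0; n], top: 0, bottom: 0, empty: true }
        }
        fn push(&mut self, x: usize) {
            if self.empty {
                self.empty = false;
            } else {
                self.top = (self.top + 1) % self.buf.len();
                assert!(self.top != self.bottom);
            }
            self.buf[self.top] = x;
        }
        fn pop(&mut self) -> usize {
            assert!(!self.empty);
            let x = self.buf[self.top];
            if self.top == self.bottom {
                self.empty = true;
            } else {
                // step back one slot, wrapping around the ring
                self.top = (self.top + self.buf.len() - 1) % self.buf.len();
            }
            x
        }
        fn pop_bottom(&mut self) -> usize {
            assert!(!self.empty);
            let x = self.buf[self.bottom];
            if self.top == self.bottom {
                self.empty = true;
            } else {
                self.bottom = (self.bottom + 1) % self.buf.len();
            }
            x
        }
    }

    fn main() {
        let mut s = RingStack::new(8);
        s.push(1); s.push(2); s.push(3);
        assert_eq!(s.pop(), 3);        // newest entry comes off the top
        assert_eq!(s.pop_bottom(), 1); // oldest entry can be flushed from below
    }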
fn advance_right() { fn advance_right(&mut self) {
self.right += 1u; self.right += 1u;
self.right %= self.buf_len; self.right %= self.buf_len;
assert (self.right != self.left); assert (self.right != self.left);
} }
fn advance_left(++x: token, L: int) { fn advance_left(&mut self, ++x: token, L: int) {
debug!("advnce_left ~[%u,%u], sizeof(%u)=%d", self.left, self.right, debug!("advnce_left ~[%u,%u], sizeof(%u)=%d", self.left, self.right,
self.left, L); self.left, L);
if L >= 0 { if L >= 0 {
@ -420,7 +407,7 @@ pub impl printer {
} }
} }
} }
fn check_stack(k: int) { fn check_stack(&mut self, k: int) {
if !self.scan_stack_empty { if !self.scan_stack_empty {
let x = self.scan_top(); let x = self.scan_top();
match copy self.token[x] { match copy self.token[x] {
@ -443,17 +430,17 @@ pub impl printer {
} }
} }
} }
fn print_newline(amount: int) { fn print_newline(&mut self, amount: int) {
debug!("NEWLINE %d", amount); debug!("NEWLINE %d", amount);
self.out.write_str(~"\n"); (*self.out).write_str(~"\n");
self.pending_indentation = 0; self.pending_indentation = 0;
self.indent(amount); self.indent(amount);
} }
fn indent(amount: int) { fn indent(&mut self, amount: int) {
debug!("INDENT %d", amount); debug!("INDENT %d", amount);
self.pending_indentation += amount; self.pending_indentation += amount;
} }
fn get_top() -> print_stack_elt { fn get_top(&mut self) -> print_stack_elt {
let n = self.print_stack.len(); let n = self.print_stack.len();
if n != 0u { if n != 0u {
self.print_stack[n - 1u] self.print_stack[n - 1u]
@ -461,14 +448,14 @@ pub impl printer {
{offset: 0, pbreak: broken(inconsistent)} {offset: 0, pbreak: broken(inconsistent)}
} }
} }
fn print_str(s: ~str) { fn print_str(&mut self, s: ~str) {
while self.pending_indentation > 0 { while self.pending_indentation > 0 {
self.out.write_str(~" "); (*self.out).write_str(~" ");
self.pending_indentation -= 1; self.pending_indentation -= 1;
} }
self.out.write_str(s); (*self.out).write_str(s);
} }
fn print(x: token, L: int) { fn print(&mut self, x: token, L: int) {
debug!("print %s %d (remaining line space=%d)", tok_str(x), L, debug!("print %s %d (remaining line space=%d)", tok_str(x), L,
self.space); self.space);
log(debug, buf_str(copy self.token, log(debug, buf_str(copy self.token,
@ -539,39 +526,41 @@ pub impl printer {
} }
// Convenience functions to talk to the printer. // Convenience functions to talk to the printer.
pub fn box(p: printer, indent: uint, b: breaks) { pub fn box(p: @mut Printer, indent: uint, b: breaks) {
p.pretty_print(BEGIN({offset: indent as int, breaks: b})); p.pretty_print(BEGIN({offset: indent as int, breaks: b}));
} }
pub fn ibox(p: printer, indent: uint) { box(p, indent, inconsistent); } pub fn ibox(p: @mut Printer, indent: uint) { box(p, indent, inconsistent); }
pub fn cbox(p: printer, indent: uint) { box(p, indent, consistent); } pub fn cbox(p: @mut Printer, indent: uint) { box(p, indent, consistent); }
pub fn break_offset(p: printer, n: uint, off: int) { pub fn break_offset(p: @mut Printer, n: uint, off: int) {
p.pretty_print(BREAK({offset: off, blank_space: n as int})); p.pretty_print(BREAK({offset: off, blank_space: n as int}));
} }
pub fn end(p: printer) { p.pretty_print(END); } pub fn end(p: @mut Printer) { p.pretty_print(END); }
pub fn eof(p: printer) { p.pretty_print(EOF); } pub fn eof(p: @mut Printer) { p.pretty_print(EOF); }
pub fn word(p: printer, wrd: ~str) { pub fn word(p: @mut Printer, wrd: ~str) {
p.pretty_print(STRING(@wrd, str::len(wrd) as int)); p.pretty_print(STRING(@wrd, str::len(wrd) as int));
} }
pub fn huge_word(p: printer, wrd: ~str) { pub fn huge_word(p: @mut Printer, wrd: ~str) {
p.pretty_print(STRING(@wrd, size_infinity)); p.pretty_print(STRING(@wrd, size_infinity));
} }
pub fn zero_word(p: printer, wrd: ~str) { p.pretty_print(STRING(@wrd, 0)); } pub fn zero_word(p: @mut Printer, wrd: ~str) {
p.pretty_print(STRING(@wrd, 0));
}
pub fn spaces(p: printer, n: uint) { break_offset(p, n, 0); } pub fn spaces(p: @mut Printer, n: uint) { break_offset(p, n, 0); }
pub fn zerobreak(p: printer) { spaces(p, 0u); } pub fn zerobreak(p: @mut Printer) { spaces(p, 0u); }
pub fn space(p: printer) { spaces(p, 1u); } pub fn space(p: @mut Printer) { spaces(p, 1u); }
pub fn hardbreak(p: printer) { spaces(p, size_infinity as uint); } pub fn hardbreak(p: @mut Printer) { spaces(p, size_infinity as uint); }
pub fn hardbreak_tok_offset(off: int) -> token { pub fn hardbreak_tok_offset(off: int) -> token {
return BREAK({offset: off, blank_space: size_infinity}); return BREAK({offset: off, blank_space: size_infinity});
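These free functions are the whole client-facing surface of the printer: callers open a box, emit words and breaks, close the box, and finish with eof. A sketch of how they compose, in present-day Rust, with a stub Printer that merely records the token stream instead of running the layout algorithm (all names here are illustrative):

    // Stubbed token stream; the real Printer runs the Oppen algorithm over it.
    #[derive(Debug)]
    enum Tok { Begin(usize), End, Break(usize), String(String), Eof }

    struct Printer { toks: Vec<Tok> }
    impl Printer { fn pretty_print(&mut self, t: Tok) { self.toks.push(t); } }

    fn ibox(p: &mut Printer, indent: usize) { p.pretty_print(Tok::Begin(indent)); }
    fn word(p: &mut Printer, w: &str) { p.pretty_print(Tok::String(w.to_string())); }
    fn space(p: &mut Printer) { p.pretty_print(Tok::Break(1)); }
    fn end(p: &mut Printer) { p.pretty_print(Tok::End); }
    fn eof(p: &mut Printer) { p.pretty_print(Tok::Eof); }

    fn main() {
        let mut p = Printer { toks: vec![] };
        ibox(&mut p, 4);
        word(&mut p, "let");
        space(&mut p);
        word(&mut p, "x = 1;");
        end(&mut p);
        eof(&mut p);
        // Begin(4) String("let") Break(1) String("x = 1;") End Eof
        println!("{:?}", p.toks);
    }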
View file
@ -23,7 +23,7 @@ use parse::classify::{stmt_ends_with_semi};
use parse::token::ident_interner; use parse::token::ident_interner;
use parse::{comments, lexer, token}; use parse::{comments, lexer, token};
use parse; use parse;
use print::pp::{break_offset, word, printer, space, zerobreak, hardbreak}; use print::pp::{break_offset, word, Printer, space, zerobreak, hardbreak};
use print::pp::{breaks, consistent, inconsistent, eof}; use print::pp::{breaks, consistent, inconsistent, eof};
use print::pp; use print::pp;
use print::pprust; use print::pprust;
@ -37,12 +37,12 @@ use core::str;
use core::u64; use core::u64;
use core::vec; use core::vec;
// The ps is stored here to prevent recursive type. // The @ps is stored here to prevent recursive type.
pub enum ann_node { pub enum ann_node {
node_block(ps, ast::blk), node_block(@ps, ast::blk),
node_item(ps, @ast::item), node_item(@ps, @ast::item),
node_expr(ps, @ast::expr), node_expr(@ps, @ast::expr),
node_pat(ps, @ast::pat), node_pat(@ps, @ast::pat),
} }
pub struct pp_ann { pub struct pp_ann {
pre: fn@(ann_node), pre: fn@(ann_node),
@ -54,37 +54,46 @@ pub fn no_ann() -> pp_ann {
return pp_ann {pre: ignore, post: ignore}; return pp_ann {pre: ignore, post: ignore};
} }
pub type ps = pub struct CurrentCommentAndLiteral {
@{s: pp::printer, cur_cmnt: uint,
cur_lit: uint,
}
pub struct ps {
s: @mut pp::Printer,
cm: Option<@CodeMap>, cm: Option<@CodeMap>,
intr: @token::ident_interner, intr: @token::ident_interner,
comments: Option<~[comments::cmnt]>, comments: Option<~[comments::cmnt]>,
literals: Option<~[comments::lit]>, literals: Option<~[comments::lit]>,
mut cur_cmnt: uint, cur_cmnt_and_lit: @mut CurrentCommentAndLiteral,
mut cur_lit: uint,
boxes: DVec<pp::breaks>, boxes: DVec<pp::breaks>,
ann: pp_ann}; ann: pp_ann
}
pub fn ibox(s: ps, u: uint) { pub fn ibox(s: @ps, u: uint) {
s.boxes.push(pp::inconsistent); s.boxes.push(pp::inconsistent);
pp::ibox(s.s, u); pp::ibox(s.s, u);
} }
pub fn end(s: ps) { pub fn end(s: @ps) {
s.boxes.pop(); s.boxes.pop();
pp::end(s.s); pp::end(s.s);
} }
pub fn rust_printer(writer: io::Writer, intr: @ident_interner) -> ps { pub fn rust_printer(writer: io::Writer, intr: @ident_interner) -> @ps {
return @{s: pp::mk_printer(writer, default_columns), return @ps {
s: pp::mk_printer(writer, default_columns),
cm: None::<@CodeMap>, cm: None::<@CodeMap>,
intr: intr, intr: intr,
comments: None::<~[comments::cmnt]>, comments: None::<~[comments::cmnt]>,
literals: None::<~[comments::lit]>, literals: None::<~[comments::lit]>,
mut cur_cmnt: 0u, cur_cmnt_and_lit: @mut CurrentCommentAndLiteral {
mut cur_lit: 0u, cur_cmnt: 0,
cur_lit: 0
},
boxes: DVec(), boxes: DVec(),
ann: no_ann()}; ann: no_ann()
};
} }
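The rust_printer change shows the other half of the de-muting pattern: the two counters that used to be `mut` fields of `ps` now live together behind one `@mut CurrentCommentAndLiteral`, so the enclosing record can stay immutable while the cursors still advance. A small sketch of the same idea in present-day Rust, with `Cell<usize>` standing in for the shared mutable box (field set trimmed, names illustrative):

    use std::cell::Cell;

    struct CurrentCommentAndLiteral {
        cur_cmnt: Cell<usize>,
        cur_lit: Cell<usize>,
    }

    struct Ps {
        cur_cmnt_and_lit: CurrentCommentAndLiteral,
        // plus printer, codemap, interner, ... in the real struct
    }

    fn bump_comment(s: &Ps) {
        // callers only hold &Ps, yet the comment cursor still advances
        s.cur_cmnt_and_lit.cur_cmnt.set(s.cur_cmnt_and_lit.cur_cmnt.get() + 1);
    }

    fn main() {
        let s = Ps {
            cur_cmnt_and_lit: CurrentCommentAndLiteral {
                cur_cmnt: Cell::new(0),
                cur_lit: Cell::new(0),
            },
        };
        bump_comment(&s);
        assert_eq!(s.cur_cmnt_and_lit.cur_cmnt.get(), 1);
    }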
pub const indent_unit: uint = 4u; pub const indent_unit: uint = 4u;
@ -101,8 +110,8 @@ pub fn print_crate(cm: @CodeMap, intr: @ident_interner,
out: io::Writer, ann: pp_ann, is_expanded: bool) { out: io::Writer, ann: pp_ann, is_expanded: bool) {
let r = comments::gather_comments_and_literals(span_diagnostic, let r = comments::gather_comments_and_literals(span_diagnostic,
filename, in); filename, in);
let s = let s = @ps {
@{s: pp::mk_printer(out, default_columns), s: pp::mk_printer(out, default_columns),
cm: Some(cm), cm: Some(cm),
intr: intr, intr: intr,
comments: Some(r.cmnts), comments: Some(r.cmnts),
@ -110,14 +119,17 @@ pub fn print_crate(cm: @CodeMap, intr: @ident_interner,
// literals, since it doesn't correspond with the literals // literals, since it doesn't correspond with the literals
// in the AST anymore. // in the AST anymore.
literals: if is_expanded { None } else { Some(r.lits) }, literals: if is_expanded { None } else { Some(r.lits) },
mut cur_cmnt: 0u, cur_cmnt_and_lit: @mut CurrentCommentAndLiteral {
mut cur_lit: 0u, cur_cmnt: 0,
cur_lit: 0
},
boxes: DVec(), boxes: DVec(),
ann: ann}; ann: ann
};
print_crate_(s, crate); print_crate_(s, crate);
} }
pub fn print_crate_(s: ps, &&crate: @ast::crate) { pub fn print_crate_(s: @ps, &&crate: @ast::crate) {
print_mod(s, crate.node.module, crate.node.attrs); print_mod(s, crate.node.module, crate.node.attrs);
print_remaining_comments(s); print_remaining_comments(s);
eof(s.s); eof(s.s);
@ -194,27 +206,27 @@ pub fn variant_to_str(var: ast::variant, intr: @ident_interner) -> ~str {
to_str(var, print_variant, intr) to_str(var, print_variant, intr)
} }
pub fn cbox(s: ps, u: uint) { pub fn cbox(s: @ps, u: uint) {
s.boxes.push(pp::consistent); s.boxes.push(pp::consistent);
pp::cbox(s.s, u); pp::cbox(s.s, u);
} }
pub fn box(s: ps, u: uint, b: pp::breaks) { pub fn box(s: @ps, u: uint, b: pp::breaks) {
s.boxes.push(b); s.boxes.push(b);
pp::box(s.s, u, b); pp::box(s.s, u, b);
} }
pub fn nbsp(s: ps) { word(s.s, ~" "); } pub fn nbsp(s: @ps) { word(s.s, ~" "); }
pub fn word_nbsp(s: ps, w: ~str) { word(s.s, w); nbsp(s); } pub fn word_nbsp(s: @ps, w: ~str) { word(s.s, w); nbsp(s); }
pub fn word_space(s: ps, w: ~str) { word(s.s, w); space(s.s); } pub fn word_space(s: @ps, w: ~str) { word(s.s, w); space(s.s); }
pub fn popen(s: ps) { word(s.s, ~"("); } pub fn popen(s: @ps) { word(s.s, ~"("); }
pub fn pclose(s: ps) { word(s.s, ~")"); } pub fn pclose(s: @ps) { word(s.s, ~")"); }
pub fn head(s: ps, w: ~str) { pub fn head(s: @ps, w: ~str) {
// outer-box is consistent // outer-box is consistent
cbox(s, indent_unit); cbox(s, indent_unit);
// head-box is inconsistent // head-box is inconsistent
@ -225,15 +237,15 @@ pub fn head(s: ps, w: ~str) {
} }
} }
pub fn bopen(s: ps) { pub fn bopen(s: @ps) {
word(s.s, ~"{"); word(s.s, ~"{");
end(s); // close the head-box end(s); // close the head-box
} }
pub fn bclose_(s: ps, span: codemap::span, indented: uint) { pub fn bclose_(s: @ps, span: codemap::span, indented: uint) {
bclose_maybe_open(s, span, indented, true); bclose_maybe_open(s, span, indented, true);
} }
pub fn bclose_maybe_open (s: ps, span: codemap::span, indented: uint, pub fn bclose_maybe_open (s: @ps, span: codemap::span, indented: uint,
close_box: bool) { close_box: bool) {
maybe_print_comment(s, span.hi); maybe_print_comment(s, span.hi);
break_offset_if_not_bol(s, 1u, -(indented as int)); break_offset_if_not_bol(s, 1u, -(indented as int));
@ -242,29 +254,29 @@ pub fn bclose_maybe_open (s: ps, span: codemap::span, indented: uint,
end(s); // close the outer-box end(s); // close the outer-box
} }
} }
pub fn bclose(s: ps, span: codemap::span) { bclose_(s, span, indent_unit); } pub fn bclose(s: @ps, span: codemap::span) { bclose_(s, span, indent_unit); }
pub fn is_begin(s: ps) -> bool { pub fn is_begin(s: @ps) -> bool {
match s.s.last_token() { pp::BEGIN(_) => true, _ => false } match s.s.last_token() { pp::BEGIN(_) => true, _ => false }
} }
pub fn is_end(s: ps) -> bool { pub fn is_end(s: @ps) -> bool {
match s.s.last_token() { pp::END => true, _ => false } match s.s.last_token() { pp::END => true, _ => false }
} }
pub fn is_bol(s: ps) -> bool { pub fn is_bol(s: @ps) -> bool {
return s.s.last_token().is_eof() || s.s.last_token().is_hardbreak_tok(); return s.s.last_token().is_eof() || s.s.last_token().is_hardbreak_tok();
} }
pub fn in_cbox(s: ps) -> bool { pub fn in_cbox(s: @ps) -> bool {
let len = s.boxes.len(); let len = s.boxes.len();
if len == 0u { return false; } if len == 0u { return false; }
return s.boxes[len - 1u] == pp::consistent; return s.boxes[len - 1u] == pp::consistent;
} }
pub fn hardbreak_if_not_bol(s: ps) { if !is_bol(s) { hardbreak(s.s); } } pub fn hardbreak_if_not_bol(s: @ps) { if !is_bol(s) { hardbreak(s.s); } }
pub fn space_if_not_bol(s: ps) { if !is_bol(s) { space(s.s); } } pub fn space_if_not_bol(s: @ps) { if !is_bol(s) { space(s.s); } }
pub fn break_offset_if_not_bol(s: ps, n: uint, off: int) { pub fn break_offset_if_not_bol(s: @ps, n: uint, off: int) {
if !is_bol(s) { if !is_bol(s) {
break_offset(s.s, n, off); break_offset(s.s, n, off);
} else { } else {
@ -279,7 +291,7 @@ pub fn break_offset_if_not_bol(s: ps, n: uint, off: int) {
// Synthesizes a comment that was not textually present in the original source // Synthesizes a comment that was not textually present in the original source
// file. // file.
pub fn synth_comment(s: ps, text: ~str) { pub fn synth_comment(s: @ps, text: ~str) {
word(s.s, ~"/*"); word(s.s, ~"/*");
space(s.s); space(s.s);
word(s.s, text); word(s.s, text);
@ -287,7 +299,7 @@ pub fn synth_comment(s: ps, text: ~str) {
word(s.s, ~"*/"); word(s.s, ~"*/");
} }
pub fn commasep<IN>(s: ps, b: breaks, elts: ~[IN], op: fn(ps, IN)) { pub fn commasep<IN>(s: @ps, b: breaks, elts: ~[IN], op: fn(@ps, IN)) {
box(s, 0u, b); box(s, 0u, b);
let mut first = true; let mut first = true;
for elts.each |elt| { for elts.each |elt| {
@ -298,7 +310,7 @@ pub fn commasep<IN>(s: ps, b: breaks, elts: ~[IN], op: fn(ps, IN)) {
} }
pub fn commasep_cmnt<IN>(s: ps, b: breaks, elts: ~[IN], op: fn(ps, IN), pub fn commasep_cmnt<IN>(s: @ps, b: breaks, elts: ~[IN], op: fn(@ps, IN),
get_span: fn(IN) -> codemap::span) { get_span: fn(IN) -> codemap::span) {
box(s, 0u, b); box(s, 0u, b);
let len = vec::len::<IN>(elts); let len = vec::len::<IN>(elts);
@ -317,12 +329,12 @@ pub fn commasep_cmnt<IN>(s: ps, b: breaks, elts: ~[IN], op: fn(ps, IN),
end(s); end(s);
} }
pub fn commasep_exprs(s: ps, b: breaks, exprs: ~[@ast::expr]) { pub fn commasep_exprs(s: @ps, b: breaks, exprs: ~[@ast::expr]) {
fn expr_span(&&expr: @ast::expr) -> codemap::span { return expr.span; } fn expr_span(&&expr: @ast::expr) -> codemap::span { return expr.span; }
commasep_cmnt(s, b, exprs, print_expr, expr_span); commasep_cmnt(s, b, exprs, print_expr, expr_span);
} }
pub fn print_mod(s: ps, _mod: ast::_mod, attrs: ~[ast::attribute]) { pub fn print_mod(s: @ps, _mod: ast::_mod, attrs: ~[ast::attribute]) {
print_inner_attributes(s, attrs); print_inner_attributes(s, attrs);
for _mod.view_items.each |vitem| { for _mod.view_items.each |vitem| {
print_view_item(s, *vitem); print_view_item(s, *vitem);
@ -330,7 +342,7 @@ pub fn print_mod(s: ps, _mod: ast::_mod, attrs: ~[ast::attribute]) {
for _mod.items.each |item| { print_item(s, *item); } for _mod.items.each |item| { print_item(s, *item); }
} }
pub fn print_foreign_mod(s: ps, nmod: ast::foreign_mod, pub fn print_foreign_mod(s: @ps, nmod: ast::foreign_mod,
attrs: ~[ast::attribute]) { attrs: ~[ast::attribute]) {
print_inner_attributes(s, attrs); print_inner_attributes(s, attrs);
for nmod.view_items.each |vitem| { for nmod.view_items.each |vitem| {
@ -339,7 +351,7 @@ pub fn print_foreign_mod(s: ps, nmod: ast::foreign_mod,
for nmod.items.each |item| { print_foreign_item(s, *item); } for nmod.items.each |item| { print_foreign_item(s, *item); }
} }
pub fn print_region(s: ps, prefix: ~str, region: @ast::region, sep: ~str) { pub fn print_region(s: @ps, prefix: ~str, region: @ast::region, sep: ~str) {
word(s.s, prefix); word(s.s, prefix);
match region.node { match region.node {
ast::re_anon => { ast::re_anon => {
@ -358,11 +370,11 @@ pub fn print_region(s: ps, prefix: ~str, region: @ast::region, sep: ~str) {
word(s.s, sep); word(s.s, sep);
} }
pub fn print_type(s: ps, &&ty: @ast::Ty) { pub fn print_type(s: @ps, &&ty: @ast::Ty) {
print_type_ex(s, ty, false); print_type_ex(s, ty, false);
} }
pub fn print_type_ex(s: ps, &&ty: @ast::Ty, print_colons: bool) { pub fn print_type_ex(s: @ps, &&ty: @ast::Ty, print_colons: bool) {
maybe_print_comment(s, ty.span.lo); maybe_print_comment(s, ty.span.lo);
ibox(s, 0u); ibox(s, 0u);
match ty.node { match ty.node {
@ -387,7 +399,7 @@ pub fn print_type_ex(s: ps, &&ty: @ast::Ty, print_colons: bool) {
} }
ast::ty_rec(ref fields) => { ast::ty_rec(ref fields) => {
word(s.s, ~"{"); word(s.s, ~"{");
fn print_field(s: ps, f: ast::ty_field) { fn print_field(s: @ps, f: ast::ty_field) {
cbox(s, indent_unit); cbox(s, indent_unit);
print_mutability(s, f.node.mt.mutbl); print_mutability(s, f.node.mt.mutbl);
print_ident(s, f.node.ident); print_ident(s, f.node.ident);
@ -438,7 +450,7 @@ pub fn print_type_ex(s: ps, &&ty: @ast::Ty, print_colons: bool) {
end(s); end(s);
} }
pub fn print_foreign_item(s: ps, item: @ast::foreign_item) { pub fn print_foreign_item(s: @ps, item: @ast::foreign_item) {
hardbreak_if_not_bol(s); hardbreak_if_not_bol(s);
maybe_print_comment(s, item.span.lo); maybe_print_comment(s, item.span.lo);
print_outer_attributes(s, item.attrs); print_outer_attributes(s, item.attrs);
@ -462,7 +474,7 @@ pub fn print_foreign_item(s: ps, item: @ast::foreign_item) {
} }
} }
pub fn print_item(s: ps, &&item: @ast::item) { pub fn print_item(s: @ps, &&item: @ast::item) {
hardbreak_if_not_bol(s); hardbreak_if_not_bol(s);
maybe_print_comment(s, item.span.lo); maybe_print_comment(s, item.span.lo);
print_outer_attributes(s, item.attrs); print_outer_attributes(s, item.attrs);
@ -597,7 +609,7 @@ pub fn print_item(s: ps, &&item: @ast::item) {
(s.ann.post)(ann_node); (s.ann.post)(ann_node);
} }
pub fn print_enum_def(s: ps, enum_definition: ast::enum_def, pub fn print_enum_def(s: @ps, enum_definition: ast::enum_def,
params: ~[ast::ty_param], ident: ast::ident, params: ~[ast::ty_param], ident: ast::ident,
span: codemap::span, visibility: ast::visibility) { span: codemap::span, visibility: ast::visibility) {
let mut newtype = let mut newtype =
@ -632,7 +644,7 @@ pub fn print_enum_def(s: ps, enum_definition: ast::enum_def,
} }
} }
pub fn print_variants(s: ps, pub fn print_variants(s: @ps,
variants: ~[ast::variant], variants: ~[ast::variant],
span: codemap::span) { span: codemap::span) {
bopen(s); bopen(s);
@ -665,7 +677,7 @@ pub fn visibility_qualified(vis: ast::visibility, s: ~str) -> ~str {
} }
} }
pub fn print_visibility(s: ps, vis: ast::visibility) { pub fn print_visibility(s: @ps, vis: ast::visibility) {
match vis { match vis {
ast::private | ast::public => ast::private | ast::public =>
word_nbsp(s, visibility_to_str(vis)), word_nbsp(s, visibility_to_str(vis)),
@ -673,7 +685,7 @@ pub fn print_visibility(s: ps, vis: ast::visibility) {
} }
} }
pub fn print_struct(s: ps, pub fn print_struct(s: @ps,
struct_def: @ast::struct_def, struct_def: @ast::struct_def,
tps: ~[ast::ty_param], tps: ~[ast::ty_param],
ident: ast::ident, ident: ast::ident,
@ -742,7 +754,7 @@ pub fn print_struct(s: ps,
/// appropriate macro, transcribe back into the grammar we just parsed from, /// appropriate macro, transcribe back into the grammar we just parsed from,
/// and then pretty-print the resulting AST nodes (so, e.g., we print /// and then pretty-print the resulting AST nodes (so, e.g., we print
/// expression arguments as expressions). It can be done! I think. /// expression arguments as expressions). It can be done! I think.
pub fn print_tt(s: ps, tt: ast::token_tree) { pub fn print_tt(s: @ps, tt: ast::token_tree) {
match tt { match tt {
ast::tt_delim(ref tts) => print_tts(s, *tts), ast::tt_delim(ref tts) => print_tts(s, *tts),
ast::tt_tok(_, ref tk) => { ast::tt_tok(_, ref tk) => {
@ -765,7 +777,7 @@ pub fn print_tt(s: ps, tt: ast::token_tree) {
} }
} }
pub fn print_tts(s: ps, &&tts: &[ast::token_tree]) { pub fn print_tts(s: @ps, &&tts: &[ast::token_tree]) {
ibox(s, 0); ibox(s, 0);
for tts.eachi |i, tt| { for tts.eachi |i, tt| {
if i != 0 { if i != 0 {
@ -776,14 +788,14 @@ pub fn print_tts(s: ps, &&tts: &[ast::token_tree]) {
end(s); end(s);
} }
pub fn print_variant(s: ps, v: ast::variant) { pub fn print_variant(s: @ps, v: ast::variant) {
print_visibility(s, v.node.vis); print_visibility(s, v.node.vis);
match v.node.kind { match v.node.kind {
ast::tuple_variant_kind(args) => { ast::tuple_variant_kind(args) => {
print_ident(s, v.node.name); print_ident(s, v.node.name);
if !args.is_empty() { if !args.is_empty() {
popen(s); popen(s);
fn print_variant_arg(s: ps, arg: ast::variant_arg) { fn print_variant_arg(s: @ps, arg: ast::variant_arg) {
print_type(s, arg.ty); print_type(s, arg.ty);
} }
commasep(s, consistent, args, print_variant_arg); commasep(s, consistent, args, print_variant_arg);
@ -808,7 +820,7 @@ pub fn print_variant(s: ps, v: ast::variant) {
} }
} }
pub fn print_ty_method(s: ps, m: ast::ty_method) { pub fn print_ty_method(s: @ps, m: ast::ty_method) {
hardbreak_if_not_bol(s); hardbreak_if_not_bol(s);
maybe_print_comment(s, m.span.lo); maybe_print_comment(s, m.span.lo);
print_outer_attributes(s, m.attrs); print_outer_attributes(s, m.attrs);
@ -818,14 +830,14 @@ pub fn print_ty_method(s: ps, m: ast::ty_method) {
word(s.s, ~";"); word(s.s, ~";");
} }
pub fn print_trait_method(s: ps, m: ast::trait_method) { pub fn print_trait_method(s: @ps, m: ast::trait_method) {
match m { match m {
required(ref ty_m) => print_ty_method(s, (*ty_m)), required(ref ty_m) => print_ty_method(s, (*ty_m)),
provided(m) => print_method(s, m) provided(m) => print_method(s, m)
} }
} }
pub fn print_method(s: ps, meth: @ast::method) { pub fn print_method(s: @ps, meth: @ast::method) {
hardbreak_if_not_bol(s); hardbreak_if_not_bol(s);
maybe_print_comment(s, meth.span.lo); maybe_print_comment(s, meth.span.lo);
print_outer_attributes(s, meth.attrs); print_outer_attributes(s, meth.attrs);
@ -836,7 +848,7 @@ pub fn print_method(s: ps, meth: @ast::method) {
print_block_with_attrs(s, meth.body, meth.attrs); print_block_with_attrs(s, meth.body, meth.attrs);
} }
pub fn print_outer_attributes(s: ps, attrs: ~[ast::attribute]) { pub fn print_outer_attributes(s: @ps, attrs: ~[ast::attribute]) {
let mut count = 0; let mut count = 0;
for attrs.each |attr| { for attrs.each |attr| {
match attr.node.style { match attr.node.style {
@ -847,7 +859,7 @@ pub fn print_outer_attributes(s: ps, attrs: ~[ast::attribute]) {
if count > 0 { hardbreak_if_not_bol(s); } if count > 0 { hardbreak_if_not_bol(s); }
} }
pub fn print_inner_attributes(s: ps, attrs: ~[ast::attribute]) { pub fn print_inner_attributes(s: @ps, attrs: ~[ast::attribute]) {
let mut count = 0; let mut count = 0;
for attrs.each |attr| { for attrs.each |attr| {
match attr.node.style { match attr.node.style {
@ -864,7 +876,7 @@ pub fn print_inner_attributes(s: ps, attrs: ~[ast::attribute]) {
if count > 0 { hardbreak_if_not_bol(s); } if count > 0 { hardbreak_if_not_bol(s); }
} }
pub fn print_attribute(s: ps, attr: ast::attribute) { pub fn print_attribute(s: @ps, attr: ast::attribute) {
hardbreak_if_not_bol(s); hardbreak_if_not_bol(s);
maybe_print_comment(s, attr.span.lo); maybe_print_comment(s, attr.span.lo);
if attr.node.is_sugared_doc { if attr.node.is_sugared_doc {
@ -879,7 +891,7 @@ pub fn print_attribute(s: ps, attr: ast::attribute) {
} }
pub fn print_stmt(s: ps, st: ast::stmt) { pub fn print_stmt(s: @ps, st: ast::stmt) {
maybe_print_comment(s, st.span.lo); maybe_print_comment(s, st.span.lo);
match st.node { match st.node {
ast::stmt_decl(decl, _) => { ast::stmt_decl(decl, _) => {
@ -904,21 +916,21 @@ pub fn print_stmt(s: ps, st: ast::stmt) {
maybe_print_trailing_comment(s, st.span, None); maybe_print_trailing_comment(s, st.span, None);
} }
pub fn print_block(s: ps, blk: ast::blk) { pub fn print_block(s: @ps, blk: ast::blk) {
print_possibly_embedded_block(s, blk, block_normal, indent_unit); print_possibly_embedded_block(s, blk, block_normal, indent_unit);
} }
pub fn print_block_unclosed(s: ps, blk: ast::blk) { pub fn print_block_unclosed(s: @ps, blk: ast::blk) {
print_possibly_embedded_block_(s, blk, block_normal, indent_unit, ~[], print_possibly_embedded_block_(s, blk, block_normal, indent_unit, ~[],
false); false);
} }
pub fn print_block_unclosed_indent(s: ps, blk: ast::blk, indented: uint) { pub fn print_block_unclosed_indent(s: @ps, blk: ast::blk, indented: uint) {
print_possibly_embedded_block_(s, blk, block_normal, indented, ~[], print_possibly_embedded_block_(s, blk, block_normal, indented, ~[],
false); false);
} }
pub fn print_block_with_attrs(s: ps, pub fn print_block_with_attrs(s: @ps,
blk: ast::blk, blk: ast::blk,
attrs: ~[ast::attribute]) { attrs: ~[ast::attribute]) {
print_possibly_embedded_block_(s, blk, block_normal, indent_unit, attrs, print_possibly_embedded_block_(s, blk, block_normal, indent_unit, attrs,
@ -927,7 +939,7 @@ pub fn print_block_with_attrs(s: ps,
pub enum embed_type { block_block_fn, block_normal, } pub enum embed_type { block_block_fn, block_normal, }
pub fn print_possibly_embedded_block(s: ps, pub fn print_possibly_embedded_block(s: @ps,
blk: ast::blk, blk: ast::blk,
embedded: embed_type, embedded: embed_type,
indented: uint) { indented: uint) {
@ -935,7 +947,7 @@ pub fn print_possibly_embedded_block(s: ps,
s, blk, embedded, indented, ~[], true); s, blk, embedded, indented, ~[], true);
} }
pub fn print_possibly_embedded_block_(s: ps, pub fn print_possibly_embedded_block_(s: @ps,
blk: ast::blk, blk: ast::blk,
embedded: embed_type, embedded: embed_type,
indented: uint, indented: uint,
@ -971,14 +983,14 @@ pub fn print_possibly_embedded_block_(s: ps,
(s.ann.post)(ann_node); (s.ann.post)(ann_node);
} }
pub fn print_if(s: ps, test: @ast::expr, blk: ast::blk, pub fn print_if(s: @ps, test: @ast::expr, blk: ast::blk,
elseopt: Option<@ast::expr>, chk: bool) { elseopt: Option<@ast::expr>, chk: bool) {
head(s, ~"if"); head(s, ~"if");
if chk { word_nbsp(s, ~"check"); } if chk { word_nbsp(s, ~"check"); }
print_expr(s, test); print_expr(s, test);
space(s.s); space(s.s);
print_block(s, blk); print_block(s, blk);
fn do_else(s: ps, els: Option<@ast::expr>) { fn do_else(s: @ps, els: Option<@ast::expr>) {
match els { match els {
Some(_else) => { Some(_else) => {
match _else.node { match _else.node {
@ -1011,7 +1023,7 @@ pub fn print_if(s: ps, test: @ast::expr, blk: ast::blk,
do_else(s, elseopt); do_else(s, elseopt);
} }
pub fn print_mac(s: ps, m: ast::mac) { pub fn print_mac(s: @ps, m: ast::mac) {
match m.node { match m.node {
ast::mac_invoc_tt(pth, ref tts) => { ast::mac_invoc_tt(pth, ref tts) => {
print_path(s, pth, false); print_path(s, pth, false);
@ -1023,7 +1035,7 @@ pub fn print_mac(s: ps, m: ast::mac) {
} }
} }
pub fn print_vstore(s: ps, t: ast::vstore) { pub fn print_vstore(s: @ps, t: ast::vstore) {
match t { match t {
ast::vstore_fixed(Some(i)) => word(s.s, fmt!("%u", i)), ast::vstore_fixed(Some(i)) => word(s.s, fmt!("%u", i)),
ast::vstore_fixed(None) => word(s.s, ~"_"), ast::vstore_fixed(None) => word(s.s, ~"_"),
@ -1033,7 +1045,7 @@ pub fn print_vstore(s: ps, t: ast::vstore) {
} }
} }
pub fn print_expr_vstore(s: ps, t: ast::expr_vstore) { pub fn print_expr_vstore(s: @ps, t: ast::expr_vstore) {
match t { match t {
ast::expr_vstore_fixed(Some(i)) => word(s.s, fmt!("%u", i)), ast::expr_vstore_fixed(Some(i)) => word(s.s, fmt!("%u", i)),
ast::expr_vstore_fixed(None) => word(s.s, ~"_"), ast::expr_vstore_fixed(None) => word(s.s, ~"_"),
@ -1051,7 +1063,7 @@ pub fn print_expr_vstore(s: ps, t: ast::expr_vstore) {
} }
} }
pub fn print_call_pre(s: ps, pub fn print_call_pre(s: @ps,
sugar: ast::CallSugar, sugar: ast::CallSugar,
base_args: &mut ~[@ast::expr]) base_args: &mut ~[@ast::expr])
-> Option<@ast::expr> { -> Option<@ast::expr> {
@ -1068,7 +1080,7 @@ pub fn print_call_pre(s: ps,
} }
} }
pub fn print_call_post(s: ps, pub fn print_call_post(s: @ps,
sugar: ast::CallSugar, sugar: ast::CallSugar,
blk: &Option<@ast::expr>, blk: &Option<@ast::expr>,
base_args: &mut ~[@ast::expr]) { base_args: &mut ~[@ast::expr]) {
@ -1095,8 +1107,8 @@ pub fn print_call_post(s: ps,
} }
} }
pub fn print_expr(s: ps, &&expr: @ast::expr) { pub fn print_expr(s: @ps, &&expr: @ast::expr) {
fn print_field(s: ps, field: ast::field) { fn print_field(s: @ps, field: ast::field) {
ibox(s, indent_unit); ibox(s, indent_unit);
if field.node.mutbl == ast::m_mutbl { word_nbsp(s, ~"mut"); } if field.node.mutbl == ast::m_mutbl { word_nbsp(s, ~"mut"); }
print_ident(s, field.node.ident); print_ident(s, field.node.ident);
@ -1446,7 +1458,7 @@ pub fn print_expr(s: ps, &&expr: @ast::expr) {
end(s); end(s);
} }
pub fn print_local_decl(s: ps, loc: @ast::local) { pub fn print_local_decl(s: @ps, loc: @ast::local) {
print_irrefutable_pat(s, loc.node.pat); print_irrefutable_pat(s, loc.node.pat);
match loc.node.ty.node { match loc.node.ty.node {
ast::ty_infer => (), ast::ty_infer => (),
@ -1454,7 +1466,7 @@ pub fn print_local_decl(s: ps, loc: @ast::local) {
} }
} }
pub fn print_decl(s: ps, decl: @ast::decl) { pub fn print_decl(s: @ps, decl: @ast::decl) {
maybe_print_comment(s, decl.span.lo); maybe_print_comment(s, decl.span.lo);
match decl.node { match decl.node {
ast::decl_local(locs) => { ast::decl_local(locs) => {
@ -1468,7 +1480,7 @@ pub fn print_decl(s: ps, decl: @ast::decl) {
word_nbsp(s, ~"mut"); word_nbsp(s, ~"mut");
} }
fn print_local(s: ps, &&loc: @ast::local) { fn print_local(s: @ps, &&loc: @ast::local) {
ibox(s, indent_unit); ibox(s, indent_unit);
print_local_decl(s, loc); print_local_decl(s, loc);
end(s); end(s);
@ -1488,18 +1500,18 @@ pub fn print_decl(s: ps, decl: @ast::decl) {
} }
} }
pub fn print_ident(s: ps, ident: ast::ident) { pub fn print_ident(s: @ps, ident: ast::ident) {
word(s.s, *s.intr.get(ident)); word(s.s, *s.intr.get(ident));
} }
pub fn print_for_decl(s: ps, loc: @ast::local, coll: @ast::expr) { pub fn print_for_decl(s: @ps, loc: @ast::local, coll: @ast::expr) {
print_local_decl(s, loc); print_local_decl(s, loc);
space(s.s); space(s.s);
word_space(s, ~"in"); word_space(s, ~"in");
print_expr(s, coll); print_expr(s, coll);
} }
pub fn print_path(s: ps, &&path: @ast::path, colons_before_params: bool) { pub fn print_path(s: @ps, &&path: @ast::path, colons_before_params: bool) {
maybe_print_comment(s, path.span.lo); maybe_print_comment(s, path.span.lo);
if path.global { word(s.s, ~"::"); } if path.global { word(s.s, ~"::"); }
let mut first = true; let mut first = true;
@ -1526,15 +1538,15 @@ pub fn print_path(s: ps, &&path: @ast::path, colons_before_params: bool) {
} }
} }
pub fn print_irrefutable_pat(s: ps, &&pat: @ast::pat) { pub fn print_irrefutable_pat(s: @ps, &&pat: @ast::pat) {
print_pat(s, pat, false) print_pat(s, pat, false)
} }
pub fn print_refutable_pat(s: ps, &&pat: @ast::pat) { pub fn print_refutable_pat(s: @ps, &&pat: @ast::pat) {
print_pat(s, pat, true) print_pat(s, pat, true)
} }
pub fn print_pat(s: ps, &&pat: @ast::pat, refutable: bool) { pub fn print_pat(s: @ps, &&pat: @ast::pat, refutable: bool) {
maybe_print_comment(s, pat.span.lo); maybe_print_comment(s, pat.span.lo);
let ann_node = node_pat(s, pat); let ann_node = node_pat(s, pat);
(s.ann.pre)(ann_node); (s.ann.pre)(ann_node);
@ -1580,7 +1592,7 @@ pub fn print_pat(s: ps, &&pat: @ast::pat, refutable: bool) {
} }
ast::pat_rec(fields, etc) => { ast::pat_rec(fields, etc) => {
word(s.s, ~"{"); word(s.s, ~"{");
fn print_field(s: ps, f: ast::field_pat, refutable: bool) { fn print_field(s: @ps, f: ast::field_pat, refutable: bool) {
cbox(s, indent_unit); cbox(s, indent_unit);
print_ident(s, f.ident); print_ident(s, f.ident);
word_space(s, ~":"); word_space(s, ~":");
@ -1600,7 +1612,7 @@ pub fn print_pat(s: ps, &&pat: @ast::pat, refutable: bool) {
ast::pat_struct(path, fields, etc) => { ast::pat_struct(path, fields, etc) => {
print_path(s, path, true); print_path(s, path, true);
word(s.s, ~"{"); word(s.s, ~"{");
fn print_field(s: ps, f: ast::field_pat, refutable: bool) { fn print_field(s: @ps, f: ast::field_pat, refutable: bool) {
cbox(s, indent_unit); cbox(s, indent_unit);
print_ident(s, f.ident); print_ident(s, f.ident);
word_space(s, ~":"); word_space(s, ~":");
@ -1656,7 +1668,7 @@ pub fn print_pat(s: ps, &&pat: @ast::pat, refutable: bool) {
} }
// Returns whether it printed anything // Returns whether it printed anything
pub fn print_self_ty(s: ps, self_ty: ast::self_ty_) -> bool { pub fn print_self_ty(s: @ps, self_ty: ast::self_ty_) -> bool {
match self_ty { match self_ty {
ast::sty_static | ast::sty_by_ref => { return false; } ast::sty_static | ast::sty_by_ref => { return false; }
ast::sty_value => { word(s.s, ~"self"); } ast::sty_value => { word(s.s, ~"self"); }
@ -1673,7 +1685,7 @@ pub fn print_self_ty(s: ps, self_ty: ast::self_ty_) -> bool {
return true; return true;
} }
pub fn print_fn(s: ps, pub fn print_fn(s: @ps,
decl: ast::fn_decl, decl: ast::fn_decl,
purity: Option<ast::purity>, purity: Option<ast::purity>,
name: ast::ident, name: ast::ident,
@ -1688,7 +1700,7 @@ pub fn print_fn(s: ps,
print_fn_args_and_ret(s, decl, opt_self_ty); print_fn_args_and_ret(s, decl, opt_self_ty);
} }
pub fn print_fn_args(s: ps, decl: ast::fn_decl, pub fn print_fn_args(s: @ps, decl: ast::fn_decl,
opt_self_ty: Option<ast::self_ty_>) { opt_self_ty: Option<ast::self_ty_>) {
// It is unfortunate to duplicate the commasep logic, but we want the // It is unfortunate to duplicate the commasep logic, but we want the
// self type and the args all in the same box. // self type and the args all in the same box.
@ -1706,7 +1718,7 @@ pub fn print_fn_args(s: ps, decl: ast::fn_decl,
end(s); end(s);
} }
pub fn print_fn_args_and_ret(s: ps, decl: ast::fn_decl, pub fn print_fn_args_and_ret(s: @ps, decl: ast::fn_decl,
opt_self_ty: Option<ast::self_ty_>) { opt_self_ty: Option<ast::self_ty_>) {
popen(s); popen(s);
print_fn_args(s, decl, opt_self_ty); print_fn_args(s, decl, opt_self_ty);
@ -1723,7 +1735,7 @@ pub fn print_fn_args_and_ret(s: ps, decl: ast::fn_decl,
} }
} }
pub fn print_fn_block_args(s: ps, decl: ast::fn_decl) { pub fn print_fn_block_args(s: @ps, decl: ast::fn_decl) {
word(s.s, ~"|"); word(s.s, ~"|");
print_fn_args(s, decl, None); print_fn_args(s, decl, None);
word(s.s, ~"|"); word(s.s, ~"|");
@ -1749,12 +1761,12 @@ pub fn mode_to_str(m: ast::mode) -> ~str {
} }
} }
pub fn print_arg_mode(s: ps, m: ast::mode) { pub fn print_arg_mode(s: @ps, m: ast::mode) {
let ms = mode_to_str(m); let ms = mode_to_str(m);
if ms != ~"" { word(s.s, ms); } if ms != ~"" { word(s.s, ms); }
} }
pub fn print_bounds(s: ps, bounds: @~[ast::ty_param_bound]) { pub fn print_bounds(s: @ps, bounds: @~[ast::ty_param_bound]) {
if !bounds.is_empty() { if !bounds.is_empty() {
word(s.s, ~":"); word(s.s, ~":");
let mut first = true; let mut first = true;
@ -1774,10 +1786,10 @@ pub fn print_bounds(s: ps, bounds: @~[ast::ty_param_bound]) {
} }
} }
pub fn print_type_params(s: ps, &&params: ~[ast::ty_param]) { pub fn print_type_params(s: @ps, &&params: ~[ast::ty_param]) {
if vec::len(params) > 0u { if vec::len(params) > 0u {
word(s.s, ~"<"); word(s.s, ~"<");
fn printParam(s: ps, param: ast::ty_param) { fn printParam(s: @ps, param: ast::ty_param) {
print_ident(s, param.ident); print_ident(s, param.ident);
print_bounds(s, param.bounds); print_bounds(s, param.bounds);
} }
@ -1786,7 +1798,7 @@ pub fn print_type_params(s: ps, &&params: ~[ast::ty_param]) {
} }
} }
pub fn print_meta_item(s: ps, &&item: @ast::meta_item) { pub fn print_meta_item(s: @ps, &&item: @ast::meta_item) {
ibox(s, indent_unit); ibox(s, indent_unit);
match item.node { match item.node {
ast::meta_word(ref name) => word(s.s, (*name)), ast::meta_word(ref name) => word(s.s, (*name)),
@ -1805,7 +1817,7 @@ pub fn print_meta_item(s: ps, &&item: @ast::meta_item) {
end(s); end(s);
} }
pub fn print_view_path(s: ps, &&vp: @ast::view_path) { pub fn print_view_path(s: @ps, &&vp: @ast::view_path) {
match vp.node { match vp.node {
ast::view_path_simple(ident, path, namespace, _) => { ast::view_path_simple(ident, path, namespace, _) => {
if namespace == ast::module_ns { if namespace == ast::module_ns {
@ -1835,11 +1847,11 @@ pub fn print_view_path(s: ps, &&vp: @ast::view_path) {
} }
} }
pub fn print_view_paths(s: ps, vps: ~[@ast::view_path]) { pub fn print_view_paths(s: @ps, vps: ~[@ast::view_path]) {
commasep(s, inconsistent, vps, print_view_path); commasep(s, inconsistent, vps, print_view_path);
} }
pub fn print_view_item(s: ps, item: @ast::view_item) { pub fn print_view_item(s: @ps, item: @ast::view_item) {
hardbreak_if_not_bol(s); hardbreak_if_not_bol(s);
maybe_print_comment(s, item.span.lo); maybe_print_comment(s, item.span.lo);
print_outer_attributes(s, item.attrs); print_outer_attributes(s, item.attrs);
@ -1865,7 +1877,7 @@ pub fn print_view_item(s: ps, item: @ast::view_item) {
end(s); // end outer head-block end(s); // end outer head-block
} }
pub fn print_mutability(s: ps, mutbl: ast::mutability) { pub fn print_mutability(s: @ps, mutbl: ast::mutability) {
match mutbl { match mutbl {
ast::m_mutbl => word_nbsp(s, ~"mut"), ast::m_mutbl => word_nbsp(s, ~"mut"),
ast::m_const => word_nbsp(s, ~"const"), ast::m_const => word_nbsp(s, ~"const"),
@ -1873,12 +1885,12 @@ pub fn print_mutability(s: ps, mutbl: ast::mutability) {
} }
} }
pub fn print_mt(s: ps, mt: ast::mt) { pub fn print_mt(s: @ps, mt: ast::mt) {
print_mutability(s, mt.mutbl); print_mutability(s, mt.mutbl);
print_type(s, mt.ty); print_type(s, mt.ty);
} }
pub fn print_arg(s: ps, input: ast::arg) { pub fn print_arg(s: @ps, input: ast::arg) {
ibox(s, indent_unit); ibox(s, indent_unit);
print_arg_mode(s, input.mode); print_arg_mode(s, input.mode);
if input.is_mutbl { if input.is_mutbl {
@ -1905,7 +1917,7 @@ pub fn print_arg(s: ps, input: ast::arg) {
end(s); end(s);
} }
pub fn print_ty_fn(s: ps, pub fn print_ty_fn(s: @ps,
opt_abi: Option<ast::Abi>, opt_abi: Option<ast::Abi>,
opt_sigil: Option<ast::Sigil>, opt_sigil: Option<ast::Sigil>,
opt_region: Option<@ast::region>, opt_region: Option<@ast::region>,
@ -1961,7 +1973,7 @@ pub fn print_ty_fn(s: ps,
end(s); end(s);
} }
pub fn maybe_print_trailing_comment(s: ps, span: codemap::span, pub fn maybe_print_trailing_comment(s: @ps, span: codemap::span,
next_pos: Option<BytePos>) { next_pos: Option<BytePos>) {
let mut cm; let mut cm;
match s.cm { Some(ccm) => cm = ccm, _ => return } match s.cm { Some(ccm) => cm = ccm, _ => return }
@ -1975,26 +1987,29 @@ pub fn maybe_print_trailing_comment(s: ps, span: codemap::span,
if span.hi < (*cmnt).pos && (*cmnt).pos < next && if span.hi < (*cmnt).pos && (*cmnt).pos < next &&
span_line.line == comment_line.line { span_line.line == comment_line.line {
print_comment(s, (*cmnt)); print_comment(s, (*cmnt));
s.cur_cmnt += 1u; s.cur_cmnt_and_lit.cur_cmnt += 1u;
} }
} }
_ => () _ => ()
} }
} }
pub fn print_remaining_comments(s: ps) { pub fn print_remaining_comments(s: @ps) {
// If there aren't any remaining comments, then we need to manually // If there aren't any remaining comments, then we need to manually
// make sure there is a line break at the end. // make sure there is a line break at the end.
if next_comment(s).is_none() { hardbreak(s.s); } if next_comment(s).is_none() { hardbreak(s.s); }
loop { loop {
match next_comment(s) { match next_comment(s) {
Some(ref cmnt) => { print_comment(s, (*cmnt)); s.cur_cmnt += 1u; } Some(ref cmnt) => {
print_comment(s, (*cmnt));
s.cur_cmnt_and_lit.cur_cmnt += 1u;
}
_ => break _ => break
} }
} }
} }
pub fn print_literal(s: ps, &&lit: @ast::lit) { pub fn print_literal(s: @ps, &&lit: @ast::lit) {
maybe_print_comment(s, lit.span.lo); maybe_print_comment(s, lit.span.lo);
match next_lit(s, lit.span.lo) { match next_lit(s, lit.span.lo) {
Some(ref ltrl) => { Some(ref ltrl) => {
@ -2046,13 +2061,13 @@ pub fn lit_to_str(l: @ast::lit) -> ~str {
return to_str(l, print_literal, parse::token::mk_fake_ident_interner()); return to_str(l, print_literal, parse::token::mk_fake_ident_interner());
} }
pub fn next_lit(s: ps, pos: BytePos) -> Option<comments::lit> { pub fn next_lit(s: @ps, pos: BytePos) -> Option<comments::lit> {
match s.literals { match s.literals {
Some(ref lits) => { Some(ref lits) => {
while s.cur_lit < vec::len((*lits)) { while s.cur_cmnt_and_lit.cur_lit < vec::len((*lits)) {
let ltrl = (*lits)[s.cur_lit]; let ltrl = (*lits)[s.cur_cmnt_and_lit.cur_lit];
if ltrl.pos > pos { return None; } if ltrl.pos > pos { return None; }
s.cur_lit += 1u; s.cur_cmnt_and_lit.cur_lit += 1u;
if ltrl.pos == pos { return Some(ltrl); } if ltrl.pos == pos { return Some(ltrl); }
} }
return None; return None;
@ -2061,13 +2076,13 @@ pub fn next_lit(s: ps, pos: BytePos) -> Option<comments::lit> {
} }
} }
pub fn maybe_print_comment(s: ps, pos: BytePos) { pub fn maybe_print_comment(s: @ps, pos: BytePos) {
loop { loop {
match next_comment(s) { match next_comment(s) {
Some(ref cmnt) => { Some(ref cmnt) => {
if (*cmnt).pos < pos { if (*cmnt).pos < pos {
print_comment(s, (*cmnt)); print_comment(s, (*cmnt));
s.cur_cmnt += 1u; s.cur_cmnt_and_lit.cur_cmnt += 1u;
} else { break; } } else { break; }
} }
_ => break _ => break
@ -2075,7 +2090,7 @@ pub fn maybe_print_comment(s: ps, pos: BytePos) {
} }
} }
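maybe_print_comment and next_lit above walk the pre-gathered comment and literal vectors by byte position: a cursor in cur_cmnt_and_lit advances past every comment that starts before the AST node currently being printed. A compact sketch of that interleaving in present-day Rust (types and names illustrative):

    // Comments are pre-gathered in source order, each with its byte position.
    struct Cmnt { pos: usize, text: String }

    struct State { comments: Vec<Cmnt>, cur_cmnt: usize }

    impl State {
        // Emit every not-yet-printed comment that begins before `pos`.
        fn maybe_print_comment(&mut self, pos: usize, out: &mut String) {
            while self.cur_cmnt < self.comments.len()
                && self.comments[self.cur_cmnt].pos < pos
            {
                out.push_str(&self.comments[self.cur_cmnt].text);
                out.push('\n');
                self.cur_cmnt += 1;
            }
        }
    }

    fn main() {
        let mut s = State {
            comments: vec![
                Cmnt { pos: 3, text: "// a".into() },
                Cmnt { pos: 9, text: "// b".into() },
            ],
            cur_cmnt: 0,
        };
        let mut out = String::new();
        s.maybe_print_comment(5, &mut out); // prints "// a" only
        assert_eq!(s.cur_cmnt, 1);
    }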
pub fn print_comment(s: ps, cmnt: comments::cmnt) { pub fn print_comment(s: @ps, cmnt: comments::cmnt) {
match cmnt.style { match cmnt.style {
comments::mixed => { comments::mixed => {
assert (vec::len(cmnt.lines) == 1u); assert (vec::len(cmnt.lines) == 1u);
@ -2119,13 +2134,13 @@ pub fn print_comment(s: ps, cmnt: comments::cmnt) {
} }
} }
pub fn print_string(s: ps, st: ~str) { pub fn print_string(s: @ps, st: ~str) {
word(s.s, ~"\""); word(s.s, ~"\"");
word(s.s, str::escape_default(st)); word(s.s, str::escape_default(st));
word(s.s, ~"\""); word(s.s, ~"\"");
} }
pub fn to_str<T>(t: T, f: fn@(ps, T), intr: @ident_interner) -> ~str { pub fn to_str<T>(t: T, f: fn@(@ps, T), intr: @ident_interner) -> ~str {
do io::with_str_writer |wr| { do io::with_str_writer |wr| {
let s = rust_printer(wr, intr); let s = rust_printer(wr, intr);
f(s, t); f(s, t);
@ -2133,18 +2148,18 @@ pub fn to_str<T>(t: T, f: fn@(ps, T), intr: @ident_interner) -> ~str {
} }
} }
pub fn next_comment(s: ps) -> Option<comments::cmnt> { pub fn next_comment(s: @ps) -> Option<comments::cmnt> {
match s.comments { match s.comments {
Some(ref cmnts) => { Some(ref cmnts) => {
if s.cur_cmnt < vec::len((*cmnts)) { if s.cur_cmnt_and_lit.cur_cmnt < vec::len((*cmnts)) {
return Some((*cmnts)[s.cur_cmnt]); return Some((*cmnts)[s.cur_cmnt_and_lit.cur_cmnt]);
} else { return None::<comments::cmnt>; } } else { return None::<comments::cmnt>; }
} }
_ => return None::<comments::cmnt> _ => return None::<comments::cmnt>
} }
} }
pub fn print_self_ty_if_static(s: ps, pub fn print_self_ty_if_static(s: @ps,
opt_self_ty: Option<ast::self_ty_>) { opt_self_ty: Option<ast::self_ty_>) {
match opt_self_ty { match opt_self_ty {
Some(ast::sty_static) => { word(s.s, ~"static "); } Some(ast::sty_static) => { word(s.s, ~"static "); }
@ -2152,7 +2167,7 @@ pub fn print_self_ty_if_static(s: ps,
} }
} }
pub fn print_opt_purity(s: ps, opt_purity: Option<ast::purity>) { pub fn print_opt_purity(s: @ps, opt_purity: Option<ast::purity>) {
match opt_purity { match opt_purity {
Some(ast::impure_fn) => { } Some(ast::impure_fn) => { }
Some(purity) => { Some(purity) => {
@ -2162,14 +2177,14 @@ pub fn print_opt_purity(s: ps, opt_purity: Option<ast::purity>) {
} }
} }
pub fn print_opt_abi(s: ps, opt_abi: Option<ast::Abi>) { pub fn print_opt_abi(s: @ps, opt_abi: Option<ast::Abi>) {
match opt_abi { match opt_abi {
Some(ast::RustAbi) => { word_nbsp(s, ~"extern"); } Some(ast::RustAbi) => { word_nbsp(s, ~"extern"); }
None => {} None => {}
}; };
} }
pub fn print_opt_sigil(s: ps, opt_sigil: Option<ast::Sigil>) { pub fn print_opt_sigil(s: @ps, opt_sigil: Option<ast::Sigil>) {
match opt_sigil { match opt_sigil {
Some(ast::BorrowedSigil) => { word(s.s, ~"&"); } Some(ast::BorrowedSigil) => { word(s.s, ~"&"); }
Some(ast::OwnedSigil) => { word(s.s, ~"~"); } Some(ast::OwnedSigil) => { word(s.s, ~"~"); }
@ -2178,7 +2193,7 @@ pub fn print_opt_sigil(s: ps, opt_sigil: Option<ast::Sigil>) {
}; };
} }
pub fn print_fn_header_info(s: ps, pub fn print_fn_header_info(s: @ps,
opt_sty: Option<ast::self_ty_>, opt_sty: Option<ast::self_ty_>,
opt_purity: Option<ast::purity>, opt_purity: Option<ast::purity>,
onceness: ast::Onceness, onceness: ast::Onceness,
@ -2215,14 +2230,14 @@ pub pure fn onceness_to_str(o: ast::Onceness) -> ~str {
} }
} }
pub fn print_purity(s: ps, p: ast::purity) { pub fn print_purity(s: @ps, p: ast::purity) {
match p { match p {
ast::impure_fn => (), ast::impure_fn => (),
_ => word_nbsp(s, purity_to_str(p)) _ => word_nbsp(s, purity_to_str(p))
} }
} }
pub fn print_onceness(s: ps, o: ast::Onceness) { pub fn print_onceness(s: @ps, o: ast::Onceness) {
match o { match o {
ast::Once => { word_nbsp(s, ~"once"); } ast::Once => { word_nbsp(s, ~"once"); }
ast::Many => {} ast::Many => {}