From bdc1b7a8a71b0168a26e1f897246abc6c6ab1091 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Tue, 9 Oct 2012 08:36:06 -0700 Subject: [PATCH 01/40] libcore: add a str::with_capacity to match the fn in vec --- src/libcore/str.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/libcore/str.rs b/src/libcore/str.rs index d743f0c7f3be1..33016bf9923e8 100644 --- a/src/libcore/str.rs +++ b/src/libcore/str.rs @@ -1473,6 +1473,11 @@ pub pure fn from_utf16(v: &[u16]) -> ~str { move buf } +pub pure fn with_capacity(capacity: uint) -> ~str { + let mut buf = ~""; + unsafe { reserve(&mut buf, capacity); } + move buf +} /** * As char_len but for a slice of a string From 25096a212a9ccaa0d181630af5971532c3472182 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Philipp=20Br=C3=BCschweiler?= Date: Wed, 10 Oct 2012 22:40:17 +0200 Subject: [PATCH 02/40] rustc: fix size computation of structs for the FFI It didn't take alignment into account. Fixes #3656. --- src/rustc/middle/trans/foreign.rs | 7 ++++--- src/test/run-pass/issue-3656.rs | 20 ++++++++++++++++++++ 2 files changed, 24 insertions(+), 3 deletions(-) create mode 100644 src/test/run-pass/issue-3656.rs diff --git a/src/rustc/middle/trans/foreign.rs b/src/rustc/middle/trans/foreign.rs index 74dadd2cab498..4a87eb57cbe24 100644 --- a/src/rustc/middle/trans/foreign.rs +++ b/src/rustc/middle/trans/foreign.rs @@ -112,9 +112,10 @@ fn classify_ty(ty: TypeRef) -> ~[x86_64_reg_class] { Float => 4, Double => 8, Struct => { - do vec::foldl(0, struct_tys(ty)) |s, t| { - s + ty_size(*t) - } + let size = do vec::foldl(0, struct_tys(ty)) |s, t| { + align(s, *t) + ty_size(*t) + }; + align(size, ty) } Array => { let len = llvm::LLVMGetArrayLength(ty) as uint; diff --git a/src/test/run-pass/issue-3656.rs b/src/test/run-pass/issue-3656.rs new file mode 100644 index 0000000000000..feb7ad1db7a8e --- /dev/null +++ b/src/test/run-pass/issue-3656.rs @@ -0,0 +1,20 @@ +// Issue #3656 +// Incorrect struct size computation in the FFI, because of not taking +// the alignment of elements into account. + +use libc::*; + +struct KEYGEN { + hash_algorithm: [c_uint]/2, + count: uint32_t, + salt: *c_void, + salt_size: uint32_t, +} + +extern { + // Bogus signature, just need to test if it compiles. + pub fn malloc(++data: KEYGEN); +} + +fn main() { +} From 97ecde297e2b49fc6445c233cb95d8cd1c341b2d Mon Sep 17 00:00:00 2001 From: Luca Bruno Date: Mon, 8 Oct 2012 21:40:09 +0200 Subject: [PATCH 03/40] Conditional usage of LLVM DebugFlag DebugFlag is conditionally exported by LLVM in llvm/Support/Debug.h in-between an #ifndef NDEBUG block; RustWrapper should not unconditionally use it. This closes #3701. Signed-off-by: Luca Bruno --- src/rustllvm/RustWrapper.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/rustllvm/RustWrapper.cpp b/src/rustllvm/RustWrapper.cpp index fc2049507eed9..498a4e137f0e3 100644 --- a/src/rustllvm/RustWrapper.cpp +++ b/src/rustllvm/RustWrapper.cpp @@ -493,5 +493,7 @@ extern "C" LLVMValueRef LLVMBuildAtomicRMW(LLVMBuilderRef B, } extern "C" void LLVMSetDebug(int Enabled) { +#ifndef NDEBUG DebugFlag = Enabled; +#endif } From 01aaeef619cc249a6c4555b3c865bb081e5b80c5 Mon Sep 17 00:00:00 2001 From: Luca Bruno Date: Sun, 14 Oct 2012 14:45:21 +0200 Subject: [PATCH 04/40] Use gpgv for signature verification in cargo Parsing gpg output for signature verification is not recommended, as it can break easily (and doesn't work with i18n). 
This patch makes use of gpgv, as suggested by gpg authors: http://lists.gnupg.org/pipermail/gnupg-users/2004-August/023141.html This closes #3762. Signed-off-by: Luca Bruno --- src/cargo/cargo.rs | 37 ++++++++++++++++++------------------- src/cargo/pgp.rs | 23 +++++++++++------------ 2 files changed, 29 insertions(+), 31 deletions(-) diff --git a/src/cargo/cargo.rs b/src/cargo/cargo.rs index 5f39eb6b960e2..322d780042089 100644 --- a/src/cargo/cargo.rs +++ b/src/cargo/cargo.rs @@ -1162,20 +1162,20 @@ fn sync_one_file(c: &Cargo, dir: &Path, src: @Source) -> bool { } match (src.key, src.keyfp) { (Some(_), Some(f)) => { - let r = pgp::verify(&c.root, &pkgfile, &sigfile, f); + let r = pgp::verify(&c.root, &pkgfile, &sigfile); if !r { - error(fmt!("signature verification failed for source %s", - name)); + error(fmt!("signature verification failed for source %s with key %s", + name, f)); return false; } if has_src_file { - let e = pgp::verify(&c.root, &srcfile, &srcsigfile, f); + let e = pgp::verify(&c.root, &srcfile, &srcsigfile); if !e { - error(fmt!("signature verification failed for source %s", - name)); + error(fmt!("signature verification failed for source %s with key %s", + name, f)); return false; } } @@ -1273,21 +1273,21 @@ fn sync_one_git(c: &Cargo, dir: &Path, src: @Source) -> bool { } match (src.key, src.keyfp) { (Some(_), Some(f)) => { - let r = pgp::verify(&c.root, &pkgfile, &sigfile, f); + let r = pgp::verify(&c.root, &pkgfile, &sigfile); if !r { - error(fmt!("signature verification failed for source %s", - name)); + error(fmt!("signature verification failed for source %s with key %s", + name, f)); rollback(name, dir, false); return false; } if has_src_file { - let e = pgp::verify(&c.root, &srcfile, &srcsigfile, f); + let e = pgp::verify(&c.root, &srcfile, &srcsigfile); if !e { - error(fmt!("signature verification failed for source %s", - name)); + error(fmt!("signature verification failed for source %s with key %s", + name, f)); rollback(name, dir, false); return false; } @@ -1370,11 +1370,11 @@ fn sync_one_curl(c: &Cargo, dir: &Path, src: @Source) -> bool { return false; } - let r = pgp::verify(&c.root, &pkgfile, &sigfile, f); + let r = pgp::verify(&c.root, &pkgfile, &sigfile); if !r { - error(fmt!("signature verification failed for source %s", - name)); + error(fmt!("signature verification failed for source %s with key %s", + name, f)); return false; } @@ -1390,11 +1390,11 @@ fn sync_one_curl(c: &Cargo, dir: &Path, src: @Source) -> bool { return false; } - let e = pgp::verify(&c.root, &srcfile, &srcsigfile, f); + let e = pgp::verify(&c.root, &srcfile, &srcsigfile); if !e { error(~"signature verification failed for " + - ~"source " + name); + ~"source " + name + ~" with key " + f); return false; } } @@ -1463,8 +1463,7 @@ fn cmd_init(c: &Cargo) { return; } - let r = pgp::verify(&c.root, &srcfile, &sigfile, - pgp::signing_key_fp()); + let r = pgp::verify(&c.root, &srcfile, &sigfile); if !r { error(fmt!("signature verification failed for '%s'", srcfile.to_str())); diff --git a/src/cargo/pgp.rs b/src/cargo/pgp.rs index 17cb8dc648789..5fbfa55838c8e 100644 --- a/src/cargo/pgp.rs +++ b/src/cargo/pgp.rs @@ -1,5 +1,5 @@ -fn gpg(args: ~[~str]) -> { status: int, out: ~str, err: ~str } { - return run::program_output(~"gpg", args); +fn gpgv(args: ~[~str]) -> { status: int, out: ~str, err: ~str } { + return run::program_output(~"gpgv", args); } fn signing_key() -> ~str { @@ -59,7 +59,7 @@ fn signing_key_fp() -> ~str { } fn supported() -> bool { - let r = gpg(~[~"--version"]); + let r = 
gpgv(~[~"--version"]); r.status == 0 } @@ -88,15 +88,14 @@ fn add(root: &Path, key: &Path) { } } -fn verify(root: &Path, data: &Path, sig: &Path, keyfp: ~str) -> bool { +fn verify(root: &Path, data: &Path, sig: &Path) -> bool { let path = root.push("gpg"); - let p = gpg(~[~"--homedir", path.to_str(), - ~"--with-fingerprint", - ~"--verify", sig.to_str(), - data.to_str()]); - let res = ~"Primary key fingerprint: " + keyfp; - for str::split_char_each(p.err, '\n') |line| { - if line == res { return true; } + let res = gpgv(~[~"--homedir", path.to_str(), + ~"--keyring", ~"pubring.gpg", + ~"--verbose", + sig.to_str(), data.to_str()]); + if res.status != 0 { + return false; } - return false; + return true; } From ab89b5c294ac511d4d58809f9f20dfe4f2c8fe52 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sat, 13 Oct 2012 09:11:33 -0700 Subject: [PATCH 05/40] libstd: make Serializer a trait-level typaram --- src/libstd/json.rs | 28 ++ src/libstd/serialization.rs | 487 ++++++++++++++++++++++++++++ src/libsyntax/ast.rs | 49 +++ src/libsyntax/ext/auto_serialize.rs | 143 ++++---- src/test/run-pass/auto_serialize.rs | 74 +++-- 5 files changed, 698 insertions(+), 83 deletions(-) diff --git a/src/libstd/json.rs b/src/libstd/json.rs index 0d3391c1867c2..d170255b565d8 100644 --- a/src/libstd/json.rs +++ b/src/libstd/json.rs @@ -273,6 +273,7 @@ pub impl PrettySerializer: serialization::Serializer { } } +#[cfg(stage0)] pub impl Json: serialization::Serializable { fn serialize(&self, s: &S) { match *self { @@ -296,6 +297,33 @@ pub impl Json: serialization::Serializable { } } +#[cfg(stage1)] +#[cfg(stage2)] +pub impl< + S: serialization::Serializer +> Json: serialization::Serializable { + fn serialize(&self, s: &S) { + match *self { + Number(v) => v.serialize(s), + String(ref v) => v.serialize(s), + Boolean(v) => v.serialize(s), + List(v) => v.serialize(s), + Object(ref v) => { + do s.emit_rec || { + let mut idx = 0; + for v.each |key, value| { + do s.emit_field(*key, idx) { + value.serialize(s); + } + idx += 1; + } + } + }, + Null => s.emit_nil(), + } + } +} + /// Serializes a json value into a io::writer pub fn to_writer(wr: io::Writer, json: &Json) { json.serialize(&Serializer(wr)) diff --git a/src/libstd/serialization.rs b/src/libstd/serialization.rs index b7cf09cc6aa50..9df2a326a8474 100644 --- a/src/libstd/serialization.rs +++ b/src/libstd/serialization.rs @@ -92,6 +92,8 @@ pub trait Deserializer { fn read_tup_elt(&self, idx: uint, f: fn() -> T) -> T; } +#[cfg(stage0)] +pub mod traits { pub trait Serializable { fn serialize(&self, s: &S); } @@ -561,3 +563,488 @@ pub impl D: DeserializerHelpers { } } } +} + +#[cfg(stage1)] +#[cfg(stage2)] +pub mod traits { +pub trait Serializable { + fn serialize(&self, s: &S); +} + +pub trait Deserializable { + static fn deserialize(&self, d: &D) -> self; +} + +pub impl uint: Serializable { + fn serialize(&self, s: &S) { s.emit_uint(*self) } +} + +pub impl uint: Deserializable { + static fn deserialize(&self, d: &D) -> uint { + d.read_uint() + } +} + +pub impl u8: Serializable { + fn serialize(&self, s: &S) { s.emit_u8(*self) } +} + +pub impl u8: Deserializable { + static fn deserialize(&self, d: &D) -> u8 { + d.read_u8() + } +} + +pub impl u16: Serializable { + fn serialize(&self, s: &S) { s.emit_u16(*self) } +} + +pub impl u16: Deserializable { + static fn deserialize(&self, d: &D) -> u16 { + d.read_u16() + } +} + +pub impl u32: Serializable { + fn serialize(&self, s: &S) { s.emit_u32(*self) } +} + +pub impl u32: Deserializable { + static fn deserialize(&self, d: &D) 
-> u32 { + d.read_u32() + } +} + +pub impl u64: Serializable { + fn serialize(&self, s: &S) { s.emit_u64(*self) } +} + +pub impl u64: Deserializable { + static fn deserialize(&self, d: &D) -> u64 { + d.read_u64() + } +} + +pub impl int: Serializable { + fn serialize(&self, s: &S) { s.emit_int(*self) } +} + +pub impl int: Deserializable { + static fn deserialize(&self, d: &D) -> int { + d.read_int() + } +} + +pub impl i8: Serializable { + fn serialize(&self, s: &S) { s.emit_i8(*self) } +} + +pub impl i8: Deserializable { + static fn deserialize(&self, d: &D) -> i8 { + d.read_i8() + } +} + +pub impl i16: Serializable { + fn serialize(&self, s: &S) { s.emit_i16(*self) } +} + +pub impl i16: Deserializable { + static fn deserialize(&self, d: &D) -> i16 { + d.read_i16() + } +} + +pub impl i32: Serializable { + fn serialize(&self, s: &S) { s.emit_i32(*self) } +} + +pub impl i32: Deserializable { + static fn deserialize(&self, d: &D) -> i32 { + d.read_i32() + } +} + +pub impl i64: Serializable { + fn serialize(&self, s: &S) { s.emit_i64(*self) } +} + +pub impl i64: Deserializable { + static fn deserialize(&self, d: &D) -> i64 { + d.read_i64() + } +} + +pub impl &str: Serializable { + fn serialize(&self, s: &S) { s.emit_borrowed_str(*self) } +} + +pub impl ~str: Serializable { + fn serialize(&self, s: &S) { s.emit_owned_str(*self) } +} + +pub impl ~str: Deserializable { + static fn deserialize(&self, d: &D) -> ~str { + d.read_owned_str() + } +} + +pub impl @str: Serializable { + fn serialize(&self, s: &S) { s.emit_managed_str(*self) } +} + +pub impl @str: Deserializable { + static fn deserialize(&self, d: &D) -> @str { + d.read_managed_str() + } +} + +pub impl float: Serializable { + fn serialize(&self, s: &S) { s.emit_float(*self) } +} + +pub impl float: Deserializable { + static fn deserialize(&self, d: &D) -> float { + d.read_float() + } +} + +pub impl f32: Serializable { + fn serialize(&self, s: &S) { s.emit_f32(*self) } +} + +pub impl f32: Deserializable { + static fn deserialize(&self, d: &D) -> f32 { + d.read_f32() } +} + +pub impl f64: Serializable { + fn serialize(&self, s: &S) { s.emit_f64(*self) } +} + +pub impl f64: Deserializable { + static fn deserialize(&self, d: &D) -> f64 { + d.read_f64() + } +} + +pub impl bool: Serializable { + fn serialize(&self, s: &S) { s.emit_bool(*self) } +} + +pub impl bool: Deserializable { + static fn deserialize(&self, d: &D) -> bool { + d.read_bool() + } +} + +pub impl (): Serializable { + fn serialize(&self, s: &S) { s.emit_nil() } +} + +pub impl (): Deserializable { + static fn deserialize(&self, d: &D) -> () { + d.read_nil() + } +} + +pub impl> &T: Serializable { + fn serialize(&self, s: &S) { + s.emit_borrowed(|| (**self).serialize(s)) + } +} + +pub impl> ~T: Serializable { + fn serialize(&self, s: &S) { + s.emit_owned(|| (**self).serialize(s)) + } +} + +pub impl> ~T: Deserializable { + static fn deserialize(&self, d: &D) -> ~T { + d.read_owned(|| ~deserialize(d)) + } +} + +pub impl> @T: Serializable { + fn serialize(&self, s: &S) { + s.emit_managed(|| (**self).serialize(s)) + } +} + +pub impl> @T: Deserializable { + static fn deserialize(&self, d: &D) -> @T { + d.read_managed(|| @deserialize(d)) + } +} + +pub impl> &[T]: Serializable { + fn serialize(&self, s: &S) { + do s.emit_borrowed_vec(self.len()) { + for self.eachi |i, e| { + s.emit_vec_elt(i, || e.serialize(s)) + } + } + } +} + +pub impl> ~[T]: Serializable { + fn serialize(&self, s: &S) { + do s.emit_owned_vec(self.len()) { + for self.eachi |i, e| { + s.emit_vec_elt(i, || e.serialize(s)) + 
} + } + } +} + +pub impl> ~[T]: Deserializable { + static fn deserialize(&self, d: &D) -> ~[T] { + do d.read_owned_vec |len| { + do vec::from_fn(len) |i| { + d.read_vec_elt(i, || deserialize(d)) + } + } + } +} + +pub impl> @[T]: Serializable { + fn serialize(&self, s: &S) { + do s.emit_managed_vec(self.len()) { + for self.eachi |i, e| { + s.emit_vec_elt(i, || e.serialize(s)) + } + } + } +} + +pub impl> @[T]: Deserializable { + static fn deserialize(&self, d: &D) -> @[T] { + do d.read_managed_vec |len| { + do at_vec::from_fn(len) |i| { + d.read_vec_elt(i, || deserialize(d)) + } + } + } +} + +pub impl> Option: Serializable { + fn serialize(&self, s: &S) { + do s.emit_enum(~"option") { + match *self { + None => do s.emit_enum_variant(~"none", 0u, 0u) { + }, + + Some(ref v) => do s.emit_enum_variant(~"some", 1u, 1u) { + s.emit_enum_variant_arg(0u, || v.serialize(s)) + } + } + } + } +} + +pub impl> Option: Deserializable { + static fn deserialize(&self, d: &D) -> Option { + do d.read_enum(~"option") { + do d.read_enum_variant |i| { + match i { + 0 => None, + 1 => Some(d.read_enum_variant_arg(0u, || deserialize(d))), + _ => fail(#fmt("Bad variant for option: %u", i)) + } + } + } + } +} + +pub impl< + S: Serializer, + T0: Serializable, + T1: Serializable +> (T0, T1): Serializable { + fn serialize(&self, s: &S) { + match *self { + (ref t0, ref t1) => { + do s.emit_tup(2) { + s.emit_tup_elt(0, || t0.serialize(s)); + s.emit_tup_elt(1, || t1.serialize(s)); + } + } + } + } +} + +pub impl< + D: Deserializer, + T0: Deserializable, + T1: Deserializable +> (T0, T1): Deserializable { + static fn deserialize(&self, d: &D) -> (T0, T1) { + do d.read_tup(2) { + ( + d.read_tup_elt(0, || deserialize(d)), + d.read_tup_elt(1, || deserialize(d)) + ) + } + } +} + +pub impl< + S: Serializer, + T0: Serializable, + T1: Serializable, + T2: Serializable +> (T0, T1, T2): Serializable { + fn serialize(&self, s: &S) { + match *self { + (ref t0, ref t1, ref t2) => { + do s.emit_tup(3) { + s.emit_tup_elt(0, || t0.serialize(s)); + s.emit_tup_elt(1, || t1.serialize(s)); + s.emit_tup_elt(2, || t2.serialize(s)); + } + } + } + } +} + +pub impl< + D: Deserializer, + T0: Deserializable, + T1: Deserializable, + T2: Deserializable +> (T0, T1, T2): Deserializable { + static fn deserialize(&self, d: &D) -> (T0, T1, T2) { + do d.read_tup(3) { + ( + d.read_tup_elt(0, || deserialize(d)), + d.read_tup_elt(1, || deserialize(d)), + d.read_tup_elt(2, || deserialize(d)) + ) + } + } +} + +pub impl< + S: Serializer, + T0: Serializable, + T1: Serializable, + T2: Serializable, + T3: Serializable +> (T0, T1, T2, T3): Serializable { + fn serialize(&self, s: &S) { + match *self { + (ref t0, ref t1, ref t2, ref t3) => { + do s.emit_tup(4) { + s.emit_tup_elt(0, || t0.serialize(s)); + s.emit_tup_elt(1, || t1.serialize(s)); + s.emit_tup_elt(2, || t2.serialize(s)); + s.emit_tup_elt(3, || t3.serialize(s)); + } + } + } + } +} + +pub impl< + D: Deserializer, + T0: Deserializable, + T1: Deserializable, + T2: Deserializable, + T3: Deserializable +> (T0, T1, T2, T3): Deserializable { + static fn deserialize(&self, d: &D) -> (T0, T1, T2, T3) { + do d.read_tup(4) { + ( + d.read_tup_elt(0, || deserialize(d)), + d.read_tup_elt(1, || deserialize(d)), + d.read_tup_elt(2, || deserialize(d)), + d.read_tup_elt(3, || deserialize(d)) + ) + } + } +} + +pub impl< + S: Serializer, + T0: Serializable, + T1: Serializable, + T2: Serializable, + T3: Serializable, + T4: Serializable +> (T0, T1, T2, T3, T4): Serializable { + fn serialize(&self, s: &S) { + match *self { + (ref t0, 
ref t1, ref t2, ref t3, ref t4) => { + do s.emit_tup(5) { + s.emit_tup_elt(0, || t0.serialize(s)); + s.emit_tup_elt(1, || t1.serialize(s)); + s.emit_tup_elt(2, || t2.serialize(s)); + s.emit_tup_elt(3, || t3.serialize(s)); + s.emit_tup_elt(4, || t4.serialize(s)); + } + } + } + } +} + +pub impl< + D: Deserializer, + T0: Deserializable, + T1: Deserializable, + T2: Deserializable, + T3: Deserializable, + T4: Deserializable +> (T0, T1, T2, T3, T4): Deserializable { + static fn deserialize(&self, d: &D) + -> (T0, T1, T2, T3, T4) { + do d.read_tup(5) { + ( + d.read_tup_elt(0, || deserialize(d)), + d.read_tup_elt(1, || deserialize(d)), + d.read_tup_elt(2, || deserialize(d)), + d.read_tup_elt(3, || deserialize(d)), + d.read_tup_elt(4, || deserialize(d)) + ) + } + } +} + +// ___________________________________________________________________________ +// Helper routines +// +// In some cases, these should eventually be coded as traits. + +pub trait SerializerHelpers { + fn emit_from_vec(&self, v: ~[T], f: fn(v: &T)); +} + +pub impl S: SerializerHelpers { + fn emit_from_vec(&self, v: ~[T], f: fn(v: &T)) { + do self.emit_owned_vec(v.len()) { + for v.eachi |i, e| { + do self.emit_vec_elt(i) { + f(e) + } + } + } + } +} + +pub trait DeserializerHelpers { + fn read_to_vec(&self, f: fn() -> T) -> ~[T]; +} + +pub impl D: DeserializerHelpers { + fn read_to_vec(&self, f: fn() -> T) -> ~[T] { + do self.read_owned_vec |len| { + do vec::from_fn(len) |i| { + self.read_vec_elt(i, || f()) + } + } + } +} +} + +pub use traits::*; diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 3740557b7f8fe..cf7b758216b91 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -7,17 +7,34 @@ use std::serialization::{Serializable, use codemap::{span, filename}; use parse::token; +#[cfg(stage0)] impl span: Serializable { /* Note #1972 -- spans are serialized but not deserialized */ fn serialize(&self, _s: &S) { } } +#[cfg(stage0)] impl span: Deserializable { static fn deserialize(_d: &D) -> span { ast_util::dummy_sp() } } +#[cfg(stage1)] +#[cfg(stage2)] +impl span: Serializable { + /* Note #1972 -- spans are serialized but not deserialized */ + fn serialize(&self, _s: &S) { } +} + +#[cfg(stage1)] +#[cfg(stage2)] +impl span: Deserializable { + static fn deserialize(_d: &D) -> span { + ast_util::dummy_sp() + } +} + #[auto_serialize] #[auto_deserialize] type spanned = {node: T, span: span}; @@ -34,6 +51,7 @@ macro_rules! interner_key ( // implemented. 
struct ident { repr: uint } +#[cfg(stage0)] impl ident: Serializable { fn serialize(&self, s: &S) { let intr = match unsafe { @@ -47,6 +65,7 @@ impl ident: Serializable { } } +#[cfg(stage0)] impl ident: Deserializable { static fn deserialize(d: &D) -> ident { let intr = match unsafe { @@ -60,6 +79,36 @@ impl ident: Deserializable { } } +#[cfg(stage1)] +#[cfg(stage2)] +impl ident: Serializable { + fn serialize(&self, s: &S) { + let intr = match unsafe { + task::local_data::local_data_get(interner_key!()) + } { + None => fail ~"serialization: TLS interner not set up", + Some(intr) => intr + }; + + s.emit_owned_str(*(*intr).get(*self)); + } +} + +#[cfg(stage1)] +#[cfg(stage2)] +impl ident: Deserializable { + static fn deserialize(d: &D) -> ident { + let intr = match unsafe { + task::local_data::local_data_get(interner_key!()) + } { + None => fail ~"deserialization: TLS interner not set up", + Some(intr) => intr + }; + + (*intr).intern(@d.read_owned_str()) + } +} + impl ident: cmp::Eq { pure fn eq(other: &ident) -> bool { self.repr == other.repr } pure fn ne(other: &ident) -> bool { !self.eq(other) } diff --git a/src/libsyntax/ext/auto_serialize.rs b/src/libsyntax/ext/auto_serialize.rs index b06536f4e026a..452becbe559ac 100644 --- a/src/libsyntax/ext/auto_serialize.rs +++ b/src/libsyntax/ext/auto_serialize.rs @@ -13,16 +13,16 @@ For example, a type like: would generate two implementations like: - impl Node: Serializable { - fn serialize(s: &S) { + impl node_id: Serializable { + fn serialize(s: &S) { do s.emit_struct("Node") { s.emit_field("id", 0, || s.emit_uint(self)) } } } - impl node_id: Deserializable { - static fn deserialize(d: &D) -> Node { + impl node_id: Deserializable { + static fn deserialize(d: &D) -> Node { do d.read_struct("Node") { Node { id: d.read_field(~"x", 0, || deserialize(d)) @@ -40,7 +40,10 @@ references other non-built-in types. A type definition like: would yield functions like: - impl spanned: Serializable { + impl< + S: Serializer, + T: Serializable + > spanned: Serializable { fn serialize(s: &S) { do s.emit_rec { s.emit_field("node", 0, || self.node.serialize(s)); @@ -49,8 +52,11 @@ would yield functions like: } } - impl spanned: Deserializable { - static fn deserialize(d: &D) -> spanned { + impl< + D: Deserializer, + T: Deserializable + > spanned: Deserializable { + static fn deserialize(d: &D) -> spanned { do d.read_rec { { node: d.read_field(~"node", 0, || deserialize(d)), @@ -215,6 +221,25 @@ fn expand_auto_deserialize( } priv impl ext_ctxt { + fn bind_path( + span: span, + ident: ast::ident, + path: @ast::path, + bounds: @~[ast::ty_param_bound] + ) -> ast::ty_param { + let bound = ast::bound_trait(@{ + id: self.next_id(), + node: ast::ty_path(path, self.next_id()), + span: span, + }); + + { + ident: ident, + id: self.next_id(), + bounds: @vec::append(~[bound], *bounds) + } + } + fn expr(span: span, node: ast::expr_) -> @ast::expr { @{id: self.next_id(), callee_id: self.next_id(), node: node, span: span} @@ -332,24 +357,28 @@ fn mk_impl( cx: ext_ctxt, span: span, ident: ast::ident, + ty_param: ast::ty_param, path: @ast::path, tps: ~[ast::ty_param], f: fn(@ast::ty) -> @ast::method ) -> @ast::item { // All the type parameters need to bound to the trait. 
- let trait_tps = do tps.map |tp| { - let t_bound = ast::bound_trait(@{ - id: cx.next_id(), - node: ast::ty_path(path, cx.next_id()), - span: span, - }); + let mut trait_tps = vec::append( + ~[ty_param], + do tps.map |tp| { + let t_bound = ast::bound_trait(@{ + id: cx.next_id(), + node: ast::ty_path(path, cx.next_id()), + span: span, + }); - { - ident: tp.ident, - id: cx.next_id(), - bounds: @vec::append(~[t_bound], *tp.bounds) + { + ident: tp.ident, + id: cx.next_id(), + bounds: @vec::append(~[t_bound], *tp.bounds) + } } - }; + ); let opt_trait = Some(@{ path: path, @@ -382,20 +411,37 @@ fn mk_ser_impl( tps: ~[ast::ty_param], body: @ast::expr ) -> @ast::item { + // Make a path to the std::serialization::Serializable typaram. + let ty_param = cx.bind_path( + span, + cx.ident_of(~"__S"), + cx.path( + span, + ~[ + cx.ident_of(~"std"), + cx.ident_of(~"serialization"), + cx.ident_of(~"Serializer"), + ] + ), + @~[] + ); + // Make a path to the std::serialization::Serializable trait. - let path = cx.path( + let path = cx.path_tps( span, ~[ cx.ident_of(~"std"), cx.ident_of(~"serialization"), cx.ident_of(~"Serializable"), - ] + ], + ~[cx.ty_path(span, ~[cx.ident_of(~"__S")], ~[])] ); mk_impl( cx, span, ident, + ty_param, path, tps, |_ty| mk_ser_method(cx, span, cx.expr_blk(body)) @@ -409,20 +455,37 @@ fn mk_deser_impl( tps: ~[ast::ty_param], body: @ast::expr ) -> @ast::item { + // Make a path to the std::serialization::Deserializable typaram. + let ty_param = cx.bind_path( + span, + cx.ident_of(~"__D"), + cx.path( + span, + ~[ + cx.ident_of(~"std"), + cx.ident_of(~"serialization"), + cx.ident_of(~"Deserializer"), + ] + ), + @~[] + ); + // Make a path to the std::serialization::Deserializable trait. - let path = cx.path( + let path = cx.path_tps( span, ~[ cx.ident_of(~"std"), cx.ident_of(~"serialization"), cx.ident_of(~"Deserializable"), - ] + ], + ~[cx.ty_path(span, ~[cx.ident_of(~"__D")], ~[])] ); mk_impl( cx, span, ident, + ty_param, path, tps, |ty| mk_deser_method(cx, span, ty, cx.expr_blk(body)) @@ -434,22 +497,6 @@ fn mk_ser_method( span: span, ser_body: ast::blk ) -> @ast::method { - let ser_bound = cx.ty_path( - span, - ~[ - cx.ident_of(~"std"), - cx.ident_of(~"serialization"), - cx.ident_of(~"Serializer"), - ], - ~[] - ); - - let ser_tps = ~[{ - ident: cx.ident_of(~"__S"), - id: cx.next_id(), - bounds: @~[ast::bound_trait(ser_bound)], - }]; - let ty_s = @{ id: cx.next_id(), node: ast::ty_rptr( @@ -487,7 +534,7 @@ fn mk_ser_method( @{ ident: cx.ident_of(~"serialize"), attrs: ~[], - tps: ser_tps, + tps: ~[], self_ty: { node: ast::sty_region(ast::m_imm), span: span }, purity: ast::impure_fn, decl: ser_decl, @@ -505,22 +552,6 @@ fn mk_deser_method( ty: @ast::ty, deser_body: ast::blk ) -> @ast::method { - let deser_bound = cx.ty_path( - span, - ~[ - cx.ident_of(~"std"), - cx.ident_of(~"serialization"), - cx.ident_of(~"Deserializer"), - ], - ~[] - ); - - let deser_tps = ~[{ - ident: cx.ident_of(~"__D"), - id: cx.next_id(), - bounds: @~[ast::bound_trait(deser_bound)], - }]; - let ty_d = @{ id: cx.next_id(), node: ast::ty_rptr( @@ -552,7 +583,7 @@ fn mk_deser_method( @{ ident: cx.ident_of(~"deserialize"), attrs: ~[], - tps: deser_tps, + tps: ~[], self_ty: { node: ast::sty_static, span: span }, purity: ast::impure_fn, decl: deser_decl, diff --git a/src/test/run-pass/auto_serialize.rs b/src/test/run-pass/auto_serialize.rs index 6c85f59b74ed2..b63d1dcab7ec8 100644 --- a/src/test/run-pass/auto_serialize.rs +++ b/src/test/run-pass/auto_serialize.rs @@ -9,18 +9,22 @@ use io::Writer; use 
std::serialization::{Serializable, Deserializable, deserialize}; use std::prettyprint; -fn test_ser_and_deser( - a1: &A, - +expected: ~str +fn test_prettyprint>( + a: &A, + expected: &~str ) { - // check the pretty printer: let s = do io::with_str_writer |w| { - a1.serialize(&prettyprint::Serializer(w)) + a.serialize(&prettyprint::Serializer(w)) }; debug!("s == %?", s); - assert s == expected; + assert s == *expected; +} - // check the EBML serializer: +fn test_ebml + Deserializable +>(a1: &A) { let bytes = do io::with_bytes_writer |wr| { let ebml_w = &ebml::Serializer(wr); a1.serialize(ebml_w) @@ -140,24 +144,40 @@ enum Quark { enum CLike { A, B, C } fn main() { - test_ser_and_deser(&Plus(@Minus(@Val(3u), @Val(10u)), - @Plus(@Val(22u), @Val(5u))), - ~"Plus(@Minus(@Val(3u), @Val(10u)), \ - @Plus(@Val(22u), @Val(5u)))"); - - test_ser_and_deser(&{lo: 0u, hi: 5u, node: 22u}, - ~"{lo: 0u, hi: 5u, node: 22u}"); - - test_ser_and_deser(&AnEnum({v: ~[1u, 2u, 3u]}), - ~"AnEnum({v: ~[1u, 2u, 3u]})"); - - test_ser_and_deser(&Point {x: 3u, y: 5u}, ~"Point {x: 3u, y: 5u}"); - - test_ser_and_deser(&@[1u, 2u, 3u], ~"@[1u, 2u, 3u]"); - - test_ser_and_deser(&Top(22u), ~"Top(22u)"); - test_ser_and_deser(&Bottom(222u), ~"Bottom(222u)"); - - test_ser_and_deser(&A, ~"A"); - test_ser_and_deser(&B, ~"B"); + let a = &Plus(@Minus(@Val(3u), @Val(10u)), @Plus(@Val(22u), @Val(5u))); + test_prettyprint(a, &~"Plus(@Minus(@Val(3u), @Val(10u)), \ + @Plus(@Val(22u), @Val(5u)))"); + test_ebml(a); + + let a = &{lo: 0u, hi: 5u, node: 22u}; + test_prettyprint(a, &~"{lo: 0u, hi: 5u, node: 22u}"); + test_ebml(a); + + let a = &AnEnum({v: ~[1u, 2u, 3u]}); + test_prettyprint(a, &~"AnEnum({v: ~[1u, 2u, 3u]})"); + test_ebml(a); + + let a = &Point {x: 3u, y: 5u}; + test_prettyprint(a, &~"Point {x: 3u, y: 5u}"); + test_ebml(a); + + let a = &@[1u, 2u, 3u]; + test_prettyprint(a, &~"@[1u, 2u, 3u]"); + test_ebml(a); + + let a = &Top(22u); + test_prettyprint(a, &~"Top(22u)"); + test_ebml(a); + + let a = &Bottom(222u); + test_prettyprint(a, &~"Bottom(222u)"); + test_ebml(a); + + let a = &A; + test_prettyprint(a, &~"A"); + test_ebml(a); + + let a = &B; + test_prettyprint(a, &~"B"); + test_ebml(a); } From f6211ab187abf18f458f289fef3c48b14a0a0af8 Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Mon, 15 Oct 2012 12:00:32 -0700 Subject: [PATCH 06/40] Add various test cases (xfailed) --- src/test/auxiliary/issue_3136_a.rc | 3 +++ src/test/auxiliary/issue_3136_a.rs | 15 +++++++++++++++ src/test/compile-fail/issue-2074.rs | 12 ++++++++++++ src/test/compile-fail/issue-3214.rs | 8 ++++++++ src/test/run-pass/issue-2284.rs | 11 +++++++++++ src/test/run-pass/issue-3447.rs | 25 +++++++++++++++++++++++++ src/test/run-pass/issue-3521.rs | 10 ++++++++++ src/test/run-pass/issue_3136_b.rs | 6 ++++++ 8 files changed, 90 insertions(+) create mode 100644 src/test/auxiliary/issue_3136_a.rc create mode 100644 src/test/auxiliary/issue_3136_a.rs create mode 100644 src/test/compile-fail/issue-2074.rs create mode 100644 src/test/compile-fail/issue-3214.rs create mode 100644 src/test/run-pass/issue-2284.rs create mode 100644 src/test/run-pass/issue-3447.rs create mode 100644 src/test/run-pass/issue-3521.rs create mode 100644 src/test/run-pass/issue_3136_b.rs diff --git a/src/test/auxiliary/issue_3136_a.rc b/src/test/auxiliary/issue_3136_a.rc new file mode 100644 index 0000000000000..532c669bd1d20 --- /dev/null +++ b/src/test/auxiliary/issue_3136_a.rc @@ -0,0 +1,3 @@ +#[crate_type = "lib"]; + +pub mod issue_3136_a; diff --git a/src/test/auxiliary/issue_3136_a.rs 
b/src/test/auxiliary/issue_3136_a.rs new file mode 100644 index 0000000000000..b3af688032326 --- /dev/null +++ b/src/test/auxiliary/issue_3136_a.rs @@ -0,0 +1,15 @@ +trait x { + fn use_x(); +} +enum y = (); +impl y:x { + fn use_x() { + struct foo { //~ ERROR quux + i: () + } + fn new_foo(i: ()) -> foo { + foo { i: i } + } + } +} + diff --git a/src/test/compile-fail/issue-2074.rs b/src/test/compile-fail/issue-2074.rs new file mode 100644 index 0000000000000..d911b7db3dd4d --- /dev/null +++ b/src/test/compile-fail/issue-2074.rs @@ -0,0 +1,12 @@ +// xfail-test +fn main() { + let one = fn@() -> uint { + enum r { a }; + return a as uint; + }; + let two = fn@() -> uint { + enum r { a }; + return a as uint; + }; + one(); two(); +} diff --git a/src/test/compile-fail/issue-3214.rs b/src/test/compile-fail/issue-3214.rs new file mode 100644 index 0000000000000..3c783b99232d9 --- /dev/null +++ b/src/test/compile-fail/issue-3214.rs @@ -0,0 +1,8 @@ +// xfail-test +fn foo() { + struct foo { + mut x: T, //~ ERROR quux + drop { } + } +} +fn main() { } diff --git a/src/test/run-pass/issue-2284.rs b/src/test/run-pass/issue-2284.rs new file mode 100644 index 0000000000000..3689c42253b90 --- /dev/null +++ b/src/test/run-pass/issue-2284.rs @@ -0,0 +1,11 @@ +// xfail-test +trait Send { + fn f(); +} + +fn f(t: T) { + t.f(); +} + +fn main() { +} \ No newline at end of file diff --git a/src/test/run-pass/issue-3447.rs b/src/test/run-pass/issue-3447.rs new file mode 100644 index 0000000000000..fab92ea6dcad5 --- /dev/null +++ b/src/test/run-pass/issue-3447.rs @@ -0,0 +1,25 @@ +// xfail-test +struct list { + element: &self/T, + mut next: Option<@list> +} + +impl list{ + fn addEnd(&self, element: &self/T) { + let newList = list { + element: element, + next: option::None + }; + + self.next = Some(@newList); + } +} + +fn main() { + let s = @"str"; + let ls: list<@str> = list { + element: &s, + next: option::None + }; + io::println(*ls.element); +} diff --git a/src/test/run-pass/issue-3521.rs b/src/test/run-pass/issue-3521.rs new file mode 100644 index 0000000000000..d8693fe18d352 --- /dev/null +++ b/src/test/run-pass/issue-3521.rs @@ -0,0 +1,10 @@ +// xfail-test +fn main() { + let foo = 100; + + enum Stuff { + Bar = foo + } + + log(error, Bar); +} diff --git a/src/test/run-pass/issue_3136_b.rs b/src/test/run-pass/issue_3136_b.rs new file mode 100644 index 0000000000000..ef8e1af56d70b --- /dev/null +++ b/src/test/run-pass/issue_3136_b.rs @@ -0,0 +1,6 @@ +// xfail-fast - check-fast doesn't understand aux-build +// aux-build:issue_3136_a.rc + +extern mod issue_3136_a; +fn main() {} + From 7237268b7040edf09dbbd9522b5656227a4b9b91 Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Mon, 15 Oct 2012 12:27:09 -0700 Subject: [PATCH 07/40] Allow enum discriminator exprs to refer to declared consts Also some work towards #3521 Closes #2428 --- src/libsyntax/visit.rs | 2 + src/rustc/middle/const_eval.rs | 220 +++++++++++++++----------- src/rustc/middle/resolve.rs | 58 ++++++- src/rustc/middle/typeck/check.rs | 33 ++-- src/test/compile-fail/issue-3521-2.rs | 7 + src/test/run-pass/issue-2428.rs | 10 ++ 6 files changed, 215 insertions(+), 115 deletions(-) create mode 100644 src/test/compile-fail/issue-3521-2.rs create mode 100644 src/test/run-pass/issue-2428.rs diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 50fbd21f7b8dc..b402f9727dc2f 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -182,6 +182,8 @@ fn visit_enum_def(enum_definition: ast::enum_def, tps: ~[ast::ty_param], 
visit_enum_def(enum_definition, tps, e, v); } } + // Visit the disr expr if it exists + vr.node.disr_expr.iter(|ex| v.visit_expr(*ex, e, v)); } } diff --git a/src/rustc/middle/const_eval.rs b/src/rustc/middle/const_eval.rs index 51382b8108cf9..ea7b08140f39b 100644 --- a/src/rustc/middle/const_eval.rs +++ b/src/rustc/middle/const_eval.rs @@ -1,4 +1,4 @@ -use syntax::{ast,ast_util,visit}; +use syntax::{ast,ast_map,ast_util,visit}; use ast::*; // @@ -135,28 +135,7 @@ fn classify(e: @expr, // FIXME: (#3728) we can probably do something CCI-ish // surrounding nonlocal constants. But we don't yet. ast::expr_path(_) => { - match def_map.find(e.id) { - Some(ast::def_const(def_id)) => { - if ast_util::is_local(def_id) { - let ty = ty::expr_ty(tcx, e); - if ty::type_is_integral(ty) { - integral_const - } else { - general_const - } - } else { - non_const - } - } - Some(_) => { - non_const - } - None => { - tcx.sess.span_bug(e.span, - ~"unknown path when \ - classifying constants"); - } - } + lookup_constness(tcx, e) } _ => non_const @@ -167,6 +146,40 @@ fn classify(e: @expr, } } +fn lookup_const(tcx: ty::ctxt, e: @expr) -> Option<@expr> { + match tcx.def_map.find(e.id) { + Some(ast::def_const(def_id)) => { + if ast_util::is_local(def_id) { + match tcx.items.find(def_id.node) { + None => None, + Some(ast_map::node_item(it, _)) => match it.node { + item_const(_, const_expr) => Some(const_expr), + _ => None + }, + Some(_) => None + } + } + else { None } + } + Some(_) => None, + None => None + } +} + +fn lookup_constness(tcx: ty::ctxt, e: @expr) -> constness { + match lookup_const(tcx, e) { + Some(rhs) => { + let ty = ty::expr_ty(tcx, rhs); + if ty::type_is_integral(ty) { + integral_const + } else { + general_const + } + } + None => non_const + } +} + fn process_crate(crate: @ast::crate, def_map: resolve::DefMap, tcx: ty::ctxt) { @@ -204,58 +217,67 @@ impl const_val : cmp::Eq { pure fn ne(other: &const_val) -> bool { !self.eq(other) } } -// FIXME: issue #1417 fn eval_const_expr(tcx: middle::ty::ctxt, e: @expr) -> const_val { + match eval_const_expr_partial(tcx, e) { + Ok(r) => r, + Err(s) => fail s + } +} + +fn eval_const_expr_partial(tcx: middle::ty::ctxt, e: @expr) + -> Result { use middle::ty; - fn fromb(b: bool) -> const_val { const_int(b as i64) } + fn fromb(b: bool) -> Result { Ok(const_int(b as i64)) } match e.node { expr_unary(neg, inner) => { - match eval_const_expr(tcx, inner) { - const_float(f) => const_float(-f), - const_int(i) => const_int(-i), - const_uint(i) => const_uint(-i), - const_str(_) => fail ~"Negate on string", - const_bool(_) => fail ~"Negate on boolean" + match eval_const_expr_partial(tcx, inner) { + Ok(const_float(f)) => Ok(const_float(-f)), + Ok(const_int(i)) => Ok(const_int(-i)), + Ok(const_uint(i)) => Ok(const_uint(-i)), + Ok(const_str(_)) => Err(~"Negate on string"), + Ok(const_bool(_)) => Err(~"Negate on boolean"), + err => err } } expr_unary(not, inner) => { - match eval_const_expr(tcx, inner) { - const_int(i) => const_int(!i), - const_uint(i) => const_uint(!i), - const_bool(b) => const_bool(!b), - _ => fail ~"Not on float or string" + match eval_const_expr_partial(tcx, inner) { + Ok(const_int(i)) => Ok(const_int(!i)), + Ok(const_uint(i)) => Ok(const_uint(!i)), + Ok(const_bool(b)) => Ok(const_bool(!b)), + _ => Err(~"Not on float or string") } } expr_binary(op, a, b) => { - match (eval_const_expr(tcx, a), eval_const_expr(tcx, b)) { - (const_float(a), const_float(b)) => { + match (eval_const_expr_partial(tcx, a), + eval_const_expr_partial(tcx, b)) { + (Ok(const_float(a)), 
Ok(const_float(b))) => { match op { - add => const_float(a + b), - subtract => const_float(a - b), - mul => const_float(a * b), - div => const_float(a / b), - rem => const_float(a % b), + add => Ok(const_float(a + b)), + subtract => Ok(const_float(a - b)), + mul => Ok(const_float(a * b)), + div => Ok(const_float(a / b)), + rem => Ok(const_float(a % b)), eq => fromb(a == b), lt => fromb(a < b), le => fromb(a <= b), ne => fromb(a != b), ge => fromb(a >= b), gt => fromb(a > b), - _ => fail ~"Can't do this op on floats" + _ => Err(~"Can't do this op on floats") } } - (const_int(a), const_int(b)) => { + (Ok(const_int(a)), Ok(const_int(b))) => { match op { - add => const_int(a + b), - subtract => const_int(a - b), - mul => const_int(a * b), - div => const_int(a / b), - rem => const_int(a % b), - and | bitand => const_int(a & b), - or | bitor => const_int(a | b), - bitxor => const_int(a ^ b), - shl => const_int(a << b), - shr => const_int(a >> b), + add => Ok(const_int(a + b)), + subtract => Ok(const_int(a - b)), + mul => Ok(const_int(a * b)), + div => Ok(const_int(a / b)), + rem => Ok(const_int(a % b)), + and | bitand => Ok(const_int(a & b)), + or | bitor => Ok(const_int(a | b)), + bitxor => Ok(const_int(a ^ b)), + shl => Ok(const_int(a << b)), + shr => Ok(const_int(a >> b)), eq => fromb(a == b), lt => fromb(a < b), le => fromb(a <= b), @@ -264,18 +286,18 @@ fn eval_const_expr(tcx: middle::ty::ctxt, e: @expr) -> const_val { gt => fromb(a > b) } } - (const_uint(a), const_uint(b)) => { + (Ok(const_uint(a)), Ok(const_uint(b))) => { match op { - add => const_uint(a + b), - subtract => const_uint(a - b), - mul => const_uint(a * b), - div => const_uint(a / b), - rem => const_uint(a % b), - and | bitand => const_uint(a & b), - or | bitor => const_uint(a | b), - bitxor => const_uint(a ^ b), - shl => const_uint(a << b), - shr => const_uint(a >> b), + add => Ok(const_uint(a + b)), + subtract => Ok(const_uint(a - b)), + mul => Ok(const_uint(a * b)), + div => Ok(const_uint(a / b)), + rem => Ok(const_uint(a % b)), + and | bitand => Ok(const_uint(a & b)), + or | bitor => Ok(const_uint(a | b)), + bitxor => Ok(const_uint(a ^ b)), + shl => Ok(const_uint(a << b)), + shr => Ok(const_uint(a >> b)), eq => fromb(a == b), lt => fromb(a < b), le => fromb(a <= b), @@ -285,22 +307,22 @@ fn eval_const_expr(tcx: middle::ty::ctxt, e: @expr) -> const_val { } } // shifts can have any integral type as their rhs - (const_int(a), const_uint(b)) => { + (Ok(const_int(a)), Ok(const_uint(b))) => { match op { - shl => const_int(a << b), - shr => const_int(a >> b), - _ => fail ~"Can't do this op on an int and uint" + shl => Ok(const_int(a << b)), + shr => Ok(const_int(a >> b)), + _ => Err(~"Can't do this op on an int and uint") } } - (const_uint(a), const_int(b)) => { + (Ok(const_uint(a)), Ok(const_int(b))) => { match op { - shl => const_uint(a << b), - shr => const_uint(a >> b), - _ => fail ~"Can't do this op on a uint and int" + shl => Ok(const_uint(a << b)), + shr => Ok(const_uint(a >> b)), + _ => Err(~"Can't do this op on a uint and int") } } - (const_bool(a), const_bool(b)) => { - const_bool(match op { + (Ok(const_bool(a)), Ok(const_bool(b))) => { + Ok(const_bool(match op { and => a && b, or => a || b, bitxor => a ^ b, @@ -308,47 +330,53 @@ fn eval_const_expr(tcx: middle::ty::ctxt, e: @expr) -> const_val { bitor => a | b, eq => a == b, ne => a != b, - _ => fail ~"Can't do this op on bools" - }) + _ => return Err(~"Can't do this op on bools") + })) } - _ => fail ~"Bad operands for binary" + _ => Err(~"Bad operands for binary") } } 
expr_cast(base, _) => { let ety = ty::expr_ty(tcx, e); - let base = eval_const_expr(tcx, base); + let base = eval_const_expr_partial(tcx, base); match ty::get(ety).sty { ty::ty_float(_) => { match base { - const_uint(u) => const_float(u as f64), - const_int(i) => const_float(i as f64), - const_float(_) => base, - _ => fail ~"Can't cast float to str" + Ok(const_uint(u)) => Ok(const_float(u as f64)), + Ok(const_int(i)) => Ok(const_float(i as f64)), + Ok(const_float(_)) => base, + _ => Err(~"Can't cast float to str") } } ty::ty_uint(_) => { match base { - const_uint(_) => base, - const_int(i) => const_uint(i as u64), - const_float(f) => const_uint(f as u64), - _ => fail ~"Can't cast str to uint" + Ok(const_uint(_)) => base, + Ok(const_int(i)) => Ok(const_uint(i as u64)), + Ok(const_float(f)) => Ok(const_uint(f as u64)), + _ => Err(~"Can't cast str to uint") } } ty::ty_int(_) | ty::ty_bool => { match base { - const_uint(u) => const_int(u as i64), - const_int(_) => base, - const_float(f) => const_int(f as i64), - _ => fail ~"Can't cast str to int" + Ok(const_uint(u)) => Ok(const_int(u as i64)), + Ok(const_int(_)) => base, + Ok(const_float(f)) => Ok(const_int(f as i64)), + _ => Err(~"Can't cast str to int") } } - _ => fail ~"Can't cast this type" + _ => Err(~"Can't cast this type") } } - expr_lit(lit) => lit_to_const(lit), + expr_path(_) => { + match lookup_const(tcx, e) { + Some(actual_e) => eval_const_expr_partial(tcx, actual_e), + None => Err(~"Non-constant path in constant expr") + } + } + expr_lit(lit) => Ok(lit_to_const(lit)), // If we have a vstore, just keep going; it has to be a string - expr_vstore(e, _) => eval_const_expr(tcx, e), - _ => fail ~"Unsupported constant expr" + expr_vstore(e, _) => eval_const_expr_partial(tcx, e), + _ => Err(~"Unsupported constant expr") } } diff --git a/src/rustc/middle/resolve.rs b/src/rustc/middle/resolve.rs index 045905bbe07b5..b6e1729a07a7f 100644 --- a/src/rustc/middle/resolve.rs +++ b/src/rustc/middle/resolve.rs @@ -257,7 +257,10 @@ enum RibKind { MethodRibKind(node_id, MethodSort), // We passed through a function *item* scope. Disallow upvars. - OpaqueFunctionRibKind + OpaqueFunctionRibKind, + + // We're in a constant item. Can't refer to dynamic stuff. + ConstantItemRibKind } // Methods can be required or provided. Required methods only occur in traits. @@ -3114,9 +3117,16 @@ impl Resolver { return None; } + ConstantItemRibKind => { + // Still doesn't deal with upvars + self.session.span_err(span, + ~"attempt to use a non-constant \ + value in a constant"); + + } } - rib_index += 1u; + rib_index += 1; } return Some(dl_def(def)); @@ -3130,8 +3140,8 @@ impl Resolver { // XXX: Try caching? let mut i = (*ribs).len(); - while i != 0u { - i -= 1u; + while i != 0 { + i -= 1; let rib = (*ribs).get_elt(i); match rib.bindings.find(name) { Some(def_like) => { @@ -3179,7 +3189,33 @@ impl Resolver { } match item.node { - item_enum(_, type_parameters) | + + // enum item: resolve all the variants' discrs, + // then resolve the ty params + item_enum(enum_def, type_parameters) => { + + for enum_def.variants.each() |variant| { + do variant.node.disr_expr.iter() |dis_expr| { + // resolve the discriminator expr + // as a constant + self.with_constant_rib(|| { + self.resolve_expr(*dis_expr, visitor); + }); + } + } + + // n.b. the discr expr gets visted twice. + // but maybe it's okay since the first time will signal an + // error if there is one? 
-- tjc + do self.with_type_parameter_rib + (HasTypeParameters(&type_parameters, item.id, 0, + NormalRibKind)) + || { + + visit_item(item, (), visitor); + } + } + item_ty(_, type_parameters) => { do self.with_type_parameter_rib (HasTypeParameters(&type_parameters, item.id, 0u, @@ -3344,7 +3380,9 @@ impl Resolver { } item_const(*) => { - visit_item(item, (), visitor); + self.with_constant_rib(|| { + visit_item(item, (), visitor); + }); } item_mac(*) => { @@ -3401,6 +3439,12 @@ impl Resolver { f(); (*self.label_ribs).pop(); } + fn with_constant_rib(f: fn()) { + (*self.value_ribs).push(@Rib(ConstantItemRibKind)); + f(); + (*self.value_ribs).pop(); + } + fn resolve_function(rib_kind: RibKind, optional_declaration: Option<@fn_decl>, @@ -4127,7 +4171,7 @@ impl Resolver { namespace); } - if path.idents.len() > 1u { + if path.idents.len() > 1 { return self.resolve_module_relative_path(path, self.xray_context, namespace); diff --git a/src/rustc/middle/typeck/check.rs b/src/rustc/middle/typeck/check.rs index 2bf124526ac8e..9a7f2192cb144 100644 --- a/src/rustc/middle/typeck/check.rs +++ b/src/rustc/middle/typeck/check.rs @@ -2219,9 +2219,14 @@ fn check_block(fcx0: @fn_ctxt, blk: ast::blk) -> bool { fn check_const(ccx: @crate_ctxt, _sp: span, e: @ast::expr, id: ast::node_id) { let rty = ty::node_id_to_type(ccx.tcx, id); let fcx = blank_fn_ctxt(ccx, rty, e.id); + let declty = fcx.ccx.tcx.tcache.get(local_def(id)).ty; + check_const_with_ty(fcx, _sp, e, declty); +} + +fn check_const_with_ty(fcx: @fn_ctxt, _sp: span, e: @ast::expr, + declty: ty::t) { check_expr(fcx, e, None); let cty = fcx.expr_ty(e); - let declty = fcx.ccx.tcx.tcache.get(local_def(id)).ty; demand::suptype(fcx, e.span, declty, cty); regionck::regionck_expr(fcx, e); writeback::resolve_type_vars_in_expr(fcx, e); @@ -2259,27 +2264,31 @@ fn check_enum_variants(ccx: @crate_ctxt, variants: &mut ~[ty::variant_info]) { let rty = ty::node_id_to_type(ccx.tcx, id); for vs.each |v| { - match v.node.disr_expr { - Some(e) => { - let fcx = blank_fn_ctxt(ccx, rty, e.id); - check_expr(fcx, e, None); - let cty = fcx.expr_ty(e); + do v.node.disr_expr.iter |e_ref| { + let e = *e_ref; + debug!("disr expr, checking %s", + expr_to_str(e, ccx.tcx.sess.intr())); let declty = ty::mk_int(ccx.tcx); - demand::suptype(fcx, e.span, declty, cty); + let fcx = blank_fn_ctxt(ccx, rty, e.id); + check_const_with_ty(fcx, e.span, e, declty); // check_expr (from check_const pass) doesn't guarantee // that the expression is in an form that eval_const_expr can // handle, so we may still get an internal compiler error - match const_eval::eval_const_expr(ccx.tcx, e) { - const_eval::const_int(val) => { + + match const_eval::eval_const_expr_partial(ccx.tcx, e) { + Ok(const_eval::const_int(val)) => { *disr_val = val as int; } - _ => { + Ok(_) => { ccx.tcx.sess.span_err(e.span, ~"expected signed integer \ constant"); } + Err(err) => { + ccx.tcx.sess.span_err(e.span, + #fmt("expected constant: %s", err)); + + } } - } - _ => () } if vec::contains(*disr_vals, &*disr_val) { ccx.tcx.sess.span_err(v.span, diff --git a/src/test/compile-fail/issue-3521-2.rs b/src/test/compile-fail/issue-3521-2.rs new file mode 100644 index 0000000000000..5af0417af047f --- /dev/null +++ b/src/test/compile-fail/issue-3521-2.rs @@ -0,0 +1,7 @@ +fn main() { + let foo = 100; + + const y: int = foo + 1; //~ ERROR: attempt to use a non-constant value in a constant + + log(error, y); +} diff --git a/src/test/run-pass/issue-2428.rs b/src/test/run-pass/issue-2428.rs new file mode 100644 index 0000000000000..76fc674e73448 
--- /dev/null +++ b/src/test/run-pass/issue-2428.rs @@ -0,0 +1,10 @@ +fn main() { + let foo = 100; + const quux: int = 5; + + enum Stuff { + Bar = quux + } + + assert (Bar as int == quux); +} From 0643466f85a765bbd0d7ec14a96e0980795aebec Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Mon, 15 Oct 2012 13:14:23 -0700 Subject: [PATCH 08/40] Fix whitespace --- src/rustc/middle/resolve.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/rustc/middle/resolve.rs b/src/rustc/middle/resolve.rs index b6e1729a07a7f..ba8e6c26274ab 100644 --- a/src/rustc/middle/resolve.rs +++ b/src/rustc/middle/resolve.rs @@ -3199,7 +3199,7 @@ impl Resolver { // resolve the discriminator expr // as a constant self.with_constant_rib(|| { - self.resolve_expr(*dis_expr, visitor); + self.resolve_expr(*dis_expr, visitor); }); } } @@ -3215,7 +3215,7 @@ impl Resolver { visit_item(item, (), visitor); } } - + item_ty(_, type_parameters) => { do self.with_type_parameter_rib (HasTypeParameters(&type_parameters, item.id, 0u, From 2a1aa9fb5356dc43e649b86622bd12463722d2af Mon Sep 17 00:00:00 2001 From: Niko Matsakis Date: Sun, 14 Oct 2012 13:39:17 -0700 Subject: [PATCH 09/40] Check whether loans conflict with old loans or with themselves. Along the way, convert from dvec-of-dvec representation to track loans in scope to just a single flattened list. It's more convenient. Fixes #3765. r+ pcwalton. --- src/rustc/middle/borrowck.rs | 9 +- src/rustc/middle/borrowck/check_loans.rs | 85 ++++++++++------- src/rustc/middle/borrowck/gather_loans.rs | 95 +++++++++++-------- src/rustc/middle/borrowck/loan.rs | 45 +++++---- ...borrowck-loan-local-as-both-mut-and-imm.rs | 25 +++++ 5 files changed, 165 insertions(+), 94 deletions(-) create mode 100644 src/test/compile-fail/borrowck-loan-local-as-both-mut-and-imm.rs diff --git a/src/rustc/middle/borrowck.rs b/src/rustc/middle/borrowck.rs index e2f7ba20642aa..02fd2998f4dba 100644 --- a/src/rustc/middle/borrowck.rs +++ b/src/rustc/middle/borrowck.rs @@ -383,7 +383,7 @@ impl bckerr : cmp::Eq { type bckres = Result; /// a complete record of a loan that was granted -type loan = {lp: @loan_path, cmt: cmt, mutbl: ast::mutability}; +struct Loan {lp: @loan_path, cmt: cmt, mutbl: ast::mutability} /// maps computed by `gather_loans` that are then used by `check_loans` /// @@ -392,7 +392,7 @@ type loan = {lp: @loan_path, cmt: cmt, mutbl: ast::mutability}; /// - `pure_map`: map from block/expr that must be pure to the error message /// that should be reported if they are not pure type req_maps = { - req_loan_map: HashMap>>, + req_loan_map: HashMap>, pure_map: HashMap }; @@ -582,6 +582,11 @@ impl borrowck_ctxt { method_map: self.method_map}; mc.mut_to_str(mutbl) } + + fn loan_to_repr(loan: &Loan) -> ~str { + fmt!("Loan(lp=%?, cmt=%s, mutbl=%?)", + loan.lp, self.cmt_to_repr(loan.cmt), loan.mutbl) + } } // The inherent mutability of a component is its default mutability diff --git a/src/rustc/middle/borrowck/check_loans.rs b/src/rustc/middle/borrowck/check_loans.rs index 6a9195b45096b..7f95d44fd3b85 100644 --- a/src/rustc/middle/borrowck/check_loans.rs +++ b/src/rustc/middle/borrowck/check_loans.rs @@ -131,18 +131,15 @@ impl check_loan_ctxt { } } - fn walk_loans(scope_id: ast::node_id, - f: fn(v: &loan) -> bool) { + fn walk_loans(scope_id: ast::node_id, f: fn(v: &Loan) -> bool) { let mut scope_id = scope_id; let region_map = self.tcx().region_map; let req_loan_map = self.req_maps.req_loan_map; loop { - for req_loan_map.find(scope_id).each |loanss| { - for loanss.each |loans| { - for 
loans.each |loan| { - if !f(loan) { return; } - } + for req_loan_map.find(scope_id).each |loans| { + for loans.each |loan| { + if !f(loan) { return; } } } @@ -155,7 +152,7 @@ impl check_loan_ctxt { fn walk_loans_of(scope_id: ast::node_id, lp: @loan_path, - f: fn(v: &loan) -> bool) { + f: fn(v: &Loan) -> bool) { for self.walk_loans(scope_id) |loan| { if loan.lp == lp { if !f(loan) { return; } @@ -256,36 +253,58 @@ impl check_loan_ctxt { } fn check_for_conflicting_loans(scope_id: ast::node_id) { - let new_loanss = match self.req_maps.req_loan_map.find(scope_id) { + debug!("check_for_conflicting_loans(scope_id=%?)", scope_id); + + let new_loans = match self.req_maps.req_loan_map.find(scope_id) { None => return, - Some(loanss) => loanss + Some(loans) => loans }; + debug!("new_loans has length %?", new_loans.len()); + let par_scope_id = self.tcx().region_map.get(scope_id); for self.walk_loans(par_scope_id) |old_loan| { - for new_loanss.each |new_loans| { - for new_loans.each |new_loan| { - if old_loan.lp != new_loan.lp { loop; } - match (old_loan.mutbl, new_loan.mutbl) { - (m_const, _) | (_, m_const) | - (m_mutbl, m_mutbl) | (m_imm, m_imm) => { - /*ok*/ - } - - (m_mutbl, m_imm) | (m_imm, m_mutbl) => { - self.bccx.span_err( - new_loan.cmt.span, - fmt!("loan of %s as %s \ - conflicts with prior loan", - self.bccx.cmt_to_str(new_loan.cmt), - self.bccx.mut_to_str(new_loan.mutbl))); - self.bccx.span_note( - old_loan.cmt.span, - fmt!("prior loan as %s granted here", - self.bccx.mut_to_str(old_loan.mutbl))); - } - } - } + debug!("old_loan=%?", self.bccx.loan_to_repr(old_loan)); + + for new_loans.each |new_loan| { + self.report_error_if_loans_conflict(old_loan, new_loan); + } + } + + let len = new_loans.len(); + for uint::range(0, len) |i| { + let loan_i = new_loans[i]; + for uint::range(i+1, len) |j| { + let loan_j = new_loans[j]; + self.report_error_if_loans_conflict(&loan_i, &loan_j); + } + } + } + + fn report_error_if_loans_conflict(&self, + old_loan: &Loan, + new_loan: &Loan) { + if old_loan.lp != new_loan.lp { + return; + } + + match (old_loan.mutbl, new_loan.mutbl) { + (m_const, _) | (_, m_const) | + (m_mutbl, m_mutbl) | (m_imm, m_imm) => { + /*ok*/ + } + + (m_mutbl, m_imm) | (m_imm, m_mutbl) => { + self.bccx.span_err( + new_loan.cmt.span, + fmt!("loan of %s as %s \ + conflicts with prior loan", + self.bccx.cmt_to_str(new_loan.cmt), + self.bccx.mut_to_str(new_loan.mutbl))); + self.bccx.span_note( + old_loan.cmt.span, + fmt!("prior loan as %s granted here", + self.bccx.mut_to_str(old_loan.mutbl))); } } } diff --git a/src/rustc/middle/borrowck/gather_loans.rs b/src/rustc/middle/borrowck/gather_loans.rs index a2c8f18507138..e8d11fd1708f9 100644 --- a/src/rustc/middle/borrowck/gather_loans.rs +++ b/src/rustc/middle/borrowck/gather_loans.rs @@ -213,9 +213,10 @@ fn req_loans_in_expr(ex: @ast::expr, } impl gather_loan_ctxt { - fn tcx() -> ty::ctxt { self.bccx.tcx } + fn tcx(&self) -> ty::ctxt { self.bccx.tcx } - fn guarantee_adjustments(expr: @ast::expr, + fn guarantee_adjustments(&self, + expr: @ast::expr, adjustment: &ty::AutoAdjustment) { debug!("guarantee_adjustments(expr=%s, adjustment=%?)", expr_repr(self.tcx(), expr), adjustment); @@ -256,7 +257,8 @@ impl gather_loan_ctxt { // out loans, which will be added to the `req_loan_map`. This can // also entail "rooting" GC'd pointers, which means ensuring // dynamically that they are not freed. 
- fn guarantee_valid(cmt: cmt, + fn guarantee_valid(&self, + cmt: cmt, req_mutbl: ast::mutability, scope_r: ty::region) { @@ -280,35 +282,12 @@ impl gather_loan_ctxt { // it within that scope, the loan will be detected and an // error will be reported. Some(_) => { - match self.bccx.loan(cmt, scope_r, req_mutbl) { - Err(e) => { self.bccx.report(e); } - Ok(loans) if loans.len() == 0 => {} - Ok(loans) => { - match scope_r { - ty::re_scope(scope_id) => { - self.add_loans(scope_id, loans); - - if req_mutbl == m_imm && cmt.mutbl != m_imm { - self.bccx.loaned_paths_imm += 1; - - if self.tcx().sess.borrowck_note_loan() { - self.bccx.span_note( - cmt.span, - fmt!("immutable loan required")); - } - } else { - self.bccx.loaned_paths_same += 1; - } + match self.bccx.loan(cmt, scope_r, req_mutbl) { + Err(e) => { self.bccx.report(e); } + Ok(move loans) => { + self.add_loans(cmt, req_mutbl, scope_r, move loans); } - _ => { - self.bccx.tcx.sess.span_bug( - cmt.span, - fmt!("loans required but scope is scope_region is %s", - region_to_str(self.tcx(), scope_r))); - } - } } - } } // The path is not loanable: in that case, we must try and @@ -385,7 +364,8 @@ impl gather_loan_ctxt { // has type `@mut{f:int}`, this check might fail because `&x.f` // reqires an immutable pointer, but `f` lives in (aliased) // mutable memory. - fn check_mutbl(req_mutbl: ast::mutability, + fn check_mutbl(&self, + req_mutbl: ast::mutability, cmt: cmt) -> bckres { debug!("check_mutbl(req_mutbl=%?, cmt.mutbl=%?)", req_mutbl, cmt.mutbl); @@ -407,21 +387,58 @@ impl gather_loan_ctxt { } } - fn add_loans(scope_id: ast::node_id, loans: @DVec) { + fn add_loans(&self, + cmt: cmt, + req_mutbl: ast::mutability, + scope_r: ty::region, + +loans: ~[Loan]) { + if loans.len() == 0 { + return; + } + + let scope_id = match scope_r { + ty::re_scope(scope_id) => scope_id, + _ => { + self.bccx.tcx.sess.span_bug( + cmt.span, + fmt!("loans required but scope is scope_region is %s", + region_to_str(self.tcx(), scope_r))); + } + }; + + self.add_loans_to_scope_id(scope_id, move loans); + + if req_mutbl == m_imm && cmt.mutbl != m_imm { + self.bccx.loaned_paths_imm += 1; + + if self.tcx().sess.borrowck_note_loan() { + self.bccx.span_note( + cmt.span, + fmt!("immutable loan required")); + } + } else { + self.bccx.loaned_paths_same += 1; + } + } + + fn add_loans_to_scope_id(&self, scope_id: ast::node_id, +loans: ~[Loan]) { debug!("adding %u loans to scope_id %?", loans.len(), scope_id); match self.req_maps.req_loan_map.find(scope_id) { - Some(l) => { - l.push(loans); + Some(req_loans) => { + req_loans.push_all(loans); } None => { - self.req_maps.req_loan_map.insert( - scope_id, @dvec::from_vec(~[loans])); + let dvec = @dvec::from_vec(move loans); + self.req_maps.req_loan_map.insert(scope_id, dvec); } } } - fn gather_pat(discr_cmt: cmt, root_pat: @ast::pat, - arm_id: ast::node_id, alt_id: ast::node_id) { + fn gather_pat(&self, + discr_cmt: cmt, + root_pat: @ast::pat, + arm_id: ast::node_id, + alt_id: ast::node_id) { do self.bccx.cat_pattern(discr_cmt, root_pat) |cmt, pat| { match pat.node { ast::pat_ident(bm, _, _) if !self.pat_is_variant(pat) => { @@ -475,7 +492,7 @@ impl gather_loan_ctxt { } } - fn pat_is_variant(pat: @ast::pat) -> bool { + fn pat_is_variant(&self, pat: @ast::pat) -> bool { pat_util::pat_is_variant(self.bccx.tcx.def_map, pat) } } diff --git a/src/rustc/middle/borrowck/loan.rs b/src/rustc/middle/borrowck/loan.rs index 8d9d7a5796a9a..5d3ccc392139e 100644 --- a/src/rustc/middle/borrowck/loan.rs +++ b/src/rustc/middle/borrowck/loan.rs @@ -8,35 
+8,37 @@ use result::{Result, Ok, Err}; impl borrowck_ctxt { fn loan(cmt: cmt, scope_region: ty::region, - mutbl: ast::mutability) -> bckres<@DVec> { - let lc = loan_ctxt_(@{bccx: self, - scope_region: scope_region, - loans: @DVec()}); + mutbl: ast::mutability) -> bckres<~[Loan]> { + let lc = LoanContext { + bccx: self, + scope_region: scope_region, + loans: ~[] + }; match lc.loan(cmt, mutbl) { - Ok(()) => {Ok(lc.loans)} - Err(e) => {Err(e)} + Err(e) => Err(e), + Ok(()) => { + let LoanContext {loans, _} = move lc; + Ok(loans) + } } } } -type loan_ctxt_ = { +struct LoanContext { bccx: borrowck_ctxt, // the region scope for which we must preserve the memory scope_region: ty::region, // accumulated list of loans that will be required - loans: @DVec -}; - -enum loan_ctxt { - loan_ctxt_(@loan_ctxt_) + mut loans: ~[Loan] } -impl loan_ctxt { - fn tcx() -> ty::ctxt { self.bccx.tcx } +impl LoanContext { + fn tcx(&self) -> ty::ctxt { self.bccx.tcx } - fn issue_loan(cmt: cmt, + fn issue_loan(&self, + cmt: cmt, scope_ub: ty::region, req_mutbl: ast::mutability) -> bckres<()> { if self.bccx.is_subregion_of(self.scope_region, scope_ub) { @@ -57,12 +59,13 @@ impl loan_ctxt { } } - (*self.loans).push({ + self.loans.push(Loan { // Note: cmt.lp must be Some(_) because otherwise this // loan process does not apply at all. lp: cmt.lp.get(), cmt: cmt, - mutbl: req_mutbl}); + mutbl: req_mutbl + }); return Ok(()); } else { // The loan being requested lives longer than the data @@ -73,7 +76,7 @@ impl loan_ctxt { } } - fn loan(cmt: cmt, req_mutbl: ast::mutability) -> bckres<()> { + fn loan(&self, cmt: cmt, req_mutbl: ast::mutability) -> bckres<()> { debug!("loan(%s, %s)", self.bccx.cmt_to_repr(cmt), self.bccx.mut_to_str(req_mutbl)); @@ -144,7 +147,8 @@ impl loan_ctxt { // A "stable component" is one where assigning the base of the // component cannot cause the component itself to change types. // Example: record fields. - fn loan_stable_comp(cmt: cmt, + fn loan_stable_comp(&self, + cmt: cmt, cmt_base: cmt, req_mutbl: ast::mutability) -> bckres<()> { let base_mutbl = match req_mutbl { @@ -162,7 +166,8 @@ impl loan_ctxt { // An "unstable deref" means a deref of a ptr/comp where, if the // base of the deref is assigned to, pointers into the result of the // deref would be invalidated. Examples: interior of variants, uniques. - fn loan_unstable_deref(cmt: cmt, + fn loan_unstable_deref(&self, + cmt: cmt, cmt_base: cmt, req_mutbl: ast::mutability) -> bckres<()> { // Variant components: the base must be immutable, because diff --git a/src/test/compile-fail/borrowck-loan-local-as-both-mut-and-imm.rs b/src/test/compile-fail/borrowck-loan-local-as-both-mut-and-imm.rs new file mode 100644 index 0000000000000..54048ed2fd8cb --- /dev/null +++ b/src/test/compile-fail/borrowck-loan-local-as-both-mut-and-imm.rs @@ -0,0 +1,25 @@ +use core::either::{Either, Left, Right}; + + fn f(x: &mut Either, y: &Either) -> int { + match *y { + Left(ref z) => { + *x = Right(1.0); + *z + } + _ => fail + } + } + + fn g() { + let mut x: Either = Left(3); + io::println(f(&mut x, &x).to_str()); //~ ERROR conflicts with prior loan + } + + fn h() { + let mut x: Either = Left(3); + let y: &Either = &x; + let z: &mut Either = &mut x; //~ ERROR conflicts with prior loan + *z = *y; + } + + fn main() {} From c886629d4f2fb4a2712884b07de38101e9dc956c Mon Sep 17 00:00:00 2001 From: Patrick Walton Date: Mon, 8 Oct 2012 12:39:30 -0700 Subject: [PATCH 10/40] rustc: Implement monomorphic default methods. 
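A provided ("default") method is one whose body is given in the trait
declaration itself, so an impl may simply omit it. With this patch the
provided body is written into crate metadata under a method-sort tag
('p' for provided, 'r' for required), coherence synthesizes a def ID for
it for each implementing impl, and the body is monomorphized against
that impl's self type when it is called.

The sketch below shows the kind of program this is meant to make work.
It is only an approximation of the new run-pass test
(default-method-simple.rs in the diffstat below); the names `Foo`, `A`,
`f` and `g` are illustrative, and the exact syntax of the era may
differ:

    trait Foo {
        // Provided (default) method: the body lives in the trait.
        fn f(&self) {
            self.g();
        }
        // Required method: every impl must supply it.
        fn g(&self);
    }

    struct A { x: int }

    impl A : Foo {
        // `f` is not overridden; `a.f()` below runs the trait-provided
        // body, monomorphized with self type `A`.
        fn g(&self) { io::println("in g"); }
    }

    fn main() {
        let a = A { x: 1 };
        a.f();
    }
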
r=nmatsakis --- src/rustc/metadata/common.rs | 2 + src/rustc/metadata/csearch.rs | 14 + src/rustc/metadata/decoder.rs | 54 +++- src/rustc/metadata/encoder.rs | 30 ++- src/rustc/middle/resolve.rs | 21 +- src/rustc/middle/trans/base.rs | 94 ++----- src/rustc/middle/trans/callee.rs | 28 +- src/rustc/middle/trans/closure.rs | 4 +- src/rustc/middle/trans/common.rs | 30 ++- src/rustc/middle/trans/expr.rs | 4 +- src/rustc/middle/trans/foreign.rs | 4 +- src/rustc/middle/trans/inline.rs | 19 +- src/rustc/middle/trans/meth.rs | 91 +++++-- src/rustc/middle/trans/monomorphize.rs | 63 ++++- src/rustc/middle/trans/type_use.rs | 9 +- src/rustc/middle/ty.rs | 51 ++-- src/rustc/middle/typeck.rs | 9 +- src/rustc/middle/typeck/check/method.rs | 72 +++-- src/rustc/middle/typeck/check/vtable.rs | 2 +- src/rustc/middle/typeck/coherence.rs | 295 +++++++++++++-------- src/rustc/middle/typeck/collect.rs | 19 +- src/test/run-pass/default-method-simple.rs | 23 ++ 22 files changed, 638 insertions(+), 300 deletions(-) create mode 100644 src/test/run-pass/default-method-simple.rs diff --git a/src/rustc/metadata/common.rs b/src/rustc/metadata/common.rs index 1857abf2cf2f9..972d48a613531 100644 --- a/src/rustc/metadata/common.rs +++ b/src/rustc/metadata/common.rs @@ -124,5 +124,7 @@ enum astencode_tag { // Reserves 0x50 -- 0x6f tag_table_legacy_boxed_trait = 0x63 } +const tag_item_trait_method_sort: uint = 0x70; + type link_meta = {name: ~str, vers: ~str, extras_hash: ~str}; diff --git a/src/rustc/metadata/csearch.rs b/src/rustc/metadata/csearch.rs index 5f5f938541f1d..ea6bd499a3b5e 100644 --- a/src/rustc/metadata/csearch.rs +++ b/src/rustc/metadata/csearch.rs @@ -23,6 +23,7 @@ export get_region_param; export get_enum_variants; export get_impls_for_mod; export get_trait_methods; +export get_provided_trait_methods; export get_method_names_if_trait; export get_item_attrs; export each_path; @@ -31,6 +32,12 @@ export get_impl_traits; export get_impl_method; export get_item_path; export maybe_get_item_ast, found_ast, found, found_parent, not_found; +export ProvidedTraitMethodInfo; + +struct ProvidedTraitMethodInfo { + ty: ty::method, + def_id: ast::def_id +} fn get_symbol(cstore: cstore::cstore, def: ast::def_id) -> ~str { let cdata = cstore::get_crate_data(cstore, def.crate).data; @@ -99,6 +106,13 @@ fn get_trait_methods(tcx: ty::ctxt, def: ast::def_id) -> @~[ty::method] { decoder::get_trait_methods(cstore.intr, cdata, def.node, tcx) } +fn get_provided_trait_methods(tcx: ty::ctxt, def: ast::def_id) -> + ~[ProvidedTraitMethodInfo] { + let cstore = tcx.cstore; + let cdata = cstore::get_crate_data(cstore, def.crate); + decoder::get_provided_trait_methods(cstore.intr, cdata, def.node, tcx) +} + fn get_method_names_if_trait(cstore: cstore::cstore, def: ast::def_id) -> Option<@DVec<(ast::ident, ast::self_ty_)>> { diff --git a/src/rustc/metadata/decoder.rs b/src/rustc/metadata/decoder.rs index e639449a8573f..f0911bd1aa27a 100644 --- a/src/rustc/metadata/decoder.rs +++ b/src/rustc/metadata/decoder.rs @@ -19,6 +19,7 @@ use syntax::diagnostic::span_handler; use common::*; use syntax::parse::token::ident_interner; use hash::{Hash, HashUtil}; +use csearch::ProvidedTraitMethodInfo; export class_dtor; export get_class_fields; @@ -40,6 +41,7 @@ export get_crate_hash; export get_crate_vers; export get_impls_for_mod; export get_trait_methods; +export get_provided_trait_methods; export get_method_names_if_trait; export get_item_attrs; export get_crate_module_paths; @@ -164,6 +166,13 @@ fn item_family(item: ebml::Doc) -> Family { } } +fn 
item_method_sort(item: ebml::Doc) -> char { + for ebml::tagged_docs(item, tag_item_trait_method_sort) |doc| { + return str::from_bytes(ebml::doc_data(doc))[0] as char; + } + return 'r'; +} + fn item_symbol(item: ebml::Doc) -> ~str { let sym = ebml::get_doc(item, tag_items_data_item_symbol); return str::from_bytes(ebml::doc_data(sym)); @@ -701,6 +710,7 @@ fn get_trait_methods(intr: @ident_interner, cdata: cmd, id: ast::node_id, let bounds = item_ty_param_bounds(mth, tcx, cdata); let name = item_name(intr, mth); let ty = doc_type(mth, tcx, cdata); + let def_id = item_def_id(mth, cdata); let fty = match ty::get(ty).sty { ty::ty_fn(f) => f, _ => { @@ -708,14 +718,52 @@ fn get_trait_methods(intr: @ident_interner, cdata: cmd, id: ast::node_id, ~"get_trait_methods: id has non-function type"); } }; let self_ty = get_self_ty(mth); - result.push({ident: name, tps: bounds, fty: fty, - self_ty: self_ty, - vis: ast::public}); + result.push({ident: name, tps: bounds, fty: fty, self_ty: self_ty, + vis: ast::public, def_id: def_id}); } debug!("get_trait_methods: }"); @result } +fn get_provided_trait_methods(intr: @ident_interner, cdata: cmd, + id: ast::node_id, tcx: ty::ctxt) -> + ~[ProvidedTraitMethodInfo] { + let data = cdata.data; + let item = lookup_item(id, data); + let mut result = ~[]; + + for ebml::tagged_docs(item, tag_item_trait_method) |mth| { + if item_method_sort(mth) != 'p' { loop; } + + let did = item_def_id(mth, cdata); + + let bounds = item_ty_param_bounds(mth, tcx, cdata); + let name = item_name(intr, mth); + let ty = doc_type(mth, tcx, cdata); + + let fty; + match ty::get(ty).sty { + ty::ty_fn(f) => fty = f, + _ => { + tcx.diag.handler().bug(~"get_provided_trait_methods(): id \ + has non-function type"); + } + } + + let self_ty = get_self_ty(mth); + let ty_method = {ident: name, tps: bounds, fty: fty, self_ty: self_ty, + vis: ast::public, def_id: did}; + let provided_trait_method_info = ProvidedTraitMethodInfo { + ty: ty_method, + def_id: did + }; + + vec::push(&mut result, move provided_trait_method_info); + } + + return move result; +} + // If the item in question is a trait, returns its set of methods and // their self types. Otherwise, returns none. This overlaps in an // annoying way with get_trait_methods. 
diff --git a/src/rustc/metadata/encoder.rs b/src/rustc/metadata/encoder.rs index 2fdd39a2ca483..652af81659a08 100644 --- a/src/rustc/metadata/encoder.rs +++ b/src/rustc/metadata/encoder.rs @@ -388,6 +388,12 @@ fn encode_self_type(ebml_w: ebml::Serializer, self_type: ast::self_ty_) { ebml_w.end_tag(); } +fn encode_method_sort(ebml_w: ebml::Serializer, sort: char) { + ebml_w.start_tag(tag_item_trait_method_sort); + ebml_w.writer.write(&[ sort as u8 ]); + ebml_w.end_tag(); +} + /* Returns an index of items in this class */ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::Serializer, id: node_id, path: ast_map::path, @@ -726,6 +732,8 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Serializer, } } item_trait(tps, traits, ms) => { + let provided_methods = dvec::DVec(); + add_to_index(); ebml_w.start_tag(tag_items_data_item); encode_def_id(ebml_w, local_def(item.id)); @@ -746,12 +754,21 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Serializer, encode_type(ecx, ebml_w, ty::mk_fn(tcx, mty.fty)); encode_family(ebml_w, purity_fn_family(mty.fty.meta.purity)); encode_self_type(ebml_w, mty.self_ty); + encode_method_sort(ebml_w, 'r'); ebml_w.end_tag(); } provided(m) => { - encode_info_for_method(ecx, ebml_w, path, - should_inline(m.attrs), item.id, - m, m.tps); + provided_methods.push(m); + + ebml_w.start_tag(tag_item_trait_method); + encode_def_id(ebml_w, local_def(m.id)); + encode_name(ecx, ebml_w, mty.ident); + encode_type_param_bounds(ebml_w, ecx, m.tps); + encode_type(ecx, ebml_w, ty::mk_fn(tcx, mty.fty)); + encode_family(ebml_w, purity_fn_family(mty.fty.meta.purity)); + encode_self_type(ebml_w, mty.self_ty); + encode_method_sort(ebml_w, 'p'); + ebml_w.end_tag(); } } i += 1u; @@ -785,7 +802,12 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Serializer, ebml_w.end_tag(); } - + // Finally, output all the provided methods as items. + for provided_methods.each |m| { + index.push({val: m.id, pos: ebml_w.writer.tell()}); + encode_info_for_method(ecx, ebml_w, path, true, item.id, *m, + m.tps); + } } item_mac(*) => fail ~"item macros unimplemented" } diff --git a/src/rustc/middle/resolve.rs b/src/rustc/middle/resolve.rs index ba8e6c26274ab..4f170fd050be8 100644 --- a/src/rustc/middle/resolve.rs +++ b/src/rustc/middle/resolve.rs @@ -4680,6 +4680,9 @@ impl Resolver { } fn search_for_traits_containing_method(name: ident) -> @DVec { + debug!("(searching for traits containing method) looking for '%s'", + self.session.str_of(name)); + let found_traits = @DVec(); let mut search_module = self.current_module; loop { @@ -4687,8 +4690,8 @@ impl Resolver { match copy self.current_trait_refs { Some(trait_def_ids) => { for trait_def_ids.each |trait_def_id| { - self.add_trait_info_if_containing_method - (found_traits, *trait_def_id, name); + self.add_trait_info_if_containing_method( + found_traits, *trait_def_id, name); } } None => { @@ -4702,8 +4705,8 @@ impl Resolver { Some(def) => { match def.def { def_ty(trait_def_id) => { - self.add_trait_info_if_containing_method - (found_traits, trait_def_id, name); + self.add_trait_info_if_containing_method( + found_traits, trait_def_id, name); } _ => { // Continue. @@ -4730,8 +4733,8 @@ impl Resolver { match def.def { def_ty(trait_def_id) => { self. - add_trait_info_if_containing_method - (found_traits, trait_def_id, name); + add_trait_info_if_containing_method( + found_traits, trait_def_id, name); } _ => { // Continue. 
@@ -4766,6 +4769,12 @@ impl Resolver { trait_def_id: def_id, name: ident) { + debug!("(adding trait info if containing method) trying trait %d:%d \ + for method '%s'", + trait_def_id.crate, + trait_def_id.node, + self.session.str_of(name)); + match self.trait_info.find(trait_def_id) { Some(trait_info) if trait_info.contains_key(name) => { debug!("(adding trait info if containing method) found trait \ diff --git a/src/rustc/middle/trans/base.rs b/src/rustc/middle/trans/base.rs index 06d5b2f239e3c..93e8435d3e957 100644 --- a/src/rustc/middle/trans/base.rs +++ b/src/rustc/middle/trans/base.rs @@ -206,7 +206,7 @@ fn GEP_enum(bcx: block, llblobptr: ValueRef, enum_id: ast::def_id, assert ix < variant.args.len(); let arg_lltys = vec::map(variant.args, |aty| { - type_of(ccx, ty::subst_tps(ccx.tcx, ty_substs, *aty)) + type_of(ccx, ty::subst_tps(ccx.tcx, ty_substs, None, *aty)) }); let typed_blobptr = PointerCast(bcx, llblobptr, T_ptr(T_struct(arg_lltys))); @@ -385,16 +385,16 @@ fn get_res_dtor(ccx: @crate_ctxt, did: ast::def_id, let _icx = ccx.insn_ctxt("trans_res_dtor"); if (substs.is_not_empty()) { let did = if did.crate != ast::local_crate { - inline::maybe_instantiate_inline(ccx, did) + inline::maybe_instantiate_inline(ccx, did, true) } else { did }; assert did.crate == ast::local_crate; - monomorphize::monomorphic_fn(ccx, did, substs, None, None).val + monomorphize::monomorphic_fn(ccx, did, substs, None, None, None).val } else if did.crate == ast::local_crate { get_item_val(ccx, did.node) } else { let tcx = ccx.tcx; let name = csearch::get_symbol(ccx.sess.cstore, did); - let class_ty = ty::subst_tps(tcx, substs, + let class_ty = ty::subst_tps(tcx, substs, None, ty::lookup_item_type(tcx, parent_id).ty); let llty = type_of_dtor(ccx, class_ty); get_extern_fn(ccx.externs, ccx.llmod, name, lib::llvm::CCallConv, @@ -529,7 +529,8 @@ fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t, let v_id = variant.id; for vec::each(fn_ty.sig.inputs) |a| { let llfldp_a = GEP_enum(cx, a_tup, tid, v_id, tps, j); - let ty_subst = ty::subst_tps(ccx.tcx, tps, a.ty); + // XXX: Is "None" right here? + let ty_subst = ty::subst_tps(ccx.tcx, tps, None, a.ty); cx = f(cx, llfldp_a, ty_subst); j += 1u; } @@ -1392,8 +1393,11 @@ fn mk_standard_basic_blocks(llfn: ValueRef) -> // - create_llargs_for_fn_args. 
// - new_fn_ctxt // - trans_args -fn new_fn_ctxt_w_id(ccx: @crate_ctxt, path: path, - llfndecl: ValueRef, id: ast::node_id, +fn new_fn_ctxt_w_id(ccx: @crate_ctxt, + path: path, + llfndecl: ValueRef, + id: ast::node_id, + impl_id: Option, param_substs: Option, sp: Option) -> fn_ctxt { let llbbs = mk_standard_basic_blocks(llfndecl); @@ -1410,6 +1414,7 @@ fn new_fn_ctxt_w_id(ccx: @crate_ctxt, path: path, lllocals: HashMap(), llupvars: HashMap(), id: id, + impl_id: impl_id, param_substs: param_substs, span: sp, path: path, @@ -1418,7 +1423,7 @@ fn new_fn_ctxt_w_id(ccx: @crate_ctxt, path: path, fn new_fn_ctxt(ccx: @crate_ctxt, path: path, llfndecl: ValueRef, sp: Option) -> fn_ctxt { - return new_fn_ctxt_w_id(ccx, path, llfndecl, -1, None, sp); + return new_fn_ctxt_w_id(ccx, path, llfndecl, -1, None, None, sp); } // NB: must keep 4 fns in sync: @@ -1561,6 +1566,7 @@ fn trans_closure(ccx: @crate_ctxt, path: path, decl: ast::fn_decl, ty_self: self_arg, param_substs: Option, id: ast::node_id, + impl_id: Option, maybe_load_env: fn(fn_ctxt), finish: fn(block)) { ccx.stats.n_closures += 1; @@ -1568,7 +1574,7 @@ fn trans_closure(ccx: @crate_ctxt, path: path, decl: ast::fn_decl, set_uwtable(llfndecl); // Set up arguments to the function. - let fcx = new_fn_ctxt_w_id(ccx, path, llfndecl, id, param_substs, + let fcx = new_fn_ctxt_w_id(ccx, path, llfndecl, id, impl_id, param_substs, Some(body.span)); let raw_llargs = create_llargs_for_fn_args(fcx, ty_self, decl.inputs); @@ -1620,14 +1626,15 @@ fn trans_fn(ccx: @crate_ctxt, llfndecl: ValueRef, ty_self: self_arg, param_substs: Option, - id: ast::node_id) { + id: ast::node_id, + impl_id: Option) { let do_time = ccx.sess.trans_stats(); let start = if do_time { time::get_time() } else { {sec: 0i64, nsec: 0i32} }; let _icx = ccx.insn_ctxt("trans_fn"); ccx.stats.n_fns += 1; trans_closure(ccx, path, decl, body, llfndecl, ty_self, - param_substs, id, + param_substs, id, impl_id, |fcx| { if ccx.sess.opts.extra_debuginfo { debuginfo::create_function(fcx); @@ -1654,7 +1661,7 @@ fn trans_enum_variant(ccx: @crate_ctxt, ty: varg.ty, ident: special_idents::arg, id: varg.id}); - let fcx = new_fn_ctxt_w_id(ccx, ~[], llfndecl, variant.node.id, + let fcx = new_fn_ctxt_w_id(ccx, ~[], llfndecl, variant.node.id, None, param_substs, None); let raw_llargs = create_llargs_for_fn_args(fcx, no_self, fn_args); let ty_param_substs = match param_substs { @@ -1704,7 +1711,7 @@ fn trans_class_dtor(ccx: @crate_ctxt, path: path, let mut class_ty = ty::lookup_item_type(tcx, parent_id).ty; /* Substitute in the class type if necessary */ do option::iter(&psubsts) |ss| { - class_ty = ty::subst_tps(tcx, ss.tys, class_ty); + class_ty = ty::subst_tps(tcx, ss.tys, ss.self_ty, class_ty); } /* The dtor takes a (null) output pointer, and a self argument, @@ -1724,7 +1731,7 @@ fn trans_class_dtor(ccx: @crate_ctxt, path: path, } /* Translate the dtor body */ trans_fn(ccx, path, ast_util::dtor_dec(), - body, lldecl, impl_self(class_ty), psubsts, dtor_id); + body, lldecl, impl_self(class_ty), psubsts, dtor_id, None); lldecl } @@ -1777,7 +1784,7 @@ fn trans_item(ccx: @crate_ctxt, item: ast::item) { let llfndecl = get_item_val(ccx, item.id); trans_fn(ccx, vec::append(*path, ~[path_name(item.ident)]), - decl, body, llfndecl, no_self, None, item.id); + decl, body, llfndecl, no_self, None, item.id, None); } else { for vec::each(body.node.stmts) |stmt| { match stmt.node { @@ -1789,48 +1796,8 @@ fn trans_item(ccx: @crate_ctxt, item: ast::item) { } } } - ast::item_impl(tps, trait_refs, self_ast_ty, ms) => { - 
meth::trans_impl(ccx, *path, item.ident, ms, tps, None); - - // Translate any methods that have provided implementations. - for trait_refs.each |trait_ref_ptr| { - let trait_def = ccx.tcx.def_map.get(trait_ref_ptr.ref_id); - - // XXX: Cross-crate default methods. - let trait_id = def_id_of_def(trait_def); - if trait_id.crate != ast::local_crate { - loop; - } - - // Get the self type. - let self_ty; - match ccx.tcx.ast_ty_to_ty_cache.get(self_ast_ty) { - ty::atttce_resolved(self_type) => self_ty = self_type, - ty::atttce_unresolved => { - ccx.tcx.sess.impossible_case(item.span, - ~"didn't cache self ast ty"); - } - } - - match ccx.tcx.items.get(trait_id.node) { - ast_map::node_item(trait_item, _) => { - match trait_item.node { - ast::item_trait(tps, _, trait_methods) => { - trans_trait(ccx, tps, trait_methods, path, - item.ident, self_ty); - } - _ => { - ccx.tcx.sess.impossible_case(item.span, - ~"trait item not a \ - trait"); - } - } - } - _ => { - ccx.tcx.sess.impossible_case(item.span, ~"no trait item"); - } - } - } + ast::item_impl(tps, _, _, ms) => { + meth::trans_impl(ccx, *path, item.ident, ms, tps, None, item.id); } ast::item_mod(m) => { trans_mod(ccx, m); @@ -1871,16 +1838,7 @@ fn trans_struct_def(ccx: @crate_ctxt, struct_def: @ast::struct_def, // If there are ty params, the ctor will get monomorphized // Translate methods - meth::trans_impl(ccx, *path, ident, struct_def.methods, tps, None); -} - -fn trans_trait(ccx: @crate_ctxt, tps: ~[ast::ty_param], - trait_methods: ~[ast::trait_method], - path: @ast_map::path, ident: ast::ident, - self_ty: ty::t) { - // Translate any methods that have provided implementations - let (_, provided_methods) = ast_util::split_trait_methods(trait_methods); - meth::trans_impl(ccx, *path, ident, provided_methods, tps, Some(self_ty)); + meth::trans_impl(ccx, *path, ident, struct_def.methods, tps, None, id); } // Translate a module. Doing this amounts to translating the items in the @@ -2035,7 +1993,7 @@ fn get_dtor_symbol(ccx: @crate_ctxt, path: path, id: ast::node_id, // this to item_symbols match substs { Some(ss) => { - let mono_ty = ty::subst_tps(ccx.tcx, ss.tys, t); + let mono_ty = ty::subst_tps(ccx.tcx, ss.tys, ss.self_ty, t); mangle_exported_name( ccx, vec::append(path, diff --git a/src/rustc/middle/trans/callee.rs b/src/rustc/middle/trans/callee.rs index aa998fb7b92d1..133f4647e683e 100644 --- a/src/rustc/middle/trans/callee.rs +++ b/src/rustc/middle/trans/callee.rs @@ -184,21 +184,31 @@ fn trans_fn_ref_with_vtables( // Polytype of the function item (may have type params) let fn_tpt = ty::lookup_item_type(tcx, def_id); + // Modify the def_id if this is a default method; we want to be + // monomorphizing the trait's code. + let (def_id, opt_impl_did) = + match tcx.provided_method_sources.find(def_id) { + None => (def_id, None), + Some(source) => (source.method_id, Some(source.impl_id)) + }; + // Check whether this fn has an inlined copy and, if so, redirect // def_id to the local id of the inlined copy. let def_id = { if def_id.crate != ast::local_crate { - inline::maybe_instantiate_inline(ccx, def_id) + let may_translate = opt_impl_did.is_none(); + inline::maybe_instantiate_inline(ccx, def_id, may_translate) } else { def_id } }; - // We must monomorphise if the fn has type parameters or is a rust - // intrinsic. In particular, if we see an intrinsic that is - // inlined from a different crate, we want to reemit the intrinsic - // instead of trying to call it in the other crate. 
- let must_monomorphise = type_params.len() > 0 || { + // We must monomorphise if the fn has type parameters, is a rust + // intrinsic, or is a default method. In particular, if we see an + // intrinsic that is inlined from a different crate, we want to reemit the + // intrinsic instead of trying to call it in the other crate. + let must_monomorphise = type_params.len() > 0 || + opt_impl_did.is_some() || { if def_id.crate == ast::local_crate { let map_node = session::expect( ccx.sess, @@ -222,7 +232,7 @@ fn trans_fn_ref_with_vtables( let mut {val, must_cast} = monomorphize::monomorphic_fn(ccx, def_id, type_params, - vtables, Some(ref_id)); + vtables, opt_impl_did, Some(ref_id)); if must_cast && ref_id != 0 { // Monotype of the REFERENCE to the function (type params // are subst'd) @@ -317,7 +327,9 @@ fn trans_rtcall_or_lang_call_with_type_params(bcx: block, match callee.data { Fn(fn_data) => { let substituted = ty::subst_tps(callee.bcx.tcx(), - type_params, fty); + type_params, + None, + fty); let mut llfnty = type_of::type_of(callee.bcx.ccx(), substituted); llfnty = T_ptr(struct_elt(llfnty, 0)); diff --git a/src/rustc/middle/trans/closure.rs b/src/rustc/middle/trans/closure.rs index 655efe75bf65c..3997076f98713 100644 --- a/src/rustc/middle/trans/closure.rs +++ b/src/rustc/middle/trans/closure.rs @@ -372,7 +372,7 @@ fn trans_expr_fn(bcx: block, let {llbox, cdata_ty, bcx} = build_closure(bcx, cap_vars, ck, ret_handle); trans_closure(ccx, sub_path, decl, body, llfn, no_self, - bcx.fcx.param_substs, id, |fcx| { + bcx.fcx.param_substs, id, None, |fcx| { load_environment(fcx, cdata_ty, cap_vars, ret_handle.is_some(), ck); }, |bcx| { @@ -395,7 +395,7 @@ fn trans_expr_fn(bcx: block, } ty::proto_bare => { trans_closure(ccx, sub_path, decl, body, llfn, no_self, None, - id, |_fcx| { }, |_bcx| { }); + id, None, |_fcx| { }, |_bcx| { }); rslt(bcx, C_null(T_opaque_box_ptr(ccx))) } ty::proto_vstore(ty::vstore_fixed(_)) => { diff --git a/src/rustc/middle/trans/common.rs b/src/rustc/middle/trans/common.rs index 61141f18336fa..0fa22dd65ba7b 100644 --- a/src/rustc/middle/trans/common.rs +++ b/src/rustc/middle/trans/common.rs @@ -181,9 +181,12 @@ struct ValSelfData { enum local_val { local_mem(ValueRef), local_imm(ValueRef), } +// Here `self_ty` is the real type of the self parameter to this method. It +// will only be set in the case of default methods. type param_substs = {tys: ~[ty::t], vtables: Option, - bounds: @~[ty::param_bounds]}; + bounds: @~[ty::param_bounds], + self_ty: Option}; fn param_substs_to_str(tcx: ty::ctxt, substs: ¶m_substs) -> ~str { fmt!("param_substs {tys:%?, vtables:%?, bounds:%?}", @@ -220,6 +223,10 @@ type fn_ctxt = @{ mut llreturn: BasicBlockRef, // The 'self' value currently in use in this function, if there // is one. + // + // NB: This is the type of the self *variable*, not the self *type*. The + // self type is set only for default methods, while the self variable is + // set for all methods. mut llself: Option, // The a value alloca'd for calls to upcalls.rust_personality. Used when // outputting the resume instruction. @@ -240,6 +247,9 @@ type fn_ctxt = @{ // a user-defined function. id: ast::node_id, + // The def_id of the impl we're inside, or None if we aren't inside one. + impl_id: Option, + // If this function is being monomorphized, this contains the type // substitutions used. 
param_substs: Option, @@ -1110,7 +1120,11 @@ enum mono_param_id { datum::DatumMode), } -type mono_id_ = {def: ast::def_id, params: ~[mono_param_id]}; +type mono_id_ = { + def: ast::def_id, + params: ~[mono_param_id], + impl_did_opt: Option +}; type mono_id = @mono_id_; @@ -1193,7 +1207,9 @@ fn path_str(sess: session::session, p: path) -> ~str { fn monomorphize_type(bcx: block, t: ty::t) -> ty::t { match bcx.fcx.param_substs { - Some(substs) => ty::subst_tps(bcx.tcx(), substs.tys, t), + Some(substs) => { + ty::subst_tps(bcx.tcx(), substs.tys, substs.self_ty, t) + } _ => { assert !ty::type_has_params(t); t } } } @@ -1213,7 +1229,9 @@ fn node_id_type_params(bcx: block, id: ast::node_id) -> ~[ty::t] { let params = ty::node_id_to_type_params(tcx, id); match bcx.fcx.param_substs { Some(substs) => { - vec::map(params, |t| ty::subst_tps(tcx, substs.tys, *t)) + do vec::map(params) |t| { + ty::subst_tps(tcx, substs.tys, substs.self_ty, *t) + } } _ => params } @@ -1241,7 +1259,9 @@ fn resolve_vtable_in_fn_ctxt(fcx: fn_ctxt, vt: typeck::vtable_origin) typeck::vtable_static(trait_id, tys, sub) => { let tys = match fcx.param_substs { Some(substs) => { - vec::map(tys, |t| ty::subst_tps(tcx, substs.tys, *t)) + do vec::map(tys) |t| { + ty::subst_tps(tcx, substs.tys, substs.self_ty, *t) + } } _ => tys }; diff --git a/src/rustc/middle/trans/expr.rs b/src/rustc/middle/trans/expr.rs index 333d76a91ee68..08abe986be6ff 100644 --- a/src/rustc/middle/trans/expr.rs +++ b/src/rustc/middle/trans/expr.rs @@ -793,7 +793,9 @@ fn trans_local_var(bcx: block, def: ast::def) -> Datum { // This cast should not be necessary. We should cast self *once*, // but right now this conflicts with default methods. - let llselfty = T_ptr(type_of::type_of(bcx.ccx(), self_info.t)); + let real_self_ty = monomorphize_type(bcx, self_info.t); + let llselfty = T_ptr(type_of::type_of(bcx.ccx(), real_self_ty)); + let casted_val = PointerCast(bcx, self_info.v, llselfty); Datum { val: casted_val, diff --git a/src/rustc/middle/trans/foreign.rs b/src/rustc/middle/trans/foreign.rs index dbf5ef810462f..5a6260ae27008 100644 --- a/src/rustc/middle/trans/foreign.rs +++ b/src/rustc/middle/trans/foreign.rs @@ -794,7 +794,7 @@ fn trans_intrinsic(ccx: @crate_ctxt, decl: ValueRef, item: @ast::foreign_item, { debug!("trans_intrinsic(item.ident=%s)", ccx.sess.str_of(item.ident)); - let fcx = new_fn_ctxt_w_id(ccx, path, decl, item.id, + let fcx = new_fn_ctxt_w_id(ccx, path, decl, item.id, None, Some(substs), Some(item.span)); let mut bcx = top_scope_block(fcx, None), lltop = bcx.llbb; match ccx.sess.str_of(item.ident) { @@ -1025,7 +1025,7 @@ fn trans_foreign_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl, ))); let llty = type_of_fn_from_ty(ccx, t); let llfndecl = decl_internal_cdecl_fn(ccx.llmod, ps, llty); - trans_fn(ccx, path, decl, body, llfndecl, no_self, None, id); + trans_fn(ccx, path, decl, body, llfndecl, no_self, None, id, None); return llfndecl; } diff --git a/src/rustc/middle/trans/inline.rs b/src/rustc/middle/trans/inline.rs index ce9088d4b55c3..d3cc23094ee87 100644 --- a/src/rustc/middle/trans/inline.rs +++ b/src/rustc/middle/trans/inline.rs @@ -5,9 +5,12 @@ use syntax::ast_map::{path, path_mod, path_name}; use base::{trans_item, get_item_val, self_arg, trans_fn, impl_self, get_insn_ctxt}; -fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id) - -> ast::def_id -{ +// `translate` will be true if this function is allowed to translate the +// item and false otherwise. 
Currently, this parameter is set to false when +// translating default methods. +fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id, + translate: bool) + -> ast::def_id { let _icx = ccx.insn_ctxt("maybe_instantiate_inline"); match ccx.external.find(fn_id) { Some(Some(node_id)) => { @@ -31,7 +34,7 @@ fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id) csearch::found(ast::ii_item(item)) => { ccx.external.insert(fn_id, Some(item.id)); ccx.stats.n_inlines += 1; - trans_item(ccx, *item); + if translate { trans_item(ccx, *item); } local_def(item.id) } csearch::found(ast::ii_foreign(item)) => { @@ -53,7 +56,7 @@ fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id) _ => ccx.sess.bug(~"maybe_instantiate_inline: item has a \ non-enum parent") } - trans_item(ccx, *item); + if translate { trans_item(ccx, *item); } local_def(my_id) } csearch::found_parent(_, _) => { @@ -65,13 +68,14 @@ fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id) ccx.external.insert(fn_id, Some(mth.id)); let {bounds: impl_bnds, region_param: _, ty: impl_ty} = ty::lookup_item_type(ccx.tcx, impl_did); - if (*impl_bnds).len() + mth.tps.len() == 0u { + if translate && (*impl_bnds).len() + mth.tps.len() == 0u { let llfn = get_item_val(ccx, mth.id); let path = vec::append( ty::item_path(ccx.tcx, impl_did), ~[path_name(mth.ident)]); trans_fn(ccx, path, mth.decl, mth.body, - llfn, impl_self(impl_ty), None, mth.id); + llfn, impl_self(impl_ty), None, mth.id, + Some(impl_did)); } local_def(mth.id) } @@ -83,3 +87,4 @@ fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id) } } } + diff --git a/src/rustc/middle/trans/meth.rs b/src/rustc/middle/trans/meth.rs index 96cf7fabd1f76..89b101ab87598 100644 --- a/src/rustc/middle/trans/meth.rs +++ b/src/rustc/middle/trans/meth.rs @@ -28,7 +28,7 @@ see `trans::base::lval_static_fn()` or `trans::base::monomorphic_fn()`. */ fn trans_impl(ccx: @crate_ctxt, path: path, name: ast::ident, methods: ~[@ast::method], tps: ~[ast::ty_param], - self_ty: Option) { + self_ty: Option, id: ast::node_id) { let _icx = ccx.insn_ctxt("impl::trans_impl"); if tps.len() > 0u { return; } let sub_path = vec::append_one(path, path_name(name)); @@ -36,7 +36,22 @@ fn trans_impl(ccx: @crate_ctxt, path: path, name: ast::ident, if method.tps.len() == 0u { let llfn = get_item_val(ccx, method.id); let path = vec::append_one(sub_path, path_name(method.ident)); - trans_method(ccx, path, *method, None, self_ty, llfn); + + let param_substs_opt; + match self_ty { + None => param_substs_opt = None, + Some(self_ty) => { + param_substs_opt = Some({ + tys: ~[], + vtables: None, + bounds: @~[], + self_ty: Some(self_ty) + }); + } + } + + trans_method(ccx, path, *method, param_substs_opt, self_ty, llfn, + ast_util::local_def(id)); } } } @@ -54,13 +69,15 @@ Translates a (possibly monomorphized) method body. will be none if this is not a default method and must always be present if this is a default method. 
- `llfn`: the LLVM ValueRef for the method +- `impl_id`: the node ID of the impl this method is inside */ fn trans_method(ccx: @crate_ctxt, path: path, method: &ast::method, param_substs: Option, base_self_ty: Option, - llfn: ValueRef) { + llfn: ValueRef, + impl_id: ast::def_id) { // figure out how self is being passed let self_arg = match method.self_ty.node { @@ -76,8 +93,10 @@ fn trans_method(ccx: @crate_ctxt, Some(provided_self_ty) => self_ty = provided_self_ty } let self_ty = match param_substs { - None => self_ty, - Some({tys: ref tys, _}) => ty::subst_tps(ccx.tcx, *tys, self_ty) + None => self_ty, + Some({tys: ref tys, _}) => { + ty::subst_tps(ccx.tcx, *tys, None, self_ty) + } }; match method.self_ty.node { ast::sty_value => { @@ -98,15 +117,20 @@ fn trans_method(ccx: @crate_ctxt, llfn, self_arg, param_substs, - method.id); + method.id, + Some(impl_id)); } -fn trans_self_arg(bcx: block, base: @ast::expr, +fn trans_self_arg(bcx: block, + base: @ast::expr, mentry: typeck::method_map_entry) -> Result { let _icx = bcx.insn_ctxt("impl::trans_self_arg"); let mut temp_cleanups = ~[]; + + // Compute the mode and type of self. let self_arg = {mode: mentry.self_arg.mode, ty: monomorphize_type(bcx, mentry.self_arg.ty)}; + let result = trans_arg_expr(bcx, self_arg, base, &mut temp_cleanups, None, DontAutorefArg); @@ -120,11 +144,31 @@ fn trans_self_arg(bcx: block, base: @ast::expr, } fn trans_method_callee(bcx: block, callee_id: ast::node_id, - self: @ast::expr, mentry: typeck::method_map_entry) - -> Callee -{ + self: @ast::expr, mentry: typeck::method_map_entry) -> + Callee { let _icx = bcx.insn_ctxt("impl::trans_method_callee"); - match mentry.origin { + + // Replace method_self with method_static here. + let mut origin = mentry.origin; + match origin { + typeck::method_self(copy trait_id, copy method_index) => { + // Get the ID of the impl we're inside. + let impl_def_id = bcx.fcx.impl_id.get(); + + io::println(fmt!("impl_def_id is %?", impl_def_id)); + + // Get the ID of the method we're calling. + let method_name = + ty::trait_methods(bcx.tcx(), trait_id)[method_index].ident; + let method_id = method_with_name(bcx.ccx(), impl_def_id, + method_name); + origin = typeck::method_static(method_id); + } + typeck::method_static(*) | typeck::method_param(*) | + typeck::method_trait(*) => {} + } + + match origin { typeck::method_static(did) => { let callee_fn = callee::trans_fn_ref(bcx, did, callee_id); let Result {bcx, val} = trans_self_arg(bcx, self, mentry); @@ -155,7 +199,7 @@ fn trans_method_callee(bcx: block, callee_id: ast::node_id, trans_trait_callee(bcx, callee_id, off, self, vstore) } typeck::method_self(*) => { - bcx.tcx().sess.span_bug(self.span, ~"self method call"); + fail ~"method_self should have been handled above" } } } @@ -519,13 +563,21 @@ fn vtable_id(ccx: @crate_ctxt, origin: typeck::vtable_origin) -> mono_id { match origin { typeck::vtable_static(impl_id, substs, sub_vtables) => { monomorphize::make_mono_id( - ccx, impl_id, substs, - if (*sub_vtables).len() == 0u { None } - else { Some(sub_vtables) }, None) + ccx, + impl_id, + substs, + if (*sub_vtables).len() == 0u { + None + } else { + Some(sub_vtables) + }, + None, + None) } typeck::vtable_trait(trait_id, substs) => { @{def: trait_id, - params: vec::map(substs, |t| mono_precise(*t, None))} + params: vec::map(substs, |t| mono_precise(*t, None)), + impl_did_opt: None} } // can't this be checked at the callee? 
_ => fail ~"vtable_id" @@ -571,7 +623,7 @@ fn make_impl_vtable(ccx: @crate_ctxt, impl_id: ast::def_id, substs: ~[ty::t], let has_tps = (*ty::lookup_item_type(ccx.tcx, impl_id).bounds).len() > 0u; make_vtable(ccx, vec::map(*ty::trait_methods(tcx, trt_id), |im| { - let fty = ty::subst_tps(tcx, substs, ty::mk_fn(tcx, im.fty)); + let fty = ty::subst_tps(tcx, substs, None, ty::mk_fn(tcx, im.fty)); if (*im.tps).len() > 0u || ty::type_has_self(fty) { C_null(T_ptr(T_nil())) } else { @@ -580,10 +632,11 @@ fn make_impl_vtable(ccx: @crate_ctxt, impl_id: ast::def_id, substs: ~[ty::t], // If the method is in another crate, need to make an inlined // copy first if m_id.crate != ast::local_crate { - m_id = inline::maybe_instantiate_inline(ccx, m_id); + // XXX: Set impl ID here? + m_id = inline::maybe_instantiate_inline(ccx, m_id, true); } monomorphize::monomorphic_fn(ccx, m_id, substs, - Some(vtables), None).val + Some(vtables), None, None).val } else if m_id.crate == ast::local_crate { get_item_val(ccx, m_id.node) } else { diff --git a/src/rustc/middle/trans/monomorphize.rs b/src/rustc/middle/trans/monomorphize.rs index 914b733df198d..f6763206e18ce 100644 --- a/src/rustc/middle/trans/monomorphize.rs +++ b/src/rustc/middle/trans/monomorphize.rs @@ -16,9 +16,9 @@ fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id, real_substs: ~[ty::t], vtables: Option, - ref_id: Option) - -> {val: ValueRef, must_cast: bool} -{ + impl_did_opt: Option, + ref_id: Option) -> + {val: ValueRef, must_cast: bool} { let _icx = ccx.insn_ctxt("monomorphic_fn"); let mut must_cast = false; let substs = vec::map(real_substs, |t| { @@ -31,7 +31,8 @@ fn monomorphic_fn(ccx: @crate_ctxt, for real_substs.each() |s| { assert !ty::type_has_params(*s); } for substs.each() |s| { assert !ty::type_has_params(*s); } let param_uses = type_use::type_uses_for(ccx, fn_id, substs.len()); - let hash_id = make_mono_id(ccx, fn_id, substs, vtables, Some(param_uses)); + let hash_id = make_mono_id(ccx, fn_id, substs, vtables, impl_did_opt, + Some(param_uses)); if vec::any(hash_id.params, |p| match *p { mono_precise(_, _) => false, _ => true }) { must_cast = true; @@ -73,8 +74,11 @@ fn monomorphic_fn(ccx: @crate_ctxt, } ast_map::node_dtor(_, dtor, _, pt) => (pt, special_idents::dtor, dtor.span), - ast_map::node_trait_method(*) => { - ccx.tcx.sess.bug(~"Can't monomorphize a trait method") + ast_map::node_trait_method(@ast::provided(m), _, pt) => { + (pt, m.ident, m.span) + } + ast_map::node_trait_method(@ast::required(_), _, _) => { + ccx.tcx.sess.bug(~"Can't monomorphize a required trait method") } ast_map::node_expr(*) => { ccx.tcx.sess.bug(~"Can't monomorphize an expr") @@ -93,7 +97,18 @@ fn monomorphic_fn(ccx: @crate_ctxt, ccx.tcx.sess.bug(~"Can't monomorphize a local") } }; - let mono_ty = ty::subst_tps(ccx.tcx, substs, llitem_ty); + + // Look up the impl type if we're translating a default method. + // XXX: Generics. 
+ let impl_ty_opt; + match impl_did_opt { + None => impl_ty_opt = None, + Some(impl_did) => { + impl_ty_opt = Some(ty::lookup_item_type(ccx.tcx, impl_did).ty); + } + } + + let mono_ty = ty::subst_tps(ccx.tcx, substs, impl_ty_opt, llitem_ty); let llfty = type_of_fn_from_ty(ccx, mono_ty); ccx.stats.n_monos += 1; @@ -118,12 +133,18 @@ fn monomorphic_fn(ccx: @crate_ctxt, lldecl }; - let psubsts = Some({tys: substs, vtables: vtables, bounds: tpt.bounds}); + let psubsts = Some({ + tys: substs, + vtables: vtables, + bounds: tpt.bounds, + self_ty: impl_ty_opt + }); + let lldecl = match map_node { ast_map::node_item(i@@{node: ast::item_fn(decl, _, _, body), _}, _) => { let d = mk_lldecl(); set_inline_hint_if_appr(i.attrs, d); - trans_fn(ccx, pt, decl, body, d, no_self, psubsts, fn_id.node); + trans_fn(ccx, pt, decl, body, d, no_self, psubsts, fn_id.node, None); d } ast_map::node_item(*) => { @@ -154,11 +175,19 @@ fn monomorphic_fn(ccx: @crate_ctxt, } d } - ast_map::node_method(mth, _, _) => { + ast_map::node_method(mth, supplied_impl_did, _) => { // XXX: What should the self type be here? let d = mk_lldecl(); set_inline_hint_if_appr(mth.attrs, d); - meth::trans_method(ccx, pt, mth, psubsts, None, d); + + // Override the impl def ID if necessary. + let impl_did; + match impl_did_opt { + None => impl_did = supplied_impl_did, + Some(override_impl_did) => impl_did = override_impl_did + } + + meth::trans_method(ccx, pt, mth, psubsts, None, d, impl_did); d } ast_map::node_dtor(_, dtor, _, pt) => { @@ -171,6 +200,15 @@ fn monomorphic_fn(ccx: @crate_ctxt, trans_class_dtor(ccx, *pt, dtor.node.body, dtor.node.id, psubsts, Some(hash_id), parent_id) } + ast_map::node_trait_method(@ast::provided(mth), _, pt) => { + let d = mk_lldecl(); + set_inline_hint_if_appr(mth.attrs, d); + io::println(fmt!("monomorphic_fn impl_did_opt is %?", impl_did_opt)); + meth::trans_method(ccx, *pt, mth, psubsts, None, d, + impl_did_opt.get()); + d + } + // Ugh -- but this ensures any new variants won't be forgotten ast_map::node_expr(*) | ast_map::node_stmt(*) | @@ -226,6 +264,7 @@ fn normalize_for_monomorphization(tcx: ty::ctxt, ty: ty::t) -> Option { fn make_mono_id(ccx: @crate_ctxt, item: ast::def_id, substs: ~[ty::t], vtables: Option, + impl_did_opt: Option, param_uses: Option<~[type_use::type_uses]>) -> mono_id { let precise_param_ids = match vtables { Some(vts) => { @@ -295,5 +334,5 @@ fn make_mono_id(ccx: @crate_ctxt, item: ast::def_id, substs: ~[ty::t], }) } }; - @{def: item, params: param_ids} + @{def: item, params: param_ids, impl_did_opt: impl_did_opt} } diff --git a/src/rustc/middle/trans/type_use.rs b/src/rustc/middle/trans/type_use.rs index 1f9ad20dd03e7..8ccc8a28de3a2 100644 --- a/src/rustc/middle/trans/type_use.rs +++ b/src/rustc/middle/trans/type_use.rs @@ -40,8 +40,13 @@ fn type_uses_for(ccx: @crate_ctxt, fn_id: def_id, n_tps: uint) Some(uses) => return uses, None => () } - let fn_id_loc = if fn_id.crate == local_crate { fn_id } - else { inline::maybe_instantiate_inline(ccx, fn_id) }; + + let fn_id_loc = if fn_id.crate == local_crate { + fn_id + } else { + inline::maybe_instantiate_inline(ccx, fn_id, true) + }; + // Conservatively assume full use for recursive loops ccx.type_use_cache.insert(fn_id, vec::from_elem(n_tps, 3u)); diff --git a/src/rustc/middle/ty.rs b/src/rustc/middle/ty.rs index cc132a431a330..a0ca46ee01782 100644 --- a/src/rustc/middle/ty.rs +++ b/src/rustc/middle/ty.rs @@ -19,6 +19,7 @@ use syntax::ast::*; use syntax::print::pprust::*; use util::ppaux::{ty_to_str, proto_ty_to_str, tys_to_str}; 
+export ProvidedMethodSource; export TyVid, IntVid, FnVid, RegionVid, vid; export br_hashmap; export is_instantiable; @@ -207,7 +208,8 @@ type method = {ident: ast::ident, tps: @~[param_bounds], fty: FnTy, self_ty: ast::self_ty_, - vis: ast::visibility}; + vis: ast::visibility, + def_id: ast::def_id}; type mt = {ty: t, mutbl: ast::mutability}; @@ -314,6 +316,11 @@ enum AutoRefKind { AutoPtr } +struct ProvidedMethodSource { + method_id: ast::def_id, + impl_id: ast::def_id +} + type ctxt = @{diag: syntax::diagnostic::span_handler, interner: HashMap, @@ -356,7 +363,8 @@ type ctxt = adjustments: HashMap, normalized_cache: HashMap, lang_items: middle::lang_items::LanguageItems, - legacy_boxed_traits: HashMap}; + legacy_boxed_traits: HashMap, + provided_method_sources: HashMap}; enum tbox_flag { has_params = 1, @@ -879,7 +887,8 @@ fn mk_ctxt(s: session::session, adjustments: HashMap(), normalized_cache: new_ty_hash(), lang_items: move lang_items, - legacy_boxed_traits: HashMap()} + legacy_boxed_traits: HashMap(), + provided_method_sources: HashMap()} } @@ -1392,13 +1401,23 @@ fn fold_region(cx: ctxt, t0: t, fldop: fn(region, bool) -> region) -> t { } // Substitute *only* type parameters. Used in trans where regions are erased. -fn subst_tps(cx: ctxt, tps: &[t], typ: t) -> t { - if tps.len() == 0u { return typ; } +fn subst_tps(cx: ctxt, tps: &[t], self_ty_opt: Option, typ: t) -> t { + if tps.len() == 0u && self_ty_opt.is_none() { return typ; } let tb = ty::get(typ); - if !tbox_has_flag(tb, has_params) { return typ; } + if self_ty_opt.is_none() && !tbox_has_flag(tb, has_params) { return typ; } match tb.sty { - ty_param(p) => tps[p.idx], - ref sty => fold_sty_to_ty(cx, sty, |t| subst_tps(cx, tps, t)) + ty_param(p) => tps[p.idx], + ty_self => { + match self_ty_opt { + None => cx.sess.bug(~"ty_self unexpected here"), + Some(self_ty) => { + subst_tps(cx, tps, self_ty_opt, self_ty) + } + } + } + ref sty => { + fold_sty_to_ty(cx, sty, |t| subst_tps(cx, tps, self_ty_opt, t)) + } } } @@ -3328,20 +3347,18 @@ fn store_trait_methods(cx: ctxt, id: ast::node_id, ms: @~[method]) { cx.trait_method_cache.insert(ast_util::local_def(id), ms); } -fn provided_trait_methods(cx: ctxt, id: ast::def_id) -> ~[@ast::method] { +fn provided_trait_methods(cx: ctxt, id: ast::def_id) -> ~[ast::ident] { if is_local(id) { match cx.items.find(id.node) { Some(ast_map::node_item(@{node: item_trait(_, _, ms),_}, _)) => match ast_util::split_trait_methods(ms) { - (_, p) => p + (_, p) => p.map(|method| method.ident) }, _ => cx.sess.bug(fmt!("provided_trait_methods: %? is not a trait", id)) } - } - else { - // FIXME #2794: default methods for traits don't work cross-crate - ~[] + } else { + csearch::get_provided_trait_methods(cx, id).map(|info| info.ty.ident) } } @@ -3599,10 +3616,12 @@ fn enum_variant_with_id(cx: ctxt, enum_id: ast::def_id, // the type cache. Returns the type parameters and type. fn lookup_item_type(cx: ctxt, did: ast::def_id) -> ty_param_bounds_and_ty { match cx.tcache.find(did) { - Some(tpt) => return tpt, - None => { + Some(tpt) => { // The item is in this crate. 
The caller should have added it to the // type cache already + return tpt; + } + None => { assert did.crate != ast::local_crate; let tyt = csearch::get_type(cx, did); cx.tcache.insert(did, tyt); diff --git a/src/rustc/middle/typeck.rs b/src/rustc/middle/typeck.rs index 7cb04bc0ea3c1..077d34700b8d4 100644 --- a/src/rustc/middle/typeck.rs +++ b/src/rustc/middle/typeck.rs @@ -62,6 +62,7 @@ use util::ppaux::{ty_to_str, tys_to_str, region_to_str, use util::common::{indent, indenter}; use std::list; use list::{List, Nil, Cons}; +use dvec::DVec; export check_crate; export infer; @@ -174,12 +175,6 @@ impl vtable_origin { type vtable_map = HashMap; -// Stores information about provided methods, aka "default methods" in traits. -// Maps from a trait's def_id to a MethodInfo about -// that method in that trait. -type provided_methods_map = HashMap; - type ty_param_substs_and_ty = {substs: ty::substs, ty: ty::t}; type crate_ctxt_ = {// A mapping from method call sites to traits that have @@ -188,7 +183,6 @@ type crate_ctxt_ = {// A mapping from method call sites to traits that have method_map: method_map, vtable_map: vtable_map, coherence_info: @coherence::CoherenceInfo, - provided_methods_map: provided_methods_map, tcx: ty::ctxt}; enum crate_ctxt { @@ -340,7 +334,6 @@ fn check_crate(tcx: ty::ctxt, method_map: std::map::HashMap(), vtable_map: std::map::HashMap(), coherence_info: @coherence::CoherenceInfo(), - provided_methods_map: std::map::HashMap(), tcx: tcx}); collect::collect_item_types(ccx, crate); coherence::check_coherence(ccx, crate); diff --git a/src/rustc/middle/typeck/check/method.rs b/src/rustc/middle/typeck/check/method.rs index 0d71d61bdaadb..04be004754828 100644 --- a/src/rustc/middle/typeck/check/method.rs +++ b/src/rustc/middle/typeck/check/method.rs @@ -69,7 +69,7 @@ obtained the type `Foo`, we would never match this method. */ -use coherence::get_base_type_def_id; +use coherence::{ProvidedMethodInfo, get_base_type_def_id}; use middle::resolve::{Impl, MethodInfo}; use middle::ty::*; use syntax::ast::{def_id, sty_by_ref, sty_value, sty_region, sty_box, @@ -146,7 +146,7 @@ impl LookupContext { // Prepare the list of candidates self.push_inherent_candidates(self_ty); - self.push_extension_candidates(); + self.push_extension_candidates(self_ty); let enum_dids = DVec(); let mut self_ty = self_ty; @@ -251,7 +251,7 @@ impl LookupContext { } } - fn push_extension_candidates(&self) { + fn push_extension_candidates(&self, self_ty: ty::t) { // If the method being called is associated with a trait, then // find all the impls of that trait. Each of those are // candidates. @@ -259,6 +259,8 @@ impl LookupContext { for opt_applicable_traits.each |applicable_traits| { for applicable_traits.each |trait_did| { let coherence_info = self.fcx.ccx.coherence_info; + + // Look for explicit implementations. let opt_impl_infos = coherence_info.extension_methods.find(*trait_did); for opt_impl_infos.each |impl_infos| { @@ -267,12 +269,21 @@ impl LookupContext { &self.extension_candidates, *impl_info); } } + + // Look for default methods. 
+ match coherence_info.provided_methods.find(*trait_did) { + Some(methods) => { + self.push_candidates_from_provided_methods( + &self.extension_candidates, self_ty, *trait_did, + methods); + } + None => {} + } } } } - fn push_inherent_candidates_from_param(&self, param_ty: param_ty) - { + fn push_inherent_candidates_from_param(&self, param_ty: param_ty) { debug!("push_inherent_candidates_from_param(param_ty=%?)", param_ty); let _indenter = indenter(); @@ -348,8 +359,7 @@ impl LookupContext { self_ty: ty::t, did: def_id, substs: &ty::substs, - vstore: ty::vstore) - { + vstore: ty::vstore) { debug!("push_inherent_candidates_from_trait(did=%s, substs=%s)", self.did_to_str(did), substs_to_str(self.tcx(), substs)); @@ -423,8 +433,7 @@ impl LookupContext { }); } - fn push_inherent_impl_candidates_for_type(did: def_id) - { + fn push_inherent_impl_candidates_for_type(did: def_id) { let opt_impl_infos = self.fcx.ccx.coherence_info.inherent_methods.find(did); for opt_impl_infos.each |impl_infos| { @@ -436,8 +445,7 @@ impl LookupContext { } fn push_candidates_from_impl(&self, candidates: &DVec, - impl_info: &resolve::Impl) - { + impl_info: &resolve::Impl) { if !self.impl_dups.insert(impl_info.did, ()) { return; // already visited } @@ -471,12 +479,47 @@ impl LookupContext { }); } + fn push_candidates_from_provided_methods( + &self, + candidates: &DVec, + self_ty: ty::t, + trait_def_id: def_id, + methods: @DVec<@ProvidedMethodInfo>) { + debug!("(pushing candidates from provided methods) considering trait \ + id %d:%d", + trait_def_id.crate, + trait_def_id.node); + + for methods.each |provided_method_info| { + if provided_method_info.method_info.ident != self.m_name { loop; } + + debug!("(pushing candidates from provided methods) adding \ + candidate"); + + // XXX: Needs to support generics. + let dummy_substs = { self_r: None, self_ty: None, tps: ~[] }; + let (impl_ty, impl_substs) = + self.create_rcvr_ty_and_substs_for_method( + provided_method_info.method_info.self_type, + self_ty, + dummy_substs); + + candidates.push(Candidate { + rcvr_ty: impl_ty, + rcvr_substs: move impl_substs, + num_method_tps: provided_method_info.method_info.n_tps, + self_mode: get_mode_from_self_type( + provided_method_info.method_info.self_type), + origin: method_static(provided_method_info.method_info.did) + }); + } + } + fn create_rcvr_ty_and_substs_for_method(&self, self_decl: ast::self_ty_, self_ty: ty::t, +self_substs: ty::substs) - -> (ty::t, ty::substs) - { + -> (ty::t, ty::substs) { // If the self type includes a region (like &self), we need to // ensure that the receiver substitutions have a self region. // If the receiver type does not itself contain borrowed @@ -693,8 +736,7 @@ impl LookupContext { fn confirm_candidate(&self, self_ty: ty::t, candidate: &Candidate) - -> method_map_entry - { + -> method_map_entry { let tcx = self.tcx(); let fty = self.fn_ty_from_origin(&candidate.origin); diff --git a/src/rustc/middle/typeck/check/vtable.rs b/src/rustc/middle/typeck/check/vtable.rs index 00fb134f2be55..345b8246b4278 100644 --- a/src/rustc/middle/typeck/check/vtable.rs +++ b/src/rustc/middle/typeck/check/vtable.rs @@ -444,7 +444,7 @@ fn connect_trait_tps(fcx: @fn_ctxt, expr: @ast::expr, impl_tys: ~[ty::t], // XXX: This should work for multiple traits. 
let ity = ty::impl_traits(tcx, impl_did, vstore)[0]; - let trait_ty = ty::subst_tps(tcx, impl_tys, ity); + let trait_ty = ty::subst_tps(tcx, impl_tys, None, ity); debug!("(connect trait tps) trait type is %?, impl did is %?", ty::get(trait_ty).sty, impl_did); match ty::get(trait_ty).sty { diff --git a/src/rustc/middle/typeck/coherence.rs b/src/rustc/middle/typeck/coherence.rs index 89cd696eb6fbd..9a9a8dda6e4d8 100644 --- a/src/rustc/middle/typeck/coherence.rs +++ b/src/rustc/middle/typeck/coherence.rs @@ -4,12 +4,13 @@ // has at most one implementation for each type. Then we build a mapping from // each trait in the system to its implementations. -use metadata::csearch::{each_path, get_impl_traits, get_impls_for_mod}; +use metadata::csearch::{ProvidedTraitMethodInfo, each_path, get_impl_traits}; +use metadata::csearch::{get_impls_for_mod}; use metadata::cstore::{cstore, iter_crate_data}; use metadata::decoder::{dl_def, dl_field, dl_impl}; use middle::resolve::{Impl, MethodInfo}; -use middle::ty::{get, lookup_item_type, subst, t, ty_box}; -use middle::ty::{ty_uniq, ty_ptr, ty_rptr, ty_enum}; +use middle::ty::{ProvidedMethodSource, get, lookup_item_type, subst, t}; +use middle::ty::{ty_box, ty_uniq, ty_ptr, ty_rptr, ty_enum}; use middle::ty::{ty_class, ty_nil, ty_bot, ty_bool, ty_int, ty_uint}; use middle::ty::{ty_float, ty_estr, ty_evec, ty_rec}; use middle::ty::{ty_fn, ty_trait, ty_tup, ty_infer}; @@ -17,7 +18,7 @@ use middle::ty::{ty_param, ty_self, ty_type, ty_opaque_box}; use middle::ty::{ty_opaque_closure_ptr, ty_unboxed_vec, type_is_ty_var}; use middle::typeck::infer::{infer_ctxt, can_mk_subty}; use middle::typeck::infer::{new_infer_ctxt, resolve_ivar, resolve_type}; -use syntax::ast::{crate, def_id, def_mod}; +use syntax::ast::{crate, def_id, def_mod, def_ty}; use syntax::ast::{item, item_class, item_const, item_enum, item_fn}; use syntax::ast::{item_foreign_mod, item_impl, item_mac, item_mod}; use syntax::ast::{item_trait, item_ty, local_crate, method, node_id}; @@ -118,6 +119,21 @@ fn method_to_MethodInfo(ast_method: @method) -> @MethodInfo { } } +// Stores the method info and definition ID of the associated trait method for +// each instantiation of each provided method. +struct ProvidedMethodInfo { + method_info: @MethodInfo, + trait_method_def_id: def_id +} + +// Stores information about provided methods (a.k.a. default methods) in +// implementations. +// +// This is a map from ID of each implementation to the method info and trait +// method ID of each of the default methods belonging to the trait that that +// implementation implements. +type ProvidedMethodsMap = HashMap>; + struct CoherenceInfo { // Contains implementations of methods that are inherent to a type. // Methods in these implementations don't need to be exported. @@ -128,14 +144,20 @@ struct CoherenceInfo { extension_methods: HashMap>, // A mapping from a supertrait to its subtraits. - supertrait_to_subtraits: HashMap> + supertrait_to_subtraits: HashMap>, + + // A mapping from an implementation ID to the method info and trait method + // ID of the provided (a.k.a. default) methods in the traits that that + // implementation implements. 
+ provided_methods: ProvidedMethodsMap, } fn CoherenceInfo() -> CoherenceInfo { CoherenceInfo { inherent_methods: HashMap(), extension_methods: HashMap(), - supertrait_to_subtraits: HashMap() + supertrait_to_subtraits: HashMap(), + provided_methods: HashMap(), } } @@ -165,68 +187,6 @@ struct CoherenceChecker { } impl CoherenceChecker { - // Create a mapping containing a MethodInfo for every provided - // method in every trait. - fn build_provided_methods_map(crate: @crate) { - let sess = self.crate_context.tcx.sess; - - let pmm = self.crate_context.provided_methods_map; - - visit_crate(*crate, (), mk_simple_visitor(@{ - visit_item: |item| { - match item.node { - item_trait(_, _, trait_methods) => { - for trait_methods.each |trait_method| { - debug!("(building provided methods map) checking \ - trait `%s` with id %d", - sess.str_of(item.ident), item.id); - - match *trait_method { - required(_) => { /* fall through */} - provided(m) => { - // For every provided method in the - // trait, store a MethodInfo. - let mi = method_to_MethodInfo(m); - - match pmm.find(item.id) { - Some(mis) => { - // If the trait already has an - // entry in the - // provided_methods_map, we just - // need to add this method to - // that entry. - debug!("(building provided \ - methods map) adding \ - method `%s` to entry for \ - existing trait", - sess.str_of(mi.ident)); - let mut method_infos = mis; - method_infos.push(mi); - pmm.insert(item.id, method_infos); - } - None => { - // If the trait doesn't have an - // entry yet, create one. - debug!("(building provided \ - methods map) creating new \ - entry for method `%s`", - sess.str_of(mi.ident)); - pmm.insert(item.id, ~[mi]); - } - } - } - } - } - } - _ => { - // Nothing to do. - } - }; - }, - .. *default_simple_visitor() - })); - } - fn check_coherence(crate: @crate) { // Check implementations and traits. This populates the tables // containing the inherent methods and extension methods. It also @@ -307,6 +267,7 @@ impl CoherenceChecker { self.crate_context.tcx.sess.parse_sess.interner), self.crate_context.tcx.sess.str_of(item.ident)); + self.instantiate_default_methods(item.id, trait_did); let implementation = self.create_impl_from_item(item); self.add_trait_method(trait_did, implementation); } @@ -321,6 +282,7 @@ impl CoherenceChecker { // Nothing to do. } Some(base_type_def_id) => { + // XXX: Gather up default methods? let implementation = self.create_impl_from_item(item); self.add_inherent_method(base_type_def_id, implementation); @@ -330,6 +292,68 @@ impl CoherenceChecker { } } + // Creates default method IDs and performs type substitutions for an impl + // and trait pair. Then, for each provided method in the trait, inserts a + // `ProvidedMethodInfo` instance into the `provided_method_sources` map. + fn instantiate_default_methods(impl_id: ast::node_id, + trait_did: ast::def_id) { + for self.each_provided_trait_method(trait_did) |trait_method| { + // Synthesize an ID. + let tcx = self.crate_context.tcx; + let new_id = syntax::parse::next_node_id(tcx.sess.parse_sess); + let new_did = local_def(new_id); + + // XXX: Perform substitutions. + let new_polytype = ty::lookup_item_type(tcx, trait_method.def_id); + tcx.tcache.insert(new_did, new_polytype); + + // Pair the new synthesized ID up with the + // ID of the method. 
+ let source = ProvidedMethodSource { + method_id: trait_method.def_id, + impl_id: local_def(impl_id) + }; + + self.crate_context.tcx.provided_method_sources.insert(new_did, + source); + + let provided_method_info = + @ProvidedMethodInfo { + method_info: @{ + did: new_did, + n_tps: trait_method.tps.len(), + ident: trait_method.ident, + self_type: trait_method.self_ty + }, + trait_method_def_id: trait_method.def_id + }; + + let pmm = self.crate_context.coherence_info.provided_methods; + match pmm.find(local_def(impl_id)) { + Some(mis) => { + // If the trait already has an entry in the + // provided_methods_map, we just need to add this + // method to that entry. + debug!("(checking implementation) adding method `%s` \ + to entry for existing trait", + self.crate_context.tcx.sess.str_of( + provided_method_info.method_info.ident)); + mis.push(provided_method_info); + } + None => { + // If the trait doesn't have an entry yet, create one. + debug!("(checking implementation) creating new entry \ + for method `%s`", + self.crate_context.tcx.sess.str_of( + provided_method_info.method_info.ident)); + let method_infos = @DVec(); + method_infos.push(provided_method_info); + pmm.insert(local_def(impl_id), method_infos); + } + } + } + } + fn register_inherited_trait(item: @item, supertraits: ~[@trait_ref]) { // XXX: This is wrong. We need to support substitutions; e.g. // trait Foo : Bar. @@ -354,8 +378,7 @@ impl CoherenceChecker { fn add_inherent_method(base_def_id: def_id, implementation: @Impl) { let implementation_list; match self.crate_context.coherence_info.inherent_methods - .find(base_def_id) { - + .find(base_def_id) { None => { implementation_list = @DVec(); self.crate_context.coherence_info.inherent_methods @@ -372,8 +395,7 @@ impl CoherenceChecker { fn add_trait_method(trait_id: def_id, implementation: @Impl) { let implementation_list; match self.crate_context.coherence_info.extension_methods - .find(trait_id) { - + .find(trait_id) { None => { implementation_list = @DVec(); self.crate_context.coherence_info.extension_methods @@ -413,6 +435,26 @@ impl CoherenceChecker { } } + fn each_provided_trait_method( + trait_did: ast::def_id, + f: &fn(x: &ty::method) -> bool) { + // Make a list of all the names of the provided methods. + // XXX: This is horrible. + let provided_method_idents = HashMap(); + let tcx = self.crate_context.tcx; + for ty::provided_trait_methods(tcx, trait_did).each |ident| { + provided_method_idents.insert(*ident, ()); + } + + for ty::trait_methods(tcx, trait_did).each |method| { + if provided_method_idents.contains_key(method.ident) { + if !f(method) { + break; + } + } + } + } + fn polytypes_unify(polytype_a: ty_param_bounds_and_ty, polytype_b: ty_param_bounds_and_ty) -> bool { @@ -449,7 +491,6 @@ impl CoherenceChecker { fn get_self_type_for_implementation(implementation: @Impl) -> ty_param_bounds_and_ty { - return self.crate_context.tcx.tcache.get(implementation.did); } @@ -552,33 +593,15 @@ impl CoherenceChecker { // Converts an implementation in the AST to an Impl structure. fn create_impl_from_item(item: @item) -> @Impl { - - fn add_provided_methods(inherent_methods: ~[@MethodInfo], - all_provided_methods: ~[@MethodInfo], - sess: driver::session::session) - -> ~[@MethodInfo] { - - let mut methods = inherent_methods; - - // If there's no inherent method with the same name as a - // provided method, add that provided method to `methods`. 
+ fn add_provided_methods(all_methods: &mut ~[@MethodInfo], + all_provided_methods: ~[@ProvidedMethodInfo], + sess: driver::session::session) { for all_provided_methods.each |provided_method| { - let mut method_inherent_to_impl = false; - for inherent_methods.each |inherent_method| { - if provided_method.ident == inherent_method.ident { - method_inherent_to_impl = true; - } - } - - if !method_inherent_to_impl { - debug!( - "(creating impl) adding provided method `%s` to impl", - sess.str_of(provided_method.ident)); - methods.push(*provided_method); - } + debug!( + "(creating impl) adding provided method `%s` to impl", + sess.str_of(provided_method.method_info.ident)); + vec::push(all_methods, provided_method.method_info); } - - return methods; } match item.node { @@ -598,24 +621,22 @@ impl CoherenceChecker { let trait_did = self.trait_ref_to_trait_def_id(*trait_ref); - match self.crate_context.provided_methods_map - .find(trait_did.node) { + match self.crate_context + .coherence_info + .provided_methods + .find(local_def(item.id)) { None => { debug!("(creating impl) trait with node_id `%d` \ has no provided methods", trait_did.node); /* fall through */ } - Some(all_provided) - => { + Some(all_provided) => { debug!("(creating impl) trait with node_id `%d` \ has provided methods", trait_did.node); - // Selectively add only those provided - // methods that aren't inherent to the - // trait. - - // XXX: could probably be doing this with filter. - methods = add_provided_methods( - methods, all_provided, + // Add all provided methods. + add_provided_methods( + &mut methods, + all_provided.get(), self.crate_context.tcx.sess); } } @@ -758,6 +779,41 @@ impl CoherenceChecker { } } + fn add_default_methods_for_external_trait(trait_def_id: ast::def_id) { + let tcx = self.crate_context.tcx; + let pmm = self.crate_context.coherence_info.provided_methods; + + if pmm.contains_key(trait_def_id) { return; } + + debug!("(adding default methods for trait) processing trait"); + + for csearch::get_provided_trait_methods(tcx, + trait_def_id).each |info| { + debug!("(adding default methods for trait) found default method"); + + // Create a new def ID for this provided method. + let parse_sess = &self.crate_context.tcx.sess.parse_sess; + let new_did = local_def(syntax::parse::next_node_id(*parse_sess)); + + let provided_method_info = + @ProvidedMethodInfo { + method_info: @{ + did: new_did, + n_tps: info.ty.tps.len(), + ident: info.ty.ident, + self_type: info.ty.self_ty + }, + trait_method_def_id: info.def_id + }; + + let method_infos = @DVec(); + method_infos.push(provided_method_info); + pmm.insert(trait_def_id, method_infos); + } + } + + // Adds implementations and traits from external crates to the coherence + // info. fn add_external_crates() { let impls_seen = HashMap(); @@ -768,20 +824,28 @@ impl CoherenceChecker { { crate: crate_number, node: 0 }); for each_path(crate_store, crate_number) |path_entry| { - let module_def_id; match path_entry.def_like { dl_def(def_mod(def_id)) => { - module_def_id = def_id; + self.add_impls_for_module(impls_seen, + crate_store, + def_id); + } + dl_def(def_ty(def_id)) => { + let tcx = self.crate_context.tcx; + let polytype = csearch::get_type(tcx, def_id); + match ty::get(polytype.ty).sty { + ty::ty_trait(*) => { + self.add_default_methods_for_external_trait( + def_id); + } + _ => {} + } } dl_def(_) | dl_impl(_) | dl_field => { // Skip this. 
loop; } } - - self.add_impls_for_module(impls_seen, - crate_store, - module_def_id); } } } @@ -789,7 +853,6 @@ impl CoherenceChecker { fn check_coherence(crate_context: @crate_ctxt, crate: @crate) { let coherence_checker = @CoherenceChecker(crate_context); - (*coherence_checker).build_provided_methods_map(crate); (*coherence_checker).check_coherence(crate); } diff --git a/src/rustc/middle/typeck/collect.rs b/src/rustc/middle/typeck/collect.rs index a38e69effdffc..9e51225f172f2 100644 --- a/src/rustc/middle/typeck/collect.rs +++ b/src/rustc/middle/typeck/collect.rs @@ -212,9 +212,15 @@ fn ensure_trait_methods(ccx: @crate_ctxt, id: ast::node_id, trait_ty: ty::t) { match tcx.items.get(id) { ast_map::node_item(@{node: ast::item_trait(params, _, ms), _}, _) => { store_methods::(ccx, id, ms, |m| { + let def_id; + match *m { + ast::required(ty_method) => def_id = local_def(ty_method.id), + ast::provided(method) => def_id = local_def(method.id) + } + let trait_bounds = ty_param_bounds(ccx, params); let ty_m = trait_method_to_ty_method(*m); - let method_ty = ty_of_ty_method(ccx, ty_m, region_paramd); + let method_ty = ty_of_ty_method(ccx, ty_m, region_paramd, def_id); if ty_m.self_ty.node == ast::sty_static { make_static_method_ty(ccx, ty_m, region_paramd, method_ty, trait_ty, trait_bounds); @@ -373,7 +379,7 @@ fn check_methods_against_trait(ccx: @crate_ctxt, let provided_methods = ty::provided_trait_methods(tcx, did); match vec::find(provided_methods, |provided_method| - provided_method.ident == trait_m.ident) { + *provided_method == trait_m.ident) { Some(_) => { // If there's a provided method with the name we // want, then we're fine; nothing else to do. @@ -546,19 +552,22 @@ fn ty_of_method(ccx: @crate_ctxt, m.purity, @~[], m.decl, None, m.span), self_ty: m.self_ty.node, - vis: m.vis} + vis: m.vis, + def_id: local_def(m.id)} } fn ty_of_ty_method(self: @crate_ctxt, m: ast::ty_method, - rp: Option) -> ty::method { + rp: Option, + id: ast::def_id) -> ty::method { {ident: m.ident, tps: ty_param_bounds(self, m.tps), fty: ty_of_fn_decl(self, type_rscope(rp), ast::proto_bare, m.purity, @~[], m.decl, None, m.span), // assume public, because this is only invoked on trait methods self_ty: m.self_ty.node, - vis: ast::public} + vis: ast::public, + def_id: id} } /* diff --git a/src/test/run-pass/default-method-simple.rs b/src/test/run-pass/default-method-simple.rs new file mode 100644 index 0000000000000..6a05d9589130e --- /dev/null +++ b/src/test/run-pass/default-method-simple.rs @@ -0,0 +1,23 @@ +trait Foo { + fn f() { + io::println("Hello!"); + self.g(); + } + fn g(); +} + +struct A { + x: int +} + +impl A : Foo { + fn g() { + io::println("Goodbye!"); + } +} + +fn main() { + let a = A { x: 1 }; + a.f(); +} + From 4f9e7babeb3fce422072f51959470ecec4dead53 Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Mon, 15 Oct 2012 14:17:16 -0700 Subject: [PATCH 11/40] Un-xfail test for #3521; it works now Closes #3521 --- src/test/{run-pass => compile-fail}/issue-3521.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) rename src/test/{run-pass => compile-fail}/issue-3521.rs (50%) diff --git a/src/test/run-pass/issue-3521.rs b/src/test/compile-fail/issue-3521.rs similarity index 50% rename from src/test/run-pass/issue-3521.rs rename to src/test/compile-fail/issue-3521.rs index d8693fe18d352..9ad483367fe91 100644 --- a/src/test/run-pass/issue-3521.rs +++ b/src/test/compile-fail/issue-3521.rs @@ -1,9 +1,8 @@ -// xfail-test fn main() { let foo = 100; enum Stuff { - Bar = foo + Bar = foo //~ ERROR attempt to use 
a non-constant value in a constant } log(error, Bar); From c5b82a65e96cfe77e4983e78a34a7d5aa91329b4 Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Mon, 15 Oct 2012 14:33:32 -0700 Subject: [PATCH 12/40] Add test cases for #3668 Previous commits fix the issue. Closes #3668 --- src/test/compile-fail/issue-3668-2.rs | 5 +++++ src/test/{run-pass => compile-fail}/issue-3668.rs | 3 +-- src/test/run-pass/issue-3688-2.rs | 6 ------ 3 files changed, 6 insertions(+), 8 deletions(-) create mode 100644 src/test/compile-fail/issue-3668-2.rs rename src/test/{run-pass => compile-fail}/issue-3668.rs (63%) delete mode 100644 src/test/run-pass/issue-3688-2.rs diff --git a/src/test/compile-fail/issue-3668-2.rs b/src/test/compile-fail/issue-3668-2.rs new file mode 100644 index 0000000000000..89c9e2b2e91e1 --- /dev/null +++ b/src/test/compile-fail/issue-3668-2.rs @@ -0,0 +1,5 @@ +fn f(x:int) { + const child: int = x + 1; //~ ERROR attempt to use a non-constant value in a constant +} + +fn main() {} diff --git a/src/test/run-pass/issue-3668.rs b/src/test/compile-fail/issue-3668.rs similarity index 63% rename from src/test/run-pass/issue-3668.rs rename to src/test/compile-fail/issue-3668.rs index 8b3005a3589dc..6cbd64c6aa9bd 100644 --- a/src/test/run-pass/issue-3668.rs +++ b/src/test/compile-fail/issue-3668.rs @@ -1,4 +1,3 @@ -// xfail-test struct P { child: Option<@mut P> } trait PTrait { fn getChildOption() -> Option<@P>; @@ -6,7 +5,7 @@ trait PTrait { impl P: PTrait { fn getChildOption() -> Option<@P> { - const childVal: @P = self.child.get(); + const childVal: @P = self.child.get(); //~ ERROR attempt to use a non-constant value in a constant fail; } } diff --git a/src/test/run-pass/issue-3688-2.rs b/src/test/run-pass/issue-3688-2.rs deleted file mode 100644 index 8a5b0e26829cf..0000000000000 --- a/src/test/run-pass/issue-3688-2.rs +++ /dev/null @@ -1,6 +0,0 @@ -// xfail-test -fn f(x:int) { - const child: int = x + 1; -} - -fn main() {} From 91ae5412d8141ea958924408bf3c1def5edca806 Mon Sep 17 00:00:00 2001 From: Patrick Walton Date: Mon, 15 Oct 2012 14:56:42 -0700 Subject: [PATCH 13/40] rustc: Merge module and type namespaces. 
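With modules and types sharing a single namespace, an item can no longer reuse the name of a module visible in the same scope, which is why this patch renames the clashing items seen in the diffs below (cmath::c_double -> c_double_utils, codemap::codemap -> CodeMap, token::token -> Token, parser::parser -> Parser, ast::ty -> Ty, libc's dirent -> dirent_t, and so on). The snippet here is an illustration only, not part of the patch: the names `speed` and `x` are invented, and it assumes the 2012-era dialect used elsewhere in this series, marking the expected error with a comment in the same style as the compile-fail tests above.

// Illustration: after merging the namespaces, a module and a struct
// can no longer share one name in the same scope.
mod speed { }

struct speed { x: int } // error after this change: `speed` already names the module

fn main() {
    let s = speed { x: 0 };
    log(error, s.x);
}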
r=brson --- src/fuzzer/fuzzer.rs | 18 +- src/libcore/cmath.rs | 4 +- src/libcore/f32.rs | 2 +- src/libcore/f64.rs | 4 +- src/libcore/libc.rs | 6 +- src/libsyntax/ast.rs | 50 +- src/libsyntax/ast_util.rs | 2 +- src/libsyntax/codemap.rs | 36 +- src/libsyntax/diagnostic.rs | 19 +- src/libsyntax/ext/auto_serialize.rs | 8 +- src/libsyntax/ext/base.rs | 6 +- src/libsyntax/ext/pipes.rs | 4 +- src/libsyntax/ext/pipes/ast_builder.rs | 60 +-- src/libsyntax/ext/pipes/check.rs | 4 +- src/libsyntax/ext/pipes/parse_proto.rs | 2 +- src/libsyntax/ext/pipes/pipec.rs | 8 +- src/libsyntax/ext/pipes/proto.rs | 10 +- src/libsyntax/ext/qquote.rs | 22 +- src/libsyntax/ext/simplext.rs | 4 +- src/libsyntax/ext/trace_macros.rs | 6 +- src/libsyntax/ext/tt/macro_parser.rs | 12 +- src/libsyntax/ext/tt/macro_rules.rs | 6 +- src/libsyntax/ext/tt/transcribe.rs | 9 +- src/libsyntax/fold.rs | 4 +- src/libsyntax/parse.rs | 24 +- src/libsyntax/parse/attr.rs | 2 +- src/libsyntax/parse/common.rs | 94 ++-- src/libsyntax/parse/eval.rs | 2 +- src/libsyntax/parse/lexer.rs | 26 +- src/libsyntax/parse/obsolete.rs | 6 +- src/libsyntax/parse/parser.rs | 70 +-- src/libsyntax/parse/prec.rs | 4 +- src/libsyntax/parse/token.rs | 30 +- src/libsyntax/print/pprust.rs | 14 +- src/libsyntax/util/interner.rs | 10 +- src/libsyntax/visit.rs | 12 +- src/rustc/back/link.rs | 26 +- src/rustc/back/rpath.rs | 4 +- src/rustc/driver/driver.rs | 29 +- src/rustc/driver/rustc.rs | 2 +- src/rustc/driver/session.rs | 16 +- src/rustc/front/core_inject.rs | 6 +- src/rustc/front/intrinsic_inject.rs | 4 +- src/rustc/front/test.rs | 14 +- src/rustc/metadata/creader.rs | 8 +- src/rustc/metadata/csearch.rs | 20 +- src/rustc/metadata/cstore.rs | 40 +- src/rustc/metadata/encoder.rs | 8 +- src/rustc/metadata/filesearch.rs | 12 +- src/rustc/metadata/loader.rs | 6 +- src/rustc/metadata/tydecode.rs | 2 +- src/rustc/metadata/tyencode.rs | 2 +- src/rustc/middle/astencode.rs | 8 +- src/rustc/middle/borrowck.rs | 7 +- src/rustc/middle/borrowck/gather_loans.rs | 4 +- src/rustc/middle/borrowck/loan.rs | 6 +- src/rustc/middle/borrowck/preserve.rs | 6 +- src/rustc/middle/capture.rs | 1 - src/rustc/middle/check_alt.rs | 1 - src/rustc/middle/check_const.rs | 12 +- src/rustc/middle/check_loop.rs | 1 - src/rustc/middle/kind.rs | 7 +- src/rustc/middle/lang_items.rs | 12 +- src/rustc/middle/lint.rs | 6 +- src/rustc/middle/liveness.rs | 1 - src/rustc/middle/mem_categorization.rs | 4 +- src/rustc/middle/region.rs | 16 +- src/rustc/middle/resolve.rs | 458 ++++++++---------- src/rustc/middle/trans/alt.rs | 1 - src/rustc/middle/trans/base.rs | 6 +- src/rustc/middle/trans/build.rs | 1 - src/rustc/middle/trans/common.rs | 8 +- src/rustc/middle/trans/debuginfo.rs | 8 +- src/rustc/middle/trans/foreign.rs | 2 +- src/rustc/middle/trans/reachable.rs | 2 +- src/rustc/middle/trans/reflect.rs | 1 - src/rustc/middle/trans/tvec.rs | 1 - src/rustc/middle/trans/type_use.rs | 1 - src/rustc/middle/ty.rs | 144 +++--- src/rustc/middle/typeck.rs | 1 - src/rustc/middle/typeck/astconv.rs | 6 +- src/rustc/middle/typeck/check.rs | 34 +- src/rustc/middle/typeck/check/alt.rs | 4 +- src/rustc/middle/typeck/check/method.rs | 4 +- src/rustc/middle/typeck/check/regionck.rs | 12 +- src/rustc/middle/typeck/check/regionmanip.rs | 8 +- src/rustc/middle/typeck/coherence.rs | 6 +- src/rustc/middle/typeck/collect.rs | 4 +- src/rustc/middle/typeck/infer.rs | 15 +- src/rustc/middle/typeck/infer/assignment.rs | 4 +- src/rustc/middle/typeck/infer/combine.rs | 18 +- src/rustc/middle/typeck/infer/glb.rs | 6 +- 
src/rustc/middle/typeck/infer/integral.rs | 2 +- src/rustc/middle/typeck/infer/lattice.rs | 2 +- src/rustc/middle/typeck/infer/lub.rs | 6 +- .../typeck/infer/region_var_bindings.rs | 58 +-- src/rustc/middle/typeck/infer/resolve.rs | 10 +- src/rustc/middle/typeck/infer/sub.rs | 6 +- src/rustc/middle/typeck/infer/to_str.rs | 16 +- src/rustc/middle/typeck/infer/unify.rs | 4 +- src/rustc/middle/typeck/rscope.rs | 36 +- src/rustc/util/common.rs | 1 - src/rustc/util/ppaux.rs | 13 +- src/rustdoc/astsrv.rs | 14 +- src/rustdoc/parse.rs | 6 +- src/test/run-pass/issue-2930.rs | 4 +- src/test/run-pass/pipe-select.rs | 4 +- 107 files changed, 877 insertions(+), 951 deletions(-) diff --git a/src/fuzzer/fuzzer.rs b/src/fuzzer/fuzzer.rs index 3e31287e3cd19..018972d4c3e0d 100644 --- a/src/fuzzer/fuzzer.rs +++ b/src/fuzzer/fuzzer.rs @@ -104,7 +104,7 @@ pure fn safe_to_use_expr(e: ast::expr, tm: test_mode) -> bool { } } -fn safe_to_steal_ty(t: @ast::ty, tm: test_mode) -> bool { +fn safe_to_steal_ty(t: @ast::Ty, tm: test_mode) -> bool { // Restrictions happen to be the same. safe_to_replace_ty(t.node, tm) } @@ -119,16 +119,16 @@ fn stash_expr_if(c: fn@(@ast::expr, test_mode)->bool, } else {/* now my indices are wrong :( */ } } -fn stash_ty_if(c: fn@(@ast::ty, test_mode)->bool, - es: @mut ~[ast::ty], - e: @ast::ty, +fn stash_ty_if(c: fn@(@ast::Ty, test_mode)->bool, + es: @mut ~[ast::Ty], + e: @ast::Ty, tm: test_mode) { if c(e, tm) { es.push(*e); } else {/* now my indices are wrong :( */ } } -type stolen_stuff = {exprs: ~[ast::expr], tys: ~[ast::ty]}; +type stolen_stuff = {exprs: ~[ast::expr], tys: ~[ast::Ty]}; fn steal(crate: ast::crate, tm: test_mode) -> stolen_stuff { let exprs = @mut ~[]; @@ -195,7 +195,7 @@ fn replace_expr_in_crate(crate: ast::crate, i: uint, // Replace the |i|th ty (in fold order) of |crate| with |newty|. 
-fn replace_ty_in_crate(crate: ast::crate, i: uint, newty: ast::ty, +fn replace_ty_in_crate(crate: ast::crate, i: uint, newty: ast::Ty, tm: test_mode) -> ast::crate { let j: @mut uint = @mut 0u; fn fold_ty_rep(j_: @mut uint, i_: uint, newty_: ast::ty_, @@ -225,7 +225,7 @@ fn as_str(f: fn@(+x: io::Writer)) -> ~str { io::with_str_writer(f) } -fn check_variants_of_ast(crate: ast::crate, codemap: codemap::codemap, +fn check_variants_of_ast(crate: ast::crate, codemap: codemap::CodeMap, filename: &Path, cx: context) { let stolen = steal(crate, cx.mode); let extra_exprs = vec::filter(common_exprs(), @@ -239,7 +239,7 @@ fn check_variants_of_ast(crate: ast::crate, codemap: codemap::codemap, fn check_variants_T( crate: ast::crate, - codemap: codemap::codemap, + codemap: codemap::CodeMap, filename: &Path, thing_label: ~str, things: ~[T], @@ -444,7 +444,7 @@ fn parse_and_print(code: @~str) -> ~str { fn has_raw_pointers(c: ast::crate) -> bool { let has_rp = @mut false; - fn visit_ty(flag: @mut bool, t: @ast::ty) { + fn visit_ty(flag: @mut bool, t: @ast::Ty) { match t.node { ast::ty_ptr(_) => { *flag = true; } _ => { } diff --git a/src/libcore/cmath.rs b/src/libcore/cmath.rs index b0aeb78afaa83..46ac90413a648 100644 --- a/src/libcore/cmath.rs +++ b/src/libcore/cmath.rs @@ -12,7 +12,7 @@ use libc::c_double; #[link_name = "m"] #[abi = "cdecl"] -pub extern mod c_double { +pub extern mod c_double_utils { // Alpabetically sorted by link_name @@ -87,7 +87,7 @@ pub extern mod c_double { #[link_name = "m"] #[abi = "cdecl"] -pub extern mod c_float { +pub extern mod c_float_utils { // Alpabetically sorted by link_name diff --git a/src/libcore/f32.rs b/src/libcore/f32.rs index ec0e66734fa39..ed6908d110d6d 100644 --- a/src/libcore/f32.rs +++ b/src/libcore/f32.rs @@ -4,7 +4,7 @@ //! Operations and constants for `f32` -pub use cmath::c_float::*; +pub use cmath::c_float_utils::*; pub use cmath::c_float_targ_consts::*; // These are not defined inside consts:: for consistency with diff --git a/src/libcore/f64.rs b/src/libcore/f64.rs index 731d369649b16..2d13dc86e2fa5 100644 --- a/src/libcore/f64.rs +++ b/src/libcore/f64.rs @@ -4,7 +4,7 @@ //! 
Operations and constants for `f64` -pub use cmath::c_double::*; +pub use cmath::c_double_utils::*; pub use cmath::c_double_targ_consts::*; // FIXME (#1433): obtain these in a different way @@ -59,7 +59,7 @@ pub pure fn ge(x: f64, y: f64) -> bool { return x >= y; } pub pure fn gt(x: f64, y: f64) -> bool { return x > y; } pub pure fn sqrt(x: f64) -> f64 { - cmath::c_double::sqrt(x as libc::c_double) as f64 + cmath::c_double_utils::sqrt(x as libc::c_double) as f64 } /// Returns true if `x` is a positive number, including +0.0f640 and +Infinity diff --git a/src/libcore/libc.rs b/src/libcore/libc.rs index dd8f76c89d5ff..7ed43f619e13e 100644 --- a/src/libcore/libc.rs +++ b/src/libcore/libc.rs @@ -87,7 +87,7 @@ pub use funcs::extra::*; pub use size_t; pub use c_float, c_double, c_void, FILE, fpos_t; -pub use DIR, dirent; +pub use DIR, dirent_t; pub use c_char, c_schar, c_uchar; pub use c_short, c_ushort, c_int, c_uint, c_long, c_ulong; pub use size_t, ptrdiff_t, clock_t, time_t; @@ -147,7 +147,7 @@ mod types { } pub mod posix88 { pub enum DIR {} - pub enum dirent {} + pub enum dirent_t {} } pub mod posix01 {} pub mod posix08 {} @@ -1019,7 +1019,7 @@ pub mod funcs { pub extern mod dirent { fn opendir(dirname: *c_char) -> *DIR; fn closedir(dirp: *DIR) -> c_int; - fn readdir(dirp: *DIR) -> *dirent; + fn readdir(dirp: *DIR) -> *dirent_t; fn rewinddir(dirp: *DIR); fn seekdir(dirp: *DIR, loc: c_long); fn telldir(dirp: *DIR) -> c_long; diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index cf7b758216b91..e3da15d181b76 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -129,7 +129,7 @@ type path = {span: span, global: bool, idents: ~[ident], rp: Option<@region>, - types: ~[@ty]}; + types: ~[@Ty]}; type crate_num = int; @@ -156,7 +156,7 @@ enum ty_param_bound { bound_send, bound_const, bound_owned, - bound_trait(@ty), + bound_trait(@Ty), } #[auto_serialize] @@ -702,7 +702,7 @@ type initializer = {op: init_op, expr: @expr}; // a refinement on pat. 
#[auto_serialize] #[auto_deserialize] -type local_ = {is_mutbl: bool, ty: @ty, pat: @pat, +type local_ = {is_mutbl: bool, ty: @Ty, pat: @pat, init: Option, id: node_id}; type local = spanned; @@ -764,7 +764,7 @@ enum expr_ { expr_binary(binop, @expr, @expr), expr_unary(unop, @expr), expr_lit(@lit), - expr_cast(@expr, @ty), + expr_cast(@expr, @Ty), expr_if(@expr, blk, Option<@expr>), expr_while(@expr, blk), /* Conditionless loop (can be exited with break, cont, ret, or fail) @@ -788,7 +788,7 @@ enum expr_ { expr_assign(@expr, @expr), expr_swap(@expr, @expr), expr_assign_op(binop, @expr, @expr), - expr_field(@expr, ident, ~[@ty]), + expr_field(@expr, ident, ~[@Ty]), expr_index(@expr, @expr), expr_path(@path), expr_addr_of(mutability, @expr), @@ -843,10 +843,10 @@ type capture_clause = @~[capture_item]; #[auto_deserialize] #[doc="For macro invocations; parsing is delegated to the macro"] enum token_tree { - tt_tok(span, token::token), + tt_tok(span, token::Token), tt_delim(~[token_tree]), // These only make sense for right-hand-sides of MBE macros - tt_seq(span, ~[token_tree], Option, bool), + tt_seq(span, ~[token_tree], Option, bool), tt_nonterminal(span, ident) } @@ -908,10 +908,10 @@ type matcher = spanned; #[auto_deserialize] enum matcher_ { // match one token - match_tok(token::token), + match_tok(token::Token), // match repetitions of a sequence: body, separator, zero ok?, // lo, hi position-in-match-array used: - match_seq(~[matcher], Option, bool, uint, uint), + match_seq(~[matcher], Option, bool, uint, uint), // parse a Rust NT: name to bind, name of NT, position in match array: match_nonterminal(ident, ident, uint) } @@ -984,7 +984,7 @@ impl ast::lit_: cmp::Eq { // type structure in middle/ty.rs as well. #[auto_serialize] #[auto_deserialize] -type mt = {ty: @ty, mutbl: mutability}; +type mt = {ty: @Ty, mutbl: mutability}; #[auto_serialize] #[auto_deserialize] @@ -1087,7 +1087,7 @@ impl float_ty : cmp::Eq { #[auto_serialize] #[auto_deserialize] -type ty = {id: node_id, node: ty_, span: span}; +type Ty = {id: node_id, node: ty_, span: span}; // Not represented directly in the AST, referred to by name through a ty_path. #[auto_serialize] @@ -1163,9 +1163,9 @@ enum ty_ { ty_rptr(@region, mt), ty_rec(~[ty_field]), ty_fn(proto, purity, @~[ty_param_bound], fn_decl), - ty_tup(~[@ty]), + ty_tup(~[@Ty]), ty_path(@path, node_id), - ty_fixed_length(@ty, Option), + ty_fixed_length(@Ty, Option), ty_mac(mac), // ty_infer means the type should be inferred instead of it having been // specified. This should only appear at the "top level" of a type and not @@ -1175,16 +1175,16 @@ enum ty_ { // Equality and byte-iter (hashing) can be quite approximate for AST types. // since we only care about this for normalizing them to "real" types. 
-impl ty : cmp::Eq { - pure fn eq(other: &ty) -> bool { +impl Ty : cmp::Eq { + pure fn eq(other: &Ty) -> bool { ptr::addr_of(&self) == ptr::addr_of(&(*other)) } - pure fn ne(other: &ty) -> bool { + pure fn ne(other: &Ty) -> bool { ptr::addr_of(&self) != ptr::addr_of(&(*other)) } } -impl ty : to_bytes::IterBytes { +impl Ty : to_bytes::IterBytes { pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) { to_bytes::iter_bytes_2(&self.span.lo, &self.span.hi, lsb0, f); } @@ -1193,13 +1193,13 @@ impl ty : to_bytes::IterBytes { #[auto_serialize] #[auto_deserialize] -type arg = {mode: mode, ty: @ty, ident: ident, id: node_id}; +type arg = {mode: mode, ty: @Ty, ident: ident, id: node_id}; #[auto_serialize] #[auto_deserialize] type fn_decl = {inputs: ~[arg], - output: @ty, + output: @Ty, cf: ret_style}; #[auto_serialize] @@ -1362,7 +1362,7 @@ type foreign_mod = #[auto_serialize] #[auto_deserialize] -type variant_arg = {ty: @ty, id: node_id}; +type variant_arg = {ty: @Ty, id: node_id}; #[auto_serialize] #[auto_deserialize] @@ -1495,7 +1495,7 @@ impl visibility : cmp::Eq { type struct_field_ = { kind: struct_field_kind, id: node_id, - ty: @ty + ty: @Ty }; type struct_field = spanned; @@ -1531,17 +1531,17 @@ type item = {ident: ident, attrs: ~[attribute], #[auto_serialize] #[auto_deserialize] enum item_ { - item_const(@ty, @expr), + item_const(@Ty, @expr), item_fn(fn_decl, purity, ~[ty_param], blk), item_mod(_mod), item_foreign_mod(foreign_mod), - item_ty(@ty, ~[ty_param]), + item_ty(@Ty, ~[ty_param]), item_enum(enum_def, ~[ty_param]), item_class(@struct_def, ~[ty_param]), item_trait(~[ty_param], ~[@trait_ref], ~[trait_method]), item_impl(~[ty_param], Option<@trait_ref>, /* (optional) trait this impl implements */ - @ty, /* self */ + @Ty, /* self */ ~[@method]), item_mac(mac), } @@ -1601,7 +1601,7 @@ type foreign_item = #[auto_deserialize] enum foreign_item_ { foreign_item_fn(fn_decl, purity, ~[ty_param]), - foreign_item_const(@ty) + foreign_item_const(@Ty) } // The data we save and restore about an inlined item or method. 
This is not diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 4c18b6b8ecac9..6fd84c3317f72 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -471,7 +471,7 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> { visit_expr_post: fn@(_e: @expr) { }, - visit_ty: fn@(t: @ty) { + visit_ty: fn@(t: @Ty) { match t.node { ty_path(_, id) => vfn(id), _ => { /* fall through */ } diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index e07985119ec49..0cb8b425c942e 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -7,7 +7,7 @@ export file_substr; export fss_none; export fss_internal; export fss_external; -export codemap; +export CodeMap; export expn_info; export expn_info_; export expanded_from; @@ -55,11 +55,11 @@ type filemap = @{name: filename, substr: file_substr, src: @~str, start_pos: file_pos, mut lines: ~[file_pos]}; -type codemap = @{files: DVec}; +type CodeMap = @{files: DVec}; type loc = {file: filemap, line: uint, col: uint}; -fn new_codemap() -> codemap { @{files: DVec()} } +fn new_codemap() -> CodeMap { @{files: DVec()} } fn new_filemap_w_substr(+filename: filename, +substr: file_substr, src: @~str, @@ -77,7 +77,7 @@ fn new_filemap(+filename: filename, src: @~str, start_pos_ch, start_pos_byte); } -fn mk_substr_filename(cm: codemap, sp: span) -> ~str +fn mk_substr_filename(cm: CodeMap, sp: span) -> ~str { let pos = lookup_char_pos(cm, sp.lo); return fmt!("<%s:%u:%u>", pos.file.name, pos.line, pos.col); @@ -89,7 +89,7 @@ fn next_line(file: filemap, chpos: uint, byte_pos: uint) { type lookup_fn = pure fn(file_pos) -> uint; -fn lookup_line(map: codemap, pos: uint, lookup: lookup_fn) +fn lookup_line(map: CodeMap, pos: uint, lookup: lookup_fn) -> {fm: filemap, line: uint} { let len = map.files.len(); @@ -112,22 +112,22 @@ fn lookup_line(map: codemap, pos: uint, lookup: lookup_fn) return {fm: f, line: a}; } -fn lookup_pos(map: codemap, pos: uint, lookup: lookup_fn) -> loc { +fn lookup_pos(map: CodeMap, pos: uint, lookup: lookup_fn) -> loc { let {fm: f, line: a} = lookup_line(map, pos, lookup); return {file: f, line: a + 1u, col: pos - lookup(f.lines[a])}; } -fn lookup_char_pos(map: codemap, pos: uint) -> loc { +fn lookup_char_pos(map: CodeMap, pos: uint) -> loc { pure fn lookup(pos: file_pos) -> uint { return pos.ch; } return lookup_pos(map, pos, lookup); } -fn lookup_byte_pos(map: codemap, pos: uint) -> loc { +fn lookup_byte_pos(map: CodeMap, pos: uint) -> loc { pure fn lookup(pos: file_pos) -> uint { return pos.byte; } return lookup_pos(map, pos, lookup); } -fn lookup_char_pos_adj(map: codemap, pos: uint) +fn lookup_char_pos_adj(map: CodeMap, pos: uint) -> {filename: ~str, line: uint, col: uint, file: Option} { let loc = lookup_char_pos(map, pos); @@ -150,7 +150,7 @@ fn lookup_char_pos_adj(map: codemap, pos: uint) } } -fn adjust_span(map: codemap, sp: span) -> span { +fn adjust_span(map: CodeMap, sp: span) -> span { pure fn lookup(pos: file_pos) -> uint { return pos.ch; } let line = lookup_line(map, sp.lo, lookup); match (line.fm.substr) { @@ -178,14 +178,14 @@ impl span : cmp::Eq { pure fn ne(other: &span) -> bool { !self.eq(other) } } -fn span_to_str_no_adj(sp: span, cm: codemap) -> ~str { +fn span_to_str_no_adj(sp: span, cm: CodeMap) -> ~str { let lo = lookup_char_pos(cm, sp.lo); let hi = lookup_char_pos(cm, sp.hi); return fmt!("%s:%u:%u: %u:%u", lo.file.name, lo.line, lo.col, hi.line, hi.col) } -fn span_to_str(sp: span, cm: codemap) -> ~str { +fn span_to_str(sp: span, cm: CodeMap) -> ~str { let lo = 
lookup_char_pos_adj(cm, sp.lo); let hi = lookup_char_pos_adj(cm, sp.hi); return fmt!("%s:%u:%u: %u:%u", lo.filename, @@ -194,12 +194,12 @@ fn span_to_str(sp: span, cm: codemap) -> ~str { type file_lines = {file: filemap, lines: ~[uint]}; -fn span_to_filename(sp: span, cm: codemap::codemap) -> filename { +fn span_to_filename(sp: span, cm: codemap::CodeMap) -> filename { let lo = lookup_char_pos(cm, sp.lo); return /* FIXME (#2543) */ copy lo.file.name; } -fn span_to_lines(sp: span, cm: codemap::codemap) -> @file_lines { +fn span_to_lines(sp: span, cm: codemap::CodeMap) -> @file_lines { let lo = lookup_char_pos(cm, sp.lo); let hi = lookup_char_pos(cm, sp.hi); let mut lines = ~[]; @@ -218,7 +218,7 @@ fn get_line(fm: filemap, line: int) -> ~str unsafe { str::slice(*fm.src, begin, end) } -fn lookup_byte_offset(cm: codemap::codemap, chpos: uint) +fn lookup_byte_offset(cm: codemap::CodeMap, chpos: uint) -> {fm: filemap, pos: uint} { pure fn lookup(pos: file_pos) -> uint { return pos.ch; } let {fm, line} = lookup_line(cm, chpos, lookup); @@ -228,20 +228,20 @@ fn lookup_byte_offset(cm: codemap::codemap, chpos: uint) {fm: fm, pos: line_offset + col_offset} } -fn span_to_snippet(sp: span, cm: codemap::codemap) -> ~str { +fn span_to_snippet(sp: span, cm: codemap::CodeMap) -> ~str { let begin = lookup_byte_offset(cm, sp.lo); let end = lookup_byte_offset(cm, sp.hi); assert begin.fm.start_pos == end.fm.start_pos; return str::slice(*begin.fm.src, begin.pos, end.pos); } -fn get_snippet(cm: codemap::codemap, fidx: uint, lo: uint, hi: uint) -> ~str +fn get_snippet(cm: codemap::CodeMap, fidx: uint, lo: uint, hi: uint) -> ~str { let fm = cm.files[fidx]; return str::slice(*fm.src, lo, hi) } -fn get_filemap(cm: codemap, filename: ~str) -> filemap { +fn get_filemap(cm: CodeMap, filename: ~str) -> filemap { for cm.files.each |fm| { if fm.name == filename { return *fm; } } //XXjdm the following triggers a mismatched type bug // (or expected function, found _|_) diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index 2addb3d9e12a9..855b0ca3ef568 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -9,7 +9,7 @@ export codemap_span_handler, codemap_handler; export ice_msg; export expect; -type emitter = fn@(cmsp: Option<(codemap::codemap, span)>, +type emitter = fn@(cmsp: Option<(codemap::CodeMap, span)>, msg: &str, lvl: level); @@ -33,7 +33,7 @@ trait handler { fn note(msg: &str); fn bug(msg: &str) -> !; fn unimpl(msg: &str) -> !; - fn emit(cmsp: Option<(codemap::codemap, span)>, msg: &str, lvl: level); + fn emit(cmsp: Option<(codemap::CodeMap, span)>, msg: &str, lvl: level); } type handler_t = @{ @@ -43,7 +43,7 @@ type handler_t = @{ type codemap_t = @{ handler: handler, - cm: codemap::codemap + cm: codemap::CodeMap }; impl codemap_t: span_handler { @@ -107,7 +107,7 @@ impl handler_t: handler { self.fatal(ice_msg(msg)); } fn unimpl(msg: &str) -> ! 
{ self.bug(~"unimplemented " + msg); } - fn emit(cmsp: Option<(codemap::codemap, span)>, msg: &str, lvl: level) { + fn emit(cmsp: Option<(codemap::CodeMap, span)>, msg: &str, lvl: level) { self.emit(cmsp, msg, lvl); } } @@ -116,7 +116,7 @@ fn ice_msg(msg: &str) -> ~str { fmt!("internal compiler error: %s", msg) } -fn mk_span_handler(handler: handler, cm: codemap::codemap) -> span_handler { +fn mk_span_handler(handler: handler, cm: codemap::CodeMap) -> span_handler { @{ handler: handler, cm: cm } as span_handler } @@ -125,7 +125,7 @@ fn mk_handler(emitter: Option) -> handler { let emit = match emitter { Some(e) => e, None => { - let f = fn@(cmsp: Option<(codemap::codemap, span)>, + let f = fn@(cmsp: Option<(codemap::CodeMap, span)>, msg: &str, t: level) { emit(cmsp, msg, t); }; @@ -189,8 +189,7 @@ fn print_diagnostic(topic: ~str, lvl: level, msg: &str) { io::stderr().write_str(fmt!(" %s\n", msg)); } -fn emit(cmsp: Option<(codemap::codemap, span)>, - msg: &str, lvl: level) { +fn emit(cmsp: Option<(codemap::CodeMap, span)>, msg: &str, lvl: level) { match cmsp { Some((cm, sp)) => { let sp = codemap::adjust_span(cm,sp); @@ -206,7 +205,7 @@ fn emit(cmsp: Option<(codemap::codemap, span)>, } } -fn highlight_lines(cm: codemap::codemap, sp: span, +fn highlight_lines(cm: codemap::CodeMap, sp: span, lines: @codemap::file_lines) { let fm = lines.file; @@ -261,7 +260,7 @@ fn highlight_lines(cm: codemap::codemap, sp: span, } } -fn print_macro_backtrace(cm: codemap::codemap, sp: span) { +fn print_macro_backtrace(cm: codemap::CodeMap, sp: span) { do option::iter(&sp.expn_info) |ei| { let ss = option::map_default(&ei.callie.span, @~"", |span| @codemap::span_to_str(*span, cm)); diff --git a/src/libsyntax/ext/auto_serialize.rs b/src/libsyntax/ext/auto_serialize.rs index 452becbe559ac..7e5e68ffff90e 100644 --- a/src/libsyntax/ext/auto_serialize.rs +++ b/src/libsyntax/ext/auto_serialize.rs @@ -250,12 +250,12 @@ priv impl ext_ctxt { } fn path_tps(span: span, strs: ~[ast::ident], - tps: ~[@ast::ty]) -> @ast::path { + tps: ~[@ast::Ty]) -> @ast::path { @{span: span, global: false, idents: strs, rp: None, types: tps} } fn ty_path(span: span, strs: ~[ast::ident], - tps: ~[@ast::ty]) -> @ast::ty { + tps: ~[@ast::Ty]) -> @ast::Ty { @{id: self.next_id(), node: ast::ty_path(self.path_tps(span, strs, tps), self.next_id()), span: span} @@ -360,7 +360,7 @@ fn mk_impl( ty_param: ast::ty_param, path: @ast::path, tps: ~[ast::ty_param], - f: fn(@ast::ty) -> @ast::method + f: fn(@ast::Ty) -> @ast::method ) -> @ast::item { // All the type parameters need to bound to the trait. let mut trait_tps = vec::append( @@ -549,7 +549,7 @@ fn mk_ser_method( fn mk_deser_method( cx: ext_ctxt, span: span, - ty: @ast::ty, + ty: @ast::Ty, deser_body: ast::blk ) -> @ast::method { let ty_d = @{ diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 5894758cd85ca..5b4cc23ce09fd 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -1,7 +1,7 @@ use std::map::HashMap; use parse::parser; use diagnostic::span_handler; -use codemap::{codemap, span, expn_info, expanded_from}; +use codemap::{CodeMap, span, expn_info, expanded_from}; // obsolete old-style #macro code: // @@ -124,7 +124,7 @@ fn syntax_expander_table() -> HashMap<~str, syntax_extension> { // when a macro expansion occurs, the resulting nodes have the backtrace() // -> expn_info of their expansion context stored into their span. 
trait ext_ctxt { - fn codemap() -> codemap; + fn codemap() -> CodeMap; fn parse_sess() -> parse::parse_sess; fn cfg() -> ast::crate_cfg; fn print_backtrace(); @@ -156,7 +156,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess, mut mod_path: ~[ast::ident], mut trace_mac: bool}; impl ctxt_repr: ext_ctxt { - fn codemap() -> codemap { self.parse_sess.cm } + fn codemap() -> CodeMap { self.parse_sess.cm } fn parse_sess() -> parse::parse_sess { self.parse_sess } fn cfg() -> ast::crate_cfg { self.cfg } fn print_backtrace() { } diff --git a/src/libsyntax/ext/pipes.rs b/src/libsyntax/ext/pipes.rs index ad4984c55582d..4d04552bfa15a 100644 --- a/src/libsyntax/ext/pipes.rs +++ b/src/libsyntax/ext/pipes.rs @@ -37,7 +37,7 @@ use codemap::span; use ext::base::ext_ctxt; use ast::tt_delim; use parse::lexer::{new_tt_reader, reader}; -use parse::parser::{parser, SOURCE_FILE}; +use parse::parser::{Parser, SOURCE_FILE}; use parse::common::parser_common; use pipes::parse_proto::proto_parser; @@ -52,7 +52,7 @@ fn expand_proto(cx: ext_ctxt, _sp: span, id: ast::ident, let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic, cx.parse_sess().interner, None, tt); let rdr = tt_rdr as reader; - let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE); + let rust_parser = Parser(sess, cfg, rdr.dup(), SOURCE_FILE); let proto = rust_parser.parse_proto(cx.str_of(id)); diff --git a/src/libsyntax/ext/pipes/ast_builder.rs b/src/libsyntax/ext/pipes/ast_builder.rs index 4da9992b0dd36..f10cbc2a5898a 100644 --- a/src/libsyntax/ext/pipes/ast_builder.rs +++ b/src/libsyntax/ext/pipes/ast_builder.rs @@ -28,17 +28,17 @@ fn empty_span() -> span { } trait append_types { - fn add_ty(ty: @ast::ty) -> @ast::path; - fn add_tys(+tys: ~[@ast::ty]) -> @ast::path; + fn add_ty(ty: @ast::Ty) -> @ast::path; + fn add_tys(+tys: ~[@ast::Ty]) -> @ast::path; } impl @ast::path: append_types { - fn add_ty(ty: @ast::ty) -> @ast::path { + fn add_ty(ty: @ast::Ty) -> @ast::path { @{types: vec::append_one(self.types, ty), .. *self} } - fn add_tys(+tys: ~[@ast::ty]) -> @ast::path { + fn add_tys(+tys: ~[@ast::Ty]) -> @ast::path { @{types: vec::append(self.types, tys), .. 
*self} } @@ -47,18 +47,18 @@ impl @ast::path: append_types { trait ext_ctxt_ast_builder { fn ty_param(id: ast::ident, +bounds: ~[ast::ty_param_bound]) -> ast::ty_param; - fn arg(name: ident, ty: @ast::ty) -> ast::arg; + fn arg(name: ident, ty: @ast::Ty) -> ast::arg; fn expr_block(e: @ast::expr) -> ast::blk; - fn fn_decl(+inputs: ~[ast::arg], output: @ast::ty) -> ast::fn_decl; + fn fn_decl(+inputs: ~[ast::arg], output: @ast::Ty) -> ast::fn_decl; fn item(name: ident, span: span, +node: ast::item_) -> @ast::item; fn item_fn_poly(name: ident, +inputs: ~[ast::arg], - output: @ast::ty, + output: @ast::Ty, +ty_params: ~[ast::ty_param], +body: ast::blk) -> @ast::item; fn item_fn(name: ident, +inputs: ~[ast::arg], - output: @ast::ty, + output: @ast::Ty, +body: ast::blk) -> @ast::item; fn item_enum_poly(name: ident, span: span, @@ -66,17 +66,17 @@ trait ext_ctxt_ast_builder { +ty_params: ~[ast::ty_param]) -> @ast::item; fn item_enum(name: ident, span: span, +enum_definition: ast::enum_def) -> @ast::item; - fn variant(name: ident, span: span, +tys: ~[@ast::ty]) -> ast::variant; + fn variant(name: ident, span: span, +tys: ~[@ast::Ty]) -> ast::variant; fn item_mod(name: ident, span: span, +items: ~[@ast::item]) -> @ast::item; - fn ty_path_ast_builder(path: @ast::path) -> @ast::ty; + fn ty_path_ast_builder(path: @ast::path) -> @ast::Ty; fn item_ty_poly(name: ident, span: span, - ty: @ast::ty, + ty: @ast::Ty, +params: ~[ast::ty_param]) -> @ast::item; - fn item_ty(name: ident, span: span, ty: @ast::ty) -> @ast::item; - fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::ty]; - fn ty_field_imm(name: ident, ty: @ast::ty) -> ast::ty_field; - fn ty_rec(+v: ~[ast::ty_field]) -> @ast::ty; + fn item_ty(name: ident, span: span, ty: @ast::Ty) -> @ast::item; + fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::Ty]; + fn ty_field_imm(name: ident, ty: @ast::Ty) -> ast::ty_field; + fn ty_rec(+v: ~[ast::ty_field]) -> @ast::Ty; fn field_imm(name: ident, e: @ast::expr) -> ast::field; fn rec(+v: ~[ast::field]) -> @ast::expr; fn block(+stmts: ~[@ast::stmt], e: @ast::expr) -> ast::blk; @@ -84,11 +84,11 @@ trait ext_ctxt_ast_builder { fn stmt_expr(e: @ast::expr) -> @ast::stmt; fn block_expr(b: ast::blk) -> @ast::expr; fn empty_span() -> span; - fn ty_option(ty: @ast::ty) -> @ast::ty; + fn ty_option(ty: @ast::Ty) -> @ast::Ty; } impl ext_ctxt: ext_ctxt_ast_builder { - fn ty_option(ty: @ast::ty) -> @ast::ty { + fn ty_option(ty: @ast::Ty) -> @ast::Ty { self.ty_path_ast_builder(path(~[self.ident_of(~"Option")], self.empty_span()) .add_ty(ty)) @@ -146,18 +146,18 @@ impl ext_ctxt: ext_ctxt_ast_builder { span: self.empty_span()} } - fn ty_field_imm(name: ident, ty: @ast::ty) -> ast::ty_field { + fn ty_field_imm(name: ident, ty: @ast::Ty) -> ast::ty_field { {node: {ident: name, mt: { ty: ty, mutbl: ast::m_imm } }, span: self.empty_span()} } - fn ty_rec(+fields: ~[ast::ty_field]) -> @ast::ty { + fn ty_rec(+fields: ~[ast::ty_field]) -> @ast::Ty { @{id: self.next_id(), node: ast::ty_rec(fields), span: self.empty_span()} } - fn ty_infer() -> @ast::ty { + fn ty_infer() -> @ast::Ty { @{id: self.next_id(), node: ast::ty_infer, span: self.empty_span()} @@ -169,7 +169,7 @@ impl ext_ctxt: ext_ctxt_ast_builder { {ident: id, id: self.next_id(), bounds: @bounds} } - fn arg(name: ident, ty: @ast::ty) -> ast::arg { + fn arg(name: ident, ty: @ast::Ty) -> ast::arg { {mode: ast::infer(self.next_id()), ty: ty, ident: name, @@ -192,7 +192,7 @@ impl ext_ctxt: ext_ctxt_ast_builder { } fn fn_decl(+inputs: ~[ast::arg], - output: @ast::ty) -> ast::fn_decl { 
+ output: @ast::Ty) -> ast::fn_decl { {inputs: inputs, output: output, cf: ast::return_val} @@ -224,7 +224,7 @@ impl ext_ctxt: ext_ctxt_ast_builder { fn item_fn_poly(name: ident, +inputs: ~[ast::arg], - output: @ast::ty, + output: @ast::Ty, +ty_params: ~[ast::ty_param], +body: ast::blk) -> @ast::item { self.item(name, @@ -237,7 +237,7 @@ impl ext_ctxt: ext_ctxt_ast_builder { fn item_fn(name: ident, +inputs: ~[ast::arg], - output: @ast::ty, + output: @ast::Ty, +body: ast::blk) -> @ast::item { self.item_fn_poly(name, inputs, output, ~[], body) } @@ -256,7 +256,7 @@ impl ext_ctxt: ext_ctxt_ast_builder { fn variant(name: ident, span: span, - +tys: ~[@ast::ty]) -> ast::variant { + +tys: ~[@ast::Ty]) -> ast::variant { let args = tys.map(|ty| {ty: *ty, id: self.next_id()}); {node: {name: name, @@ -278,13 +278,13 @@ impl ext_ctxt: ext_ctxt_ast_builder { items: items})) } - fn ty_path_ast_builder(path: @ast::path) -> @ast::ty { + fn ty_path_ast_builder(path: @ast::path) -> @ast::Ty { @{id: self.next_id(), node: ast::ty_path(path, self.next_id()), span: path.span} } - fn ty_nil_ast_builder() -> @ast::ty { + fn ty_nil_ast_builder() -> @ast::Ty { @{id: self.next_id(), node: ast::ty_nil, span: self.empty_span()} @@ -292,16 +292,16 @@ impl ext_ctxt: ext_ctxt_ast_builder { fn item_ty_poly(name: ident, span: span, - ty: @ast::ty, + ty: @ast::Ty, +params: ~[ast::ty_param]) -> @ast::item { self.item(name, span, ast::item_ty(ty, params)) } - fn item_ty(name: ident, span: span, ty: @ast::ty) -> @ast::item { + fn item_ty(name: ident, span: span, ty: @ast::Ty) -> @ast::item { self.item_ty_poly(name, span, ty, ~[]) } - fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::ty] { + fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::Ty] { ty_params.map(|p| self.ty_path_ast_builder( path(~[p.ident], self.empty_span()))) } diff --git a/src/libsyntax/ext/pipes/check.rs b/src/libsyntax/ext/pipes/check.rs index 5fcc00ef01217..fcc0c84a4ff39 100644 --- a/src/libsyntax/ext/pipes/check.rs +++ b/src/libsyntax/ext/pipes/check.rs @@ -38,7 +38,7 @@ impl ext_ctxt: proto::visitor<(), (), ()> { } } - fn visit_message(name: ~str, _span: span, _tys: &[@ast::ty], + fn visit_message(name: ~str, _span: span, _tys: &[@ast::Ty], this: state, next: next_state) { match next { Some({state: next, tys: next_tys}) => { @@ -68,4 +68,4 @@ impl ext_ctxt: proto::visitor<(), (), ()> { None => () } } -} \ No newline at end of file +} diff --git a/src/libsyntax/ext/pipes/parse_proto.rs b/src/libsyntax/ext/pipes/parse_proto.rs index 5c15b616b4aef..8f2b92a720c07 100644 --- a/src/libsyntax/ext/pipes/parse_proto.rs +++ b/src/libsyntax/ext/pipes/parse_proto.rs @@ -10,7 +10,7 @@ trait proto_parser { fn parse_state(proto: protocol); } -impl parser: proto_parser { +impl parser::Parser: proto_parser { fn parse_proto(id: ~str) -> protocol { let proto = protocol(id, self.span); diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs index 874ea01e9b01d..7e1cbe9ad0dbf 100644 --- a/src/libsyntax/ext/pipes/pipec.rs +++ b/src/libsyntax/ext/pipes/pipec.rs @@ -181,7 +181,7 @@ impl message: gen_send { } } - fn to_ty(cx: ext_ctxt) -> @ast::ty { + fn to_ty(cx: ext_ctxt) -> @ast::Ty { cx.ty_path_ast_builder(path(~[cx.ident_of(self.name())], self.span()) .add_tys(cx.ty_vars(self.get_params()))) } @@ -360,7 +360,7 @@ impl protocol: gen_init { }} } - fn buffer_ty_path(cx: ext_ctxt) -> @ast::ty { + fn buffer_ty_path(cx: ext_ctxt) -> @ast::Ty { let mut params: ~[ast::ty_param] = ~[]; for (copy self.states).each |s| { for s.ty_params.each |tp| { @@ 
-444,13 +444,13 @@ impl ~[@ast::item]: to_source { } } -impl @ast::ty: to_source { +impl @ast::Ty: to_source { fn to_source(cx: ext_ctxt) -> ~str { ty_to_str(self, cx.parse_sess().interner) } } -impl ~[@ast::ty]: to_source { +impl ~[@ast::Ty]: to_source { fn to_source(cx: ext_ctxt) -> ~str { str::connect(self.map(|i| i.to_source(cx)), ~", ") } diff --git a/src/libsyntax/ext/pipes/proto.rs b/src/libsyntax/ext/pipes/proto.rs index a501df4c32d2b..229e55fdfcc6a 100644 --- a/src/libsyntax/ext/pipes/proto.rs +++ b/src/libsyntax/ext/pipes/proto.rs @@ -35,11 +35,11 @@ impl direction { } } -type next_state = Option<{state: ~str, tys: ~[@ast::ty]}>; +type next_state = Option<{state: ~str, tys: ~[@ast::Ty]}>; enum message { // name, span, data, current state, next state - message(~str, span, ~[@ast::ty], state, next_state) + message(~str, span, ~[@ast::Ty], state, next_state) } impl message { @@ -78,7 +78,7 @@ enum state { impl state { fn add_message(name: ~str, span: span, - +data: ~[@ast::ty], next: next_state) { + +data: ~[@ast::Ty], next: next_state) { self.messages.push(message(name, span, data, self, next)); } @@ -92,7 +92,7 @@ impl state { } /// Returns the type that is used for the messages. - fn to_ty(cx: ext_ctxt) -> @ast::ty { + fn to_ty(cx: ext_ctxt) -> @ast::Ty { cx.ty_path_ast_builder (path(~[cx.ident_of(self.name)],self.span).add_tys( cx.ty_vars(self.ty_params))) @@ -200,7 +200,7 @@ impl protocol { trait visitor { fn visit_proto(proto: protocol, st: &[Tstate]) -> Tproto; fn visit_state(state: state, m: &[Tmessage]) -> Tstate; - fn visit_message(name: ~str, spane: span, tys: &[@ast::ty], + fn visit_message(name: ~str, spane: span, tys: &[@ast::Ty], this: state, next: next_state) -> Tmessage; } diff --git a/src/libsyntax/ext/qquote.rs b/src/libsyntax/ext/qquote.rs index ee9602598d1ed..a83789642ccc8 100644 --- a/src/libsyntax/ext/qquote.rs +++ b/src/libsyntax/ext/qquote.rs @@ -1,7 +1,7 @@ use ast::{crate, expr_, mac_invoc, mac_aq, mac_var}; use parse::parser; -use parse::parser::parse_from_source_str; +use parse::parser::{Parser, parse_from_source_str}; use dvec::DVec; use parse::token::ident_interner; @@ -24,7 +24,7 @@ struct gather_item { type aq_ctxt = @{lo: uint, gather: DVec}; enum fragment { from_expr(@ast::expr), - from_ty(@ast::ty) + from_ty(@ast::Ty) } fn ids_ext(cx: ext_ctxt, strs: ~[~str]) -> ~[ast::ident] { @@ -68,7 +68,7 @@ impl @ast::expr: qq_helper { } fn get_fold_fn() -> ~str {~"fold_expr"} } -impl @ast::ty: qq_helper { +impl @ast::Ty: qq_helper { fn span() -> span {self.span} fn visit(cx: aq_ctxt, v: vt) {visit_ty(self, cx, v);} fn extract_mac() -> Option { @@ -186,13 +186,13 @@ fn expand_ast(ecx: ext_ctxt, _sp: span, }; } -fn parse_crate(p: parser) -> @ast::crate { p.parse_crate_mod(~[]) } -fn parse_ty(p: parser) -> @ast::ty { p.parse_ty(false) } -fn parse_stmt(p: parser) -> @ast::stmt { p.parse_stmt(~[]) } -fn parse_expr(p: parser) -> @ast::expr { p.parse_expr() } -fn parse_pat(p: parser) -> @ast::pat { p.parse_pat(true) } +fn parse_crate(p: Parser) -> @ast::crate { p.parse_crate_mod(~[]) } +fn parse_ty(p: Parser) -> @ast::Ty { p.parse_ty(false) } +fn parse_stmt(p: Parser) -> @ast::stmt { p.parse_stmt(~[]) } +fn parse_expr(p: Parser) -> @ast::expr { p.parse_expr() } +fn parse_pat(p: Parser) -> @ast::pat { p.parse_pat(true) } -fn parse_item(p: parser) -> @ast::item { +fn parse_item(p: Parser) -> @ast::item { match p.parse_item(~[]) { Some(item) => item, None => fail ~"parse_item: parsing an item failed" @@ -200,7 +200,7 @@ fn parse_item(p: parser) -> @ast::item { } fn 
finish - (ecx: ext_ctxt, body: ast::mac_body_, f: fn (p: parser) -> T) + (ecx: ext_ctxt, body: ast::mac_body_, f: fn (p: Parser) -> T) -> @ast::expr { let cm = ecx.codemap(); @@ -309,7 +309,7 @@ fn fold_crate(f: ast_fold, &&n: @ast::crate) -> @ast::crate { @f.fold_crate(*n) } fn fold_expr(f: ast_fold, &&n: @ast::expr) -> @ast::expr {f.fold_expr(n)} -fn fold_ty(f: ast_fold, &&n: @ast::ty) -> @ast::ty {f.fold_ty(n)} +fn fold_ty(f: ast_fold, &&n: @ast::Ty) -> @ast::Ty {f.fold_ty(n)} fn fold_item(f: ast_fold, &&n: @ast::item) -> @ast::item { f.fold_item(n).get() //HACK: we know we don't drop items } diff --git a/src/libsyntax/ext/simplext.rs b/src/libsyntax/ext/simplext.rs index e16e1c5534997..bec29c9a83540 100644 --- a/src/libsyntax/ext/simplext.rs +++ b/src/libsyntax/ext/simplext.rs @@ -6,7 +6,7 @@ use base::*; use fold::*; use ast_util::respan; -use ast::{ident, path, ty, blk_, expr, expr_path, +use ast::{ident, path, Ty, blk_, expr, expr_path, expr_vec, expr_mac, mac_invoc, node_id, expr_index}; export add_new_extension; @@ -29,7 +29,7 @@ enum matchable { match_expr(@expr), match_path(@path), match_ident(ast::spanned), - match_ty(@ty), + match_ty(@Ty), match_block(ast::blk), match_exact, /* don't bind anything, just verify the AST traversal */ } diff --git a/src/libsyntax/ext/trace_macros.rs b/src/libsyntax/ext/trace_macros.rs index c2d4de1b423c4..0c7d408db7cc3 100644 --- a/src/libsyntax/ext/trace_macros.rs +++ b/src/libsyntax/ext/trace_macros.rs @@ -2,7 +2,7 @@ use codemap::span; use ext::base::ext_ctxt; use ast::tt_delim; use parse::lexer::{new_tt_reader, reader}; -use parse::parser::{parser, SOURCE_FILE}; +use parse::parser::{Parser, SOURCE_FILE}; use parse::common::parser_common; fn expand_trace_macros(cx: ext_ctxt, sp: span, @@ -13,7 +13,7 @@ fn expand_trace_macros(cx: ext_ctxt, sp: span, let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic, cx.parse_sess().interner, None, tt); let rdr = tt_rdr as reader; - let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE); + let rust_parser = Parser(sess, cfg, rdr.dup(), SOURCE_FILE); let arg = cx.str_of(rust_parser.parse_ident()); match arg { @@ -21,7 +21,7 @@ fn expand_trace_macros(cx: ext_ctxt, sp: span, ~"false" => cx.set_trace_macros(false), _ => cx.span_fatal(sp, ~"trace_macros! only accepts `true` or `false`") } - let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE); + let rust_parser = Parser(sess, cfg, rdr.dup(), SOURCE_FILE); let result = rust_parser.parse_expr(); base::mr_expr(result) } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 0b2070c8c86df..17122b85fb360 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -1,9 +1,9 @@ // Earley-like parser for macros. use parse::token; -use parse::token::{token, EOF, to_str, nonterminal}; +use parse::token::{Token, EOF, to_str, nonterminal}; use parse::lexer::*; //resolve bug? //import parse::lexer::{reader, tt_reader, tt_reader_as_reader}; -use parse::parser::{parser,SOURCE_FILE}; +use parse::parser::{Parser, SOURCE_FILE}; //import parse::common::parser_common; use parse::common::*; //resolve bug? use parse::parse_sess; @@ -97,7 +97,7 @@ fn is_some(&&mpu: matcher_pos_up) -> bool { type matcher_pos = ~{ elts: ~[ast::matcher], // maybe should be /&? Need to understand regions. 
- sep: Option, + sep: Option, mut idx: uint, mut up: matcher_pos_up, // mutable for swapping only matches: ~[DVec<@named_match>], @@ -122,7 +122,7 @@ fn count_names(ms: &[matcher]) -> uint { } #[allow(non_implicitly_copyable_typarams)] -fn initial_matcher_pos(ms: ~[matcher], sep: Option, lo: uint) +fn initial_matcher_pos(ms: ~[matcher], sep: Option, lo: uint) -> matcher_pos { let mut match_idx_hi = 0u; for ms.each() |elt| { @@ -354,7 +354,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) } rdr.next_token(); } else /* bb_eis.len() == 1 */ { - let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE); + let rust_parser = Parser(sess, cfg, rdr.dup(), SOURCE_FILE); let ei = bb_eis.pop(); match ei.elts[ei.idx].node { @@ -381,7 +381,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) } } -fn parse_nt(p: parser, name: ~str) -> nonterminal { +fn parse_nt(p: Parser, name: ~str) -> nonterminal { match name { ~"item" => match p.parse_item(~[]) { Some(i) => token::nt_item(i), diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 52369ad7207f0..31bc375a76d56 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -4,7 +4,7 @@ use ast::{ident, matcher_, matcher, match_tok, match_nonterminal, match_seq, tt_delim}; use parse::lexer::{new_tt_reader, reader}; use parse::token::{FAT_ARROW, SEMI, LBRACE, RBRACE, nt_matchers, nt_tt}; -use parse::parser::{parser, SOURCE_FILE}; +use parse::parser::{Parser, SOURCE_FILE}; use macro_parser::{parse, parse_or_else, success, failure, named_match, matched_seq, matched_nonterminal, error}; use std::map::HashMap; @@ -86,7 +86,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, // rhs has holes ( `$id` and `$(...)` that need filled) let trncbr = new_tt_reader(s_d, itr, Some(named_matches), ~[rhs]); - let p = parser(cx.parse_sess(), cx.cfg(), + let p = Parser(cx.parse_sess(), cx.cfg(), trncbr as reader, SOURCE_FILE); let e = p.parse_expr(); return mr_expr(e); @@ -111,4 +111,4 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, name: *cx.parse_sess().interner.get(name), ext: expr_tt({expander: exp, span: Some(sp)}) }); -} \ No newline at end of file +} diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index a8a41cca6cbd7..238f9db6ac537 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -2,8 +2,7 @@ use diagnostic::span_handler; use ast::{token_tree, tt_delim, tt_tok, tt_seq, tt_nonterminal,ident}; use macro_parser::{named_match, matched_seq, matched_nonterminal}; use codemap::span; -use parse::token::{EOF, INTERPOLATED, IDENT, token, nt_ident, - ident_interner}; +use parse::token::{EOF, INTERPOLATED, IDENT, Token, nt_ident, ident_interner}; use std::map::HashMap; export tt_reader, new_tt_reader, dup_tt_reader, tt_next_token; @@ -19,7 +18,7 @@ type tt_frame = @{ readme: ~[ast::token_tree], mut idx: uint, dotdotdoted: bool, - sep: Option, + sep: Option, up: tt_frame_up, }; @@ -32,7 +31,7 @@ type tt_reader = @{ mut repeat_idx: ~[uint], mut repeat_len: ~[uint], /* cached: */ - mut cur_tok: token, + mut cur_tok: Token, mut cur_span: span }; @@ -134,7 +133,7 @@ fn lockstep_iter_size(t: token_tree, r: tt_reader) -> lis { } -fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} { +fn tt_next_token(&&r: tt_reader) -> {tok: Token, sp: span} { let ret_val = { tok: r.cur_tok, sp: r.cur_span }; while r.cur.idx >= r.cur.readme.len() { /* done with 
this set; pop or repeat? */ diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 68d9cd80430d3..564debefa2539 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -33,7 +33,7 @@ trait ast_fold { fn fold_pat(&&v: @pat) -> @pat; fn fold_decl(&&v: @decl) -> @decl; fn fold_expr(&&v: @expr) -> @expr; - fn fold_ty(&&v: @ty) -> @ty; + fn fold_ty(&&v: @Ty) -> @Ty; fn fold_mod(_mod) -> _mod; fn fold_foreign_mod(foreign_mod) -> foreign_mod; fn fold_variant(variant) -> variant; @@ -728,7 +728,7 @@ impl ast_fold_precursor: ast_fold { node: n, span: self.new_span(s)}; } - fn fold_ty(&&x: @ty) -> @ty { + fn fold_ty(&&x: @Ty) -> @Ty { let (n, s) = self.fold_ty(x.node, x.span, self as ast_fold); return @{id: self.new_id(x.id), node: n, span: self.new_span(s)}; } diff --git a/src/libsyntax/parse.rs b/src/libsyntax/parse.rs index 2c04b2a14190c..e38ee7ff03763 100644 --- a/src/libsyntax/parse.rs +++ b/src/libsyntax/parse.rs @@ -12,7 +12,7 @@ export parse_expr_from_source_str, parse_item_from_source_str; export parse_stmt_from_source_str; export parse_from_source_str; -use parser::parser; +use parser::Parser; use attr::parser_attr; use common::parser_common; use ast::node_id; @@ -22,7 +22,7 @@ use lexer::{reader, string_reader}; use parse::token::{ident_interner, mk_ident_interner}; type parse_sess = @{ - cm: codemap::codemap, + cm: codemap::CodeMap, mut next_id: node_id, span_diagnostic: span_handler, interner: @ident_interner, @@ -40,7 +40,7 @@ fn new_parse_sess(demitter: Option) -> parse_sess { mut chpos: 0u, mut byte_pos: 0u}; } -fn new_parse_sess_special_handler(sh: span_handler, cm: codemap::codemap) +fn new_parse_sess_special_handler(sh: span_handler, cm: codemap::CodeMap) -> parse_sess { return @{cm: cm, mut next_id: 1, @@ -142,7 +142,7 @@ fn parse_stmt_from_source_str(name: ~str, source: @~str, cfg: ast::crate_cfg, return r; } -fn parse_from_source_str(f: fn (p: parser) -> T, +fn parse_from_source_str(f: fn (p: Parser) -> T, name: ~str, ss: codemap::file_substr, source: @~str, cfg: ast::crate_cfg, sess: parse_sess) @@ -170,19 +170,19 @@ fn next_node_id(sess: parse_sess) -> node_id { fn new_parser_etc_from_source_str(sess: parse_sess, cfg: ast::crate_cfg, +name: ~str, +ss: codemap::file_substr, - source: @~str) -> (parser, string_reader) { + source: @~str) -> (Parser, string_reader) { let ftype = parser::SOURCE_FILE; let filemap = codemap::new_filemap_w_substr (name, ss, source, sess.chpos, sess.byte_pos); sess.cm.files.push(filemap); let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap, sess.interner); - return (parser(sess, cfg, srdr as reader, ftype), srdr); + return (Parser(sess, cfg, srdr as reader, ftype), srdr); } fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg, +name: ~str, +ss: codemap::file_substr, - source: @~str) -> parser { + source: @~str) -> Parser { let (p, _) = new_parser_etc_from_source_str(sess, cfg, name, ss, source); move p } @@ -190,7 +190,7 @@ fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg, fn new_parser_etc_from_file(sess: parse_sess, cfg: ast::crate_cfg, path: &Path, ftype: parser::file_type) -> - (parser, string_reader) { + (Parser, string_reader) { let res = io::read_whole_file_str(path); match res { result::Ok(_) => { /* Continue. 
*/ } @@ -202,18 +202,18 @@ fn new_parser_etc_from_file(sess: parse_sess, cfg: ast::crate_cfg, sess.cm.files.push(filemap); let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap, sess.interner); - return (parser(sess, cfg, srdr as reader, ftype), srdr); + return (Parser(sess, cfg, srdr as reader, ftype), srdr); } fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, path: &Path, - ftype: parser::file_type) -> parser { + ftype: parser::file_type) -> Parser { let (p, _) = new_parser_etc_from_file(sess, cfg, path, ftype); move p } fn new_parser_from_tt(sess: parse_sess, cfg: ast::crate_cfg, - tt: ~[ast::token_tree]) -> parser { + tt: ~[ast::token_tree]) -> Parser { let trdr = lexer::new_tt_reader(sess.span_diagnostic, sess.interner, None, tt); - return parser(sess, cfg, trdr as reader, parser::SOURCE_FILE) + return Parser(sess, cfg, trdr as reader, parser::SOURCE_FILE) } diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 9be4909814b13..42101a431d6c6 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -23,7 +23,7 @@ trait parser_attr { fn parse_optional_meta() -> ~[@ast::meta_item]; } -impl parser: parser_attr { +impl Parser: parser_attr { fn parse_outer_attrs_or_ext(first_item_attrs: ~[ast::attribute]) -> attr_or_ext diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index c8c30ee7fa9cb..50c22c08f4f88 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -1,63 +1,63 @@ use std::map::{HashMap}; use ast_util::spanned; -use parser::parser; +use parser::Parser; use lexer::reader; type seq_sep = { - sep: Option, + sep: Option, trailing_sep_allowed: bool }; -fn seq_sep_trailing_disallowed(t: token::token) -> seq_sep { +fn seq_sep_trailing_disallowed(t: token::Token) -> seq_sep { return {sep: option::Some(t), trailing_sep_allowed: false}; } -fn seq_sep_trailing_allowed(t: token::token) -> seq_sep { +fn seq_sep_trailing_allowed(t: token::Token) -> seq_sep { return {sep: option::Some(t), trailing_sep_allowed: true}; } fn seq_sep_none() -> seq_sep { return {sep: option::None, trailing_sep_allowed: false}; } -fn token_to_str(reader: reader, ++token: token::token) -> ~str { +fn token_to_str(reader: reader, ++token: token::Token) -> ~str { token::to_str(reader.interner(), token) } trait parser_common { - fn unexpected_last(t: token::token) -> !; + fn unexpected_last(t: token::Token) -> !; fn unexpected() -> !; - fn expect(t: token::token); + fn expect(t: token::Token); fn parse_ident() -> ast::ident; fn parse_path_list_ident() -> ast::path_list_ident; fn parse_value_ident() -> ast::ident; - fn eat(tok: token::token) -> bool; + fn eat(tok: token::Token) -> bool; // A sanity check that the word we are asking for is a known keyword fn require_keyword(word: ~str); - fn token_is_keyword(word: ~str, ++tok: token::token) -> bool; + fn token_is_keyword(word: ~str, ++tok: token::Token) -> bool; fn is_keyword(word: ~str) -> bool; - fn is_any_keyword(tok: token::token) -> bool; + fn is_any_keyword(tok: token::Token) -> bool; fn eat_keyword(word: ~str) -> bool; fn expect_keyword(word: ~str); fn expect_gt(); - fn parse_seq_to_before_gt(sep: Option, - f: fn(parser) -> T) -> ~[T]; - fn parse_seq_to_gt(sep: Option, - f: fn(parser) -> T) -> ~[T]; - fn parse_seq_lt_gt(sep: Option, - f: fn(parser) -> T) -> spanned<~[T]>; - fn parse_seq_to_end(ket: token::token, sep: seq_sep, - f: fn(parser) -> T) -> ~[T]; - fn parse_seq_to_before_end(ket: token::token, sep: seq_sep, - f: fn(parser) -> T) -> ~[T]; - fn 
parse_unspanned_seq(bra: token::token, - ket: token::token, + fn parse_seq_to_before_gt(sep: Option, + f: fn(Parser) -> T) -> ~[T]; + fn parse_seq_to_gt(sep: Option, + f: fn(Parser) -> T) -> ~[T]; + fn parse_seq_lt_gt(sep: Option, + f: fn(Parser) -> T) -> spanned<~[T]>; + fn parse_seq_to_end(ket: token::Token, sep: seq_sep, + f: fn(Parser) -> T) -> ~[T]; + fn parse_seq_to_before_end(ket: token::Token, sep: seq_sep, + f: fn(Parser) -> T) -> ~[T]; + fn parse_unspanned_seq(bra: token::Token, + ket: token::Token, sep: seq_sep, - f: fn(parser) -> T) -> ~[T]; - fn parse_seq(bra: token::token, ket: token::token, sep: seq_sep, - f: fn(parser) -> T) -> spanned<~[T]>; + f: fn(Parser) -> T) -> ~[T]; + fn parse_seq(bra: token::Token, ket: token::Token, sep: seq_sep, + f: fn(Parser) -> T) -> spanned<~[T]>; } -impl parser: parser_common { - fn unexpected_last(t: token::token) -> ! { +impl Parser: parser_common { + fn unexpected_last(t: token::Token) -> ! { self.span_fatal( copy self.last_span, ~"unexpected token: `" + token_to_str(self.reader, t) + ~"`"); @@ -68,7 +68,7 @@ impl parser: parser_common { + token_to_str(self.reader, self.token) + ~"`"); } - fn expect(t: token::token) { + fn expect(t: token::Token) { if self.token == t { self.bump(); } else { @@ -104,7 +104,7 @@ impl parser: parser_common { return self.parse_ident(); } - fn eat(tok: token::token) -> bool { + fn eat(tok: token::Token) -> bool { return if self.token == tok { self.bump(); true } else { false }; } @@ -117,14 +117,14 @@ impl parser: parser_common { } } - fn token_is_word(word: ~str, ++tok: token::token) -> bool { + fn token_is_word(word: ~str, ++tok: token::Token) -> bool { match tok { token::IDENT(sid, false) => { *self.id_to_str(sid) == word } _ => { false } } } - fn token_is_keyword(word: ~str, ++tok: token::token) -> bool { + fn token_is_keyword(word: ~str, ++tok: token::Token) -> bool { self.require_keyword(word); self.token_is_word(word, tok) } @@ -133,7 +133,7 @@ impl parser: parser_common { self.token_is_keyword(word, self.token) } - fn is_any_keyword(tok: token::token) -> bool { + fn is_any_keyword(tok: token::Token) -> bool { match tok { token::IDENT(sid, false) => { self.keywords.contains_key_ref(self.id_to_str(sid)) @@ -216,8 +216,8 @@ impl parser: parser_common { } } - fn parse_seq_to_before_gt(sep: Option, - f: fn(parser) -> T) -> ~[T] { + fn parse_seq_to_before_gt(sep: Option, + f: fn(Parser) -> T) -> ~[T] { let mut first = true; let mut v = ~[]; while self.token != token::GT @@ -235,16 +235,16 @@ impl parser: parser_common { return v; } - fn parse_seq_to_gt(sep: Option, - f: fn(parser) -> T) -> ~[T] { + fn parse_seq_to_gt(sep: Option, + f: fn(Parser) -> T) -> ~[T] { let v = self.parse_seq_to_before_gt(sep, f); self.expect_gt(); return v; } - fn parse_seq_lt_gt(sep: Option, - f: fn(parser) -> T) -> spanned<~[T]> { + fn parse_seq_lt_gt(sep: Option, + f: fn(Parser) -> T) -> spanned<~[T]> { let lo = self.span.lo; self.expect(token::LT); let result = self.parse_seq_to_before_gt::(sep, f); @@ -253,16 +253,16 @@ impl parser: parser_common { return spanned(lo, hi, result); } - fn parse_seq_to_end(ket: token::token, sep: seq_sep, - f: fn(parser) -> T) -> ~[T] { + fn parse_seq_to_end(ket: token::Token, sep: seq_sep, + f: fn(Parser) -> T) -> ~[T] { let val = self.parse_seq_to_before_end(ket, sep, f); self.bump(); return val; } - fn parse_seq_to_before_end(ket: token::token, sep: seq_sep, - f: fn(parser) -> T) -> ~[T] { + fn parse_seq_to_before_end(ket: token::Token, sep: seq_sep, + f: fn(Parser) -> T) -> ~[T] { let mut 
first: bool = true; let mut v: ~[T] = ~[]; while self.token != ket { @@ -279,10 +279,10 @@ impl parser: parser_common { return v; } - fn parse_unspanned_seq(bra: token::token, - ket: token::token, + fn parse_unspanned_seq(bra: token::Token, + ket: token::Token, sep: seq_sep, - f: fn(parser) -> T) -> ~[T] { + f: fn(Parser) -> T) -> ~[T] { self.expect(bra); let result = self.parse_seq_to_before_end::(ket, sep, f); self.bump(); @@ -291,8 +291,8 @@ impl parser: parser_common { // NB: Do not use this function unless you actually plan to place the // spanned list in the AST. - fn parse_seq(bra: token::token, ket: token::token, sep: seq_sep, - f: fn(parser) -> T) -> spanned<~[T]> { + fn parse_seq(bra: token::Token, ket: token::Token, sep: seq_sep, + f: fn(Parser) -> T) -> spanned<~[T]> { let lo = self.span.lo; self.expect(bra); let result = self.parse_seq_to_before_end::(ket, sep, f); diff --git a/src/libsyntax/parse/eval.rs b/src/libsyntax/parse/eval.rs index c91060284910f..56c9d4de9f3cd 100644 --- a/src/libsyntax/parse/eval.rs +++ b/src/libsyntax/parse/eval.rs @@ -1,4 +1,4 @@ -use parser::{parser, SOURCE_FILE}; +use parser::{Parser, SOURCE_FILE}; use attr::parser_attr; export eval_crate_directives_to_mod; diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs index 06fcc1cf9589f..8f57d733eb51f 100644 --- a/src/libsyntax/parse/lexer.rs +++ b/src/libsyntax/parse/lexer.rs @@ -10,11 +10,11 @@ export string_reader_as_reader, tt_reader_as_reader; trait reader { fn is_eof() -> bool; - fn next_token() -> {tok: token::token, sp: span}; + fn next_token() -> {tok: token::Token, sp: span}; fn fatal(~str) -> !; fn span_diag() -> span_handler; pure fn interner() -> @token::ident_interner; - fn peek() -> {tok: token::token, sp: span}; + fn peek() -> {tok: token::Token, sp: span}; fn dup() -> reader; } @@ -28,7 +28,7 @@ type string_reader = @{ filemap: codemap::filemap, interner: @token::ident_interner, /* cached: */ - mut peek_tok: token::token, + mut peek_tok: token::Token, mut peek_span: span }; @@ -69,7 +69,7 @@ fn dup_string_reader(&&r: string_reader) -> string_reader { impl string_reader: reader { fn is_eof() -> bool { is_eof(self) } - fn next_token() -> {tok: token::token, sp: span} { + fn next_token() -> {tok: token::Token, sp: span} { let ret_val = {tok: self.peek_tok, sp: self.peek_span}; string_advance_token(self); return ret_val; @@ -79,7 +79,7 @@ impl string_reader: reader { } fn span_diag() -> span_handler { self.span_diagnostic } pure fn interner() -> @token::ident_interner { self.interner } - fn peek() -> {tok: token::token, sp: span} { + fn peek() -> {tok: token::Token, sp: span} { {tok: self.peek_tok, sp: self.peek_span} } fn dup() -> reader { dup_string_reader(self) as reader } @@ -87,7 +87,7 @@ impl string_reader: reader { impl tt_reader: reader { fn is_eof() -> bool { self.cur_tok == token::EOF } - fn next_token() -> {tok: token::token, sp: span} { + fn next_token() -> {tok: token::Token, sp: span} { /* weird resolve bug: if the following `if`, or any of its statements are removed, we get resolution errors */ if false { @@ -101,7 +101,7 @@ impl tt_reader: reader { } fn span_diag() -> span_handler { self.sp_diag } pure fn interner() -> @token::ident_interner { self.interner } - fn peek() -> {tok: token::token, sp: span} { + fn peek() -> {tok: token::Token, sp: span} { { tok: self.cur_tok, sp: self.cur_span } } fn dup() -> reader { dup_tt_reader(self) as reader } @@ -196,14 +196,14 @@ fn is_bin_digit(c: char) -> bool { return c == '0' || c == '1'; } // might return a 
sugared-doc-attr fn consume_whitespace_and_comments(rdr: string_reader) - -> Option<{tok: token::token, sp: span}> { + -> Option<{tok: token::Token, sp: span}> { while is_whitespace(rdr.curr) { bump(rdr); } return consume_any_line_comment(rdr); } // might return a sugared-doc-attr fn consume_any_line_comment(rdr: string_reader) - -> Option<{tok: token::token, sp: span}> { + -> Option<{tok: token::Token, sp: span}> { if rdr.curr == '/' { match nextch(rdr) { '/' => { @@ -246,7 +246,7 @@ fn consume_any_line_comment(rdr: string_reader) // might return a sugared-doc-attr fn consume_block_comment(rdr: string_reader) - -> Option<{tok: token::token, sp: span}> { + -> Option<{tok: token::Token, sp: span}> { // block comments starting with "/**" or "/*!" are doc-comments if rdr.curr == '*' || rdr.curr == '!' { @@ -317,7 +317,7 @@ fn scan_digits(rdr: string_reader, radix: uint) -> ~str { }; } -fn scan_number(c: char, rdr: string_reader) -> token::token { +fn scan_number(c: char, rdr: string_reader) -> token::Token { let mut num_str, base = 10u, c = c, n = nextch(rdr); if c == '0' && n == 'x' { bump(rdr); @@ -435,7 +435,7 @@ fn scan_numeric_escape(rdr: string_reader, n_hex_digits: uint) -> char { return accum_int as char; } -fn next_token_inner(rdr: string_reader) -> token::token { +fn next_token_inner(rdr: string_reader) -> token::Token { let mut accum_str = ~""; let mut c = rdr.curr; if (c >= 'a' && c <= 'z') @@ -460,7 +460,7 @@ fn next_token_inner(rdr: string_reader) -> token::token { if is_dec_digit(c) { return scan_number(c, rdr); } - fn binop(rdr: string_reader, op: token::binop) -> token::token { + fn binop(rdr: string_reader, op: token::binop) -> token::Token { bump(rdr); if rdr.curr == '=' { bump(rdr); diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index 828d498ca3c23..c0e01fb194430 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -10,7 +10,7 @@ removed. use codemap::span; use ast::{expr, expr_lit, lit_nil}; use ast_util::{respan}; -use token::token; +use token::Token; /// The specific types of unsupported syntax pub enum ObsoleteSyntax { @@ -47,7 +47,7 @@ pub trait ObsoleteReporter { fn obsolete_expr(sp: span, kind: ObsoleteSyntax) -> @expr; } -impl parser : ObsoleteReporter { +impl Parser : ObsoleteReporter { /// Reports an obsolete syntax non-fatal error. 
fn obsolete(sp: span, kind: ObsoleteSyntax) { let (kind_str, desc) = match kind { @@ -121,7 +121,7 @@ impl parser : ObsoleteReporter { } } - fn token_is_obsolete_ident(ident: &str, token: token) -> bool { + fn token_is_obsolete_ident(ident: &str, token: Token) -> bool { match token { token::IDENT(copy sid, _) => { str::eq_slice(*self.id_to_str(sid), ident) diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 973822ddff9b9..e29620a7e79dd 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -6,7 +6,7 @@ use std::map::HashMap; use token::{can_begin_expr, is_ident, is_ident_or_path, is_plain_ident, INTERPOLATED, special_idents}; use codemap::{span,fss_none}; -use util::interner::interner; +use util::interner::Interner; use ast_util::{spanned, respan, mk_sp, ident_to_path, operator_prec}; use lexer::reader; use prec::{as_prec, token_to_binop}; @@ -58,7 +58,7 @@ use ast::{_mod, add, alt_check, alt_exhaustive, arg, arm, attribute, stmt_semi, struct_def, struct_field, struct_variant_kind, subtract, sty_box, sty_by_ref, sty_region, sty_static, sty_uniq, sty_value, token_tree, trait_method, trait_ref, tt_delim, tt_seq, - tt_tok, tt_nonterminal, tuple_variant_kind, ty, ty_, ty_bot, + tt_tok, tt_nonterminal, tuple_variant_kind, Ty, ty_, ty_bot, ty_box, ty_field, ty_fn, ty_infer, ty_mac, ty_method, ty_nil, ty_param, ty_param_bound, ty_path, ty_ptr, ty_rec, ty_rptr, ty_tup, ty_u32, ty_uniq, ty_vec, ty_fixed_length, type_value_ns, @@ -71,7 +71,7 @@ use ast::{_mod, add, alt_check, alt_exhaustive, arg, arm, attribute, expr_vstore_uniq}; export file_type; -export parser; +export Parser; export CRATE_FILE; export SOURCE_FILE; @@ -190,14 +190,14 @@ pure fn maybe_append(+lhs: ~[attribute], rhs: Option<~[attribute]>) /* ident is handled by common.rs */ -fn parser(sess: parse_sess, cfg: ast::crate_cfg, - +rdr: reader, ftype: file_type) -> parser { +fn Parser(sess: parse_sess, cfg: ast::crate_cfg, + +rdr: reader, ftype: file_type) -> Parser { let tok0 = rdr.next_token(); let span0 = tok0.sp; let interner = rdr.interner(); - parser { + Parser { reader: move rdr, interner: move interner, sess: sess, @@ -223,14 +223,14 @@ fn parser(sess: parse_sess, cfg: ast::crate_cfg, } } -struct parser { +struct Parser { sess: parse_sess, cfg: crate_cfg, file_type: file_type, - mut token: token::token, + mut token: token::Token, mut span: span, mut last_span: span, - mut buffer: [mut {tok: token::token, sp: span}]/4, + mut buffer: [mut {tok: token::Token, sp: span}]/4, mut buffer_start: int, mut buffer_end: int, mut restriction: restriction, @@ -247,7 +247,7 @@ struct parser { drop {} /* do not copy the parser; its state is tied to outside state */ } -impl parser { +impl Parser { fn bump() { self.last_span = self.span; let next = if self.buffer_start == self.buffer_end { @@ -260,7 +260,7 @@ impl parser { self.token = next.tok; self.span = next.sp; } - fn swap(next: token::token, lo: uint, hi: uint) { + fn swap(next: token::Token, lo: uint, hi: uint) { self.token = next; self.span = mk_sp(lo, hi); } @@ -270,7 +270,7 @@ impl parser { } return (4 - self.buffer_start) + self.buffer_end; } - fn look_ahead(distance: uint) -> token::token { + fn look_ahead(distance: uint) -> token::Token { let dist = distance as int; while self.buffer_length() < dist { self.buffer[self.buffer_end] = self.reader.next_token(); @@ -411,7 +411,7 @@ impl parser { }); } - fn parse_ret_ty() -> (ret_style, @ty) { + fn parse_ret_ty() -> (ret_style, @Ty) { return if self.eat(token::RARROW) { let lo = 
self.span.lo; if self.eat(token::NOT) { @@ -472,7 +472,7 @@ impl parser { self.region_from_name(name) } - fn parse_ty(colons_before_params: bool) -> @ty { + fn parse_ty(colons_before_params: bool) -> @Ty { maybe_whole!(self, nt_ty); let lo = self.span.lo; @@ -609,10 +609,10 @@ impl parser { } } - fn parse_capture_item_or(parse_arg_fn: fn(parser) -> arg_or_capture_item) + fn parse_capture_item_or(parse_arg_fn: fn(Parser) -> arg_or_capture_item) -> arg_or_capture_item { - fn parse_capture_item(p:parser, is_move: bool) -> capture_item { + fn parse_capture_item(p:Parser, is_move: bool) -> capture_item { let sp = mk_sp(p.span.lo, p.span.hi); let ident = p.parse_ident(); @{id: p.get_id(), is_move: is_move, name: ident, span: sp} @@ -728,7 +728,7 @@ impl parser { } } - fn lit_from_token(tok: token::token) -> lit_ { + fn lit_from_token(tok: token::Token) -> lit_ { match tok { token::LIT_INT(i, it) => lit_int(i, it), token::LIT_UINT(u, ut) => lit_uint(u, ut), @@ -760,8 +760,8 @@ impl parser { } fn parse_path_without_tps_( - parse_ident: fn(parser) -> ident, - parse_last_ident: fn(parser) -> ident) -> @path { + parse_ident: fn(Parser) -> ident, + parse_last_ident: fn(Parser) -> ident) -> @path { maybe_whole!(self, nt_path); let lo = self.span.lo; @@ -842,7 +842,7 @@ impl parser { } } - fn parse_field(sep: token::token) -> field { + fn parse_field(sep: token::Token) -> field { let lo = self.span.lo; let m = self.parse_mutability(); let i = self.parse_ident(); @@ -1220,7 +1220,7 @@ impl parser { return e; } - fn parse_sep_and_zerok() -> (Option, bool) { + fn parse_sep_and_zerok() -> (Option, bool) { if self.token == token::BINOP(token::STAR) || self.token == token::BINOP(token::PLUS) { let zerok = self.token == token::BINOP(token::STAR); @@ -1243,7 +1243,7 @@ impl parser { fn parse_token_tree() -> token_tree { maybe_whole!(deref self, nt_tt); - fn parse_tt_tok(p: parser, delim_ok: bool) -> token_tree { + fn parse_tt_tok(p: Parser, delim_ok: bool) -> token_tree { match p.token { token::RPAREN | token::RBRACE | token::RBRACKET if !delim_ok => { @@ -1310,8 +1310,8 @@ impl parser { // This goofy function is necessary to correctly match parens in matchers. // Otherwise, `$( ( )` would be a valid matcher, and `$( () )` would be // invalid. It's similar to common::parse_seq. 
- fn parse_matcher_subseq(name_idx: @mut uint, bra: token::token, - ket: token::token) -> ~[matcher] { + fn parse_matcher_subseq(name_idx: @mut uint, bra: token::Token, + ket: token::Token) -> ~[matcher] { let mut ret_val = ~[]; let mut lparens = 0u; @@ -2158,7 +2158,7 @@ impl parser { fn parse_stmt(+first_item_attrs: ~[attribute]) -> @stmt { maybe_whole!(self, nt_stmt); - fn check_expected_item(p: parser, current_attrs: ~[attribute]) { + fn check_expected_item(p: Parser, current_attrs: ~[attribute]) { // If we have attributes then we should have an item if vec::is_not_empty(current_attrs) { p.fatal(~"expected item"); @@ -2221,7 +2221,7 @@ impl parser { maybe_whole!(pair_empty self, nt_block); - fn maybe_parse_inner_attrs_and_next(p: parser, parse_attrs: bool) -> + fn maybe_parse_inner_attrs_and_next(p: Parser, parse_attrs: bool) -> {inner: ~[attribute], next: ~[attribute]} { if parse_attrs { p.parse_inner_attrs_and_next() @@ -2386,7 +2386,7 @@ impl parser { } else { ~[] } } - fn parse_fn_decl(parse_arg_fn: fn(parser) -> arg_or_capture_item) + fn parse_fn_decl(parse_arg_fn: fn(Parser) -> arg_or_capture_item) -> (fn_decl, capture_clause) { let args_or_capture_items: ~[arg_or_capture_item] = @@ -2420,11 +2420,11 @@ impl parser { } fn parse_fn_decl_with_self(parse_arg_fn: - fn(parser) -> arg_or_capture_item) + fn(Parser) -> arg_or_capture_item) -> (self_ty, fn_decl, capture_clause) { fn maybe_parse_self_ty(cnstr: fn(+v: mutability) -> ast::self_ty_, - p: parser) -> ast::self_ty_ { + p: Parser) -> ast::self_ty_ { // We need to make sure it isn't a mode or a type if p.token_is_keyword(~"self", p.look_ahead(1)) || ((p.token_is_keyword(~"const", p.look_ahead(1)) || @@ -2604,7 +2604,7 @@ impl parser { // Parses four variants (with the region/type params always optional): // impl ~[T] : to_str { ... 
} fn parse_item_impl() -> item_info { - fn wrap_path(p: parser, pt: @path) -> @ty { + fn wrap_path(p: Parser, pt: @path) -> @Ty { @{id: p.get_id(), node: ty_path(pt, p.get_id()), span: pt.span} } @@ -2664,7 +2664,7 @@ impl parser { ref_id: self.get_id(), impl_id: self.get_id()} } - fn parse_trait_ref_list(ket: token::token) -> ~[@trait_ref] { + fn parse_trait_ref_list(ket: token::Token) -> ~[@trait_ref] { self.parse_seq_to_before_end( ket, seq_sep_trailing_disallowed(token::COMMA), |p| p.parse_trait_ref()) @@ -2756,7 +2756,7 @@ impl parser { None) } - fn token_is_pound_or_doc_comment(++tok: token::token) -> bool { + fn token_is_pound_or_doc_comment(++tok: token::Token) -> bool { match tok { token::POUND | token::DOC_COMMENT(_) => true, _ => false @@ -2841,7 +2841,7 @@ impl parser { self.eat_keyword(~"static") } - fn parse_mod_items(term: token::token, + fn parse_mod_items(term: token::Token, +first_item_attrs: ~[attribute]) -> _mod { // Shouldn't be any view items since we've already parsed an item attr let {attrs_remaining, view_items, items: starting_items} = @@ -3222,7 +3222,7 @@ impl parser { } } - fn fn_expr_lookahead(tok: token::token) -> bool { + fn fn_expr_lookahead(tok: token::Token) -> bool { match tok { token::LPAREN | token::AT | token::TILDE | token::BINOP(_) => true, _ => false @@ -3608,7 +3608,7 @@ impl parser { return self.fatal(~"expected crate directive"); } - fn parse_crate_directives(term: token::token, + fn parse_crate_directives(term: token::Token, first_outer_attr: ~[attribute]) -> ~[@crate_directive] { diff --git a/src/libsyntax/parse/prec.rs b/src/libsyntax/parse/prec.rs index 668301db6201e..3fd905cb8ecec 100644 --- a/src/libsyntax/parse/prec.rs +++ b/src/libsyntax/parse/prec.rs @@ -3,7 +3,7 @@ export unop_prec; export token_to_binop; use token::*; -use token::token; +use token::Token; use ast::*; /// Unary operators have higher precedence than binary @@ -19,7 +19,7 @@ const as_prec: uint = 11u; * Maps a token to a record specifying the corresponding binary * operator and its precedence */ -fn token_to_binop(tok: token) -> Option { +fn token_to_binop(tok: Token) -> Option { match tok { BINOP(STAR) => Some(mul), BINOP(SLASH) => Some(div), diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index a7d439b8ce6d0..5151fd1bac837 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -1,5 +1,5 @@ use util::interner; -use util::interner::interner; +use util::interner::Interner; use std::map::HashMap; #[auto_serialize] @@ -19,7 +19,7 @@ enum binop { #[auto_serialize] #[auto_deserialize] -enum token { +enum Token { /* Expression-operator symbols. */ EQ, LT, @@ -84,7 +84,7 @@ enum nonterminal { nt_stmt(@ast::stmt), nt_pat( @ast::pat), nt_expr(@ast::expr), - nt_ty( @ast::ty), + nt_ty( @ast::Ty), nt_ident(ast::ident, bool), nt_path(@ast::path), nt_tt( @ast::token_tree), //needs @ed to break a circularity @@ -106,7 +106,7 @@ fn binop_to_str(o: binop) -> ~str { } } -fn to_str(in: @ident_interner, t: token) -> ~str { +fn to_str(in: @ident_interner, t: Token) -> ~str { match t { EQ => ~"=", LT => ~"<", @@ -192,7 +192,7 @@ fn to_str(in: @ident_interner, t: token) -> ~str { } } -pure fn can_begin_expr(t: token) -> bool { +pure fn can_begin_expr(t: Token) -> bool { match t { LPAREN => true, LBRACE => true, @@ -223,7 +223,7 @@ pure fn can_begin_expr(t: token) -> bool { } /// what's the opposite delimiter? 
-fn flip_delimiter(t: token::token) -> token::token { +fn flip_delimiter(t: token::Token) -> token::Token { match t { token::LPAREN => token::RPAREN, token::LBRACE => token::RBRACE, @@ -237,7 +237,7 @@ fn flip_delimiter(t: token::token) -> token::token { -fn is_lit(t: token) -> bool { +fn is_lit(t: Token) -> bool { match t { LIT_INT(_, _) => true, LIT_UINT(_, _) => true, @@ -248,22 +248,22 @@ fn is_lit(t: token) -> bool { } } -pure fn is_ident(t: token) -> bool { +pure fn is_ident(t: Token) -> bool { match t { IDENT(_, _) => true, _ => false } } -pure fn is_ident_or_path(t: token) -> bool { +pure fn is_ident_or_path(t: Token) -> bool { match t { IDENT(_, _) | INTERPOLATED(nt_path(*)) => true, _ => false } } -pure fn is_plain_ident(t: token) -> bool { +pure fn is_plain_ident(t: Token) -> bool { match t { IDENT(_, false) => true, _ => false } } -pure fn is_bar(t: token) -> bool { +pure fn is_bar(t: Token) -> bool { match t { BINOP(OR) | OROR => true, _ => false } } @@ -314,7 +314,7 @@ mod special_idents { } struct ident_interner { - priv interner: util::interner::interner<@~str>, + priv interner: util::interner::Interner<@~str>, } impl ident_interner { @@ -457,8 +457,8 @@ impl binop : cmp::Eq { pure fn ne(other: &binop) -> bool { !self.eq(other) } } -impl token : cmp::Eq { - pure fn eq(other: &token) -> bool { +impl Token : cmp::Eq { + pure fn eq(other: &Token) -> bool { match self { EQ => { match (*other) { @@ -720,7 +720,7 @@ impl token : cmp::Eq { } } } - pure fn ne(other: &token) -> bool { !self.eq(other) } + pure fn ne(other: &Token) -> bool { !self.eq(other) } } // Local Variables: diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index b98014f421bb3..5e37f7e18ece9 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -1,5 +1,5 @@ use parse::{comments, lexer, token}; -use codemap::codemap; +use codemap::CodeMap; use pp::{break_offset, word, printer, space, zerobreak, hardbreak, breaks}; use pp::{consistent, inconsistent, eof}; use ast::{required, provided}; @@ -24,7 +24,7 @@ fn no_ann() -> pp_ann { type ps = @{s: pp::printer, - cm: Option, + cm: Option, intr: @token::ident_interner, comments: Option<~[comments::cmnt]>, literals: Option<~[comments::lit]>, @@ -45,7 +45,7 @@ fn end(s: ps) { fn rust_printer(writer: io::Writer, intr: @ident_interner) -> ps { return @{s: pp::mk_printer(writer, default_columns), - cm: None::, + cm: None::, intr: intr, comments: None::<~[comments::cmnt]>, literals: None::<~[comments::lit]>, @@ -63,7 +63,7 @@ const default_columns: uint = 78u; // Requires you to pass an input filename and reader so that // it can scan the input text for comments and literals to // copy forward. 
-fn print_crate(cm: codemap, intr: @ident_interner, +fn print_crate(cm: CodeMap, intr: @ident_interner, span_diagnostic: diagnostic::span_handler, crate: @ast::crate, filename: ~str, in: io::Reader, out: io::Writer, ann: pp_ann, is_expanded: bool) { @@ -91,7 +91,7 @@ fn print_crate_(s: ps, &&crate: @ast::crate) { eof(s.s); } -fn ty_to_str(ty: @ast::ty, intr: @ident_interner) -> ~str { +fn ty_to_str(ty: @ast::Ty, intr: @ident_interner) -> ~str { to_str(ty, print_type, intr) } @@ -348,11 +348,11 @@ fn print_region(s: ps, region: @ast::region, sep: ~str) { word(s.s, sep); } -fn print_type(s: ps, &&ty: @ast::ty) { +fn print_type(s: ps, &&ty: @ast::Ty) { print_type_ex(s, ty, false); } -fn print_type_ex(s: ps, &&ty: @ast::ty, print_colons: bool) { +fn print_type_ex(s: ps, &&ty: @ast::Ty, print_colons: bool) { maybe_print_comment(s, ty.span.lo); ibox(s, 0u); match ty.node { diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index 5d991bb3551c1..f564589cbe0c0 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -12,14 +12,14 @@ type hash_interner = {map: HashMap, vect: DVec}; -fn mk() -> interner { +fn mk() -> Interner { let m = map::HashMap::(); let hi: hash_interner = {map: m, vect: DVec()}; - move ((move hi) as interner::) + move ((move hi) as Interner::) } -fn mk_prefill(init: ~[T]) -> interner { +fn mk_prefill(init: ~[T]) -> Interner { let rv = mk(); for init.each() |v| { rv.intern(*v); } return rv; @@ -27,14 +27,14 @@ fn mk_prefill(init: ~[T]) -> interner { /* when traits can extend traits, we should extend index to get [] */ -trait interner { +trait Interner { fn intern(T) -> uint; fn gensym(T) -> uint; pure fn get(uint) -> T; fn len() -> uint; } -impl hash_interner: interner { +impl hash_interner: Interner { fn intern(val: T) -> uint { match self.map.find(val) { Some(idx) => return idx, diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index b402f9727dc2f..32fcbdfc7589c 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -55,7 +55,7 @@ type visitor = visit_decl: fn@(@decl, E, vt), visit_expr: fn@(@expr, E, vt), visit_expr_post: fn@(@expr, E, vt), - visit_ty: fn@(@ty, E, vt), + visit_ty: fn@(@Ty, E, vt), visit_ty_params: fn@(~[ty_param], E, vt), visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id, E, vt), visit_ty_method: fn@(ty_method, E, vt), @@ -187,9 +187,9 @@ fn visit_enum_def(enum_definition: ast::enum_def, tps: ~[ast::ty_param], } } -fn skip_ty(_t: @ty, _e: E, _v: vt) {} +fn skip_ty(_t: @Ty, _e: E, _v: vt) {} -fn visit_ty(t: @ty, e: E, v: vt) { +fn visit_ty(t: @Ty, e: E, v: vt) { match t.node { ty_box(mt) | ty_uniq(mt) | ty_vec(mt) | ty_ptr(mt) | ty_rptr(_, mt) => { @@ -490,7 +490,7 @@ type simple_visitor = visit_decl: fn@(@decl), visit_expr: fn@(@expr), visit_expr_post: fn@(@expr), - visit_ty: fn@(@ty), + visit_ty: fn@(@Ty), visit_ty_params: fn@(~[ty_param]), visit_fn: fn@(fn_kind, fn_decl, blk, span, node_id), visit_ty_method: fn@(ty_method), @@ -499,7 +499,7 @@ type simple_visitor = visit_struct_field: fn@(@struct_field), visit_struct_method: fn@(@method)}; -fn simple_ignore_ty(_t: @ty) {} +fn simple_ignore_ty(_t: @Ty) {} fn default_simple_visitor() -> simple_visitor { return @{visit_mod: fn@(_m: _mod, _sp: span, _id: node_id) { }, @@ -577,7 +577,7 @@ fn mk_simple_visitor(v: simple_visitor) -> vt<()> { fn v_expr_post(f: fn@(@expr), ex: @expr, &&_e: (), _v: vt<()>) { f(ex); } - fn v_ty(f: fn@(@ty), ty: @ty, &&e: (), v: vt<()>) { + fn v_ty(f: fn@(@Ty), ty: @Ty, &&e: (), v: vt<()>) { f(ty); 
visit_ty(ty, e, v); } diff --git a/src/rustc/back/link.rs b/src/rustc/back/link.rs index 67edf6a32ba5c..61fd68c193a6a 100644 --- a/src/rustc/back/link.rs +++ b/src/rustc/back/link.rs @@ -1,6 +1,6 @@ use libc::{c_int, c_uint, c_char}; use driver::session; -use session::session; +use session::Session; use lib::llvm::llvm; use syntax::attr; use middle::ty; @@ -33,14 +33,14 @@ impl output_type : cmp::Eq { pure fn ne(other: &output_type) -> bool { !self.eq(other) } } -fn llvm_err(sess: session, msg: ~str) -> ! unsafe { +fn llvm_err(sess: Session, msg: ~str) -> ! unsafe { let cstr = llvm::LLVMRustGetLastError(); if cstr == ptr::null() { sess.fatal(msg); } else { sess.fatal(msg + ~": " + str::raw::from_c_str(cstr)); } } -fn WriteOutputFile(sess:session, +fn WriteOutputFile(sess: Session, PM: lib::llvm::PassManagerRef, M: ModuleRef, Triple: *c_char, // FIXME: When #2334 is fixed, change @@ -69,7 +69,7 @@ mod jit { env: *(), } - fn exec(sess: session, + fn exec(sess: Session, pm: PassManagerRef, m: ModuleRef, opt: c_int, @@ -131,7 +131,7 @@ mod write { return false; } - fn run_passes(sess: session, llmod: ModuleRef, output: &Path) { + fn run_passes(sess: Session, llmod: ModuleRef, output: &Path) { let opts = sess.opts; if sess.time_llvm_passes() { llvm::LLVMRustEnableTimePasses(); } let mut pm = mk_pass_manager(); @@ -384,7 +384,7 @@ mod write { * */ -fn build_link_meta(sess: session, c: ast::crate, output: &Path, +fn build_link_meta(sess: Session, c: ast::crate, output: &Path, symbol_hasher: &hash::State) -> link_meta { type provided_metas = @@ -392,7 +392,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: &Path, vers: Option<~str>, cmh_items: ~[@ast::meta_item]}; - fn provided_link_metas(sess: session, c: ast::crate) -> + fn provided_link_metas(sess: Session, c: ast::crate) -> provided_metas { let mut name: Option<~str> = None; let mut vers: Option<~str> = None; @@ -454,13 +454,13 @@ fn build_link_meta(sess: session, c: ast::crate, output: &Path, return truncated_hash_result(symbol_hasher); } - fn warn_missing(sess: session, name: ~str, default: ~str) { + fn warn_missing(sess: Session, name: ~str, default: ~str) { if !sess.building_library { return; } sess.warn(fmt!("missing crate link meta `%s`, using `%s` as default", name, default)); } - fn crate_meta_name(sess: session, _crate: ast::crate, + fn crate_meta_name(sess: Session, _crate: ast::crate, output: &Path, metas: provided_metas) -> ~str { return match metas.name { Some(v) => v, @@ -477,7 +477,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: &Path, }; } - fn crate_meta_vers(sess: session, _crate: ast::crate, + fn crate_meta_vers(sess: Session, _crate: ast::crate, metas: provided_metas) -> ~str { return match metas.vers { Some(v) => v, @@ -569,7 +569,7 @@ fn sanitize(s: ~str) -> ~str { return result; } -fn mangle(sess: session, ss: path) -> ~str { +fn mangle(sess: Session, ss: path) -> ~str { // Follow C++ namespace-mangling style let mut n = ~"_ZN"; // Begin name-sequence. 
@@ -584,7 +584,7 @@ fn mangle(sess: session, ss: path) -> ~str { n } -fn exported_name(sess: session, path: path, hash: ~str, vers: ~str) -> ~str { +fn exported_name(sess: Session, path: path, hash: ~str, vers: ~str) -> ~str { return mangle(sess, vec::append_one( vec::append_one(path, path_name(sess.ident_of(hash))), @@ -623,7 +623,7 @@ fn mangle_internal_name_by_seq(ccx: @crate_ctxt, flav: ~str) -> ~str { // If the user wants an exe generated we need to invoke // cc to link the object file with some libs -fn link_binary(sess: session, +fn link_binary(sess: Session, obj_filename: &Path, out_filename: &Path, lm: link_meta) { diff --git a/src/rustc/back/rpath.rs b/src/rustc/back/rpath.rs index 8aa7caefc7a5e..8038d7bb6ddf9 100644 --- a/src/rustc/back/rpath.rs +++ b/src/rustc/back/rpath.rs @@ -13,7 +13,7 @@ pure fn not_win32(os: session::os) -> bool { } } -fn get_rpath_flags(sess: session::session, out_filename: &Path) -> ~[~str] { +fn get_rpath_flags(sess: session::Session, out_filename: &Path) -> ~[~str] { let os = sess.targ_cfg.os; // No rpath on windows @@ -35,7 +35,7 @@ fn get_rpath_flags(sess: session::session, out_filename: &Path) -> ~[~str] { rpaths_to_flags(rpaths) } -fn get_sysroot_absolute_rt_lib(sess: session::session) -> Path { +fn get_sysroot_absolute_rt_lib(sess: session::Session) -> Path { let r = filesearch::relative_target_lib_path(sess.opts.target_triple); sess.filesearch.sysroot().push_rel(&r).push(os::dll_filename("rustrt")) } diff --git a/src/rustc/driver/driver.rs b/src/rustc/driver/driver.rs index e389f3a4bdf7d..5da8f5475ed13 100644 --- a/src/rustc/driver/driver.rs +++ b/src/rustc/driver/driver.rs @@ -1,6 +1,6 @@ // -*- rust -*- use metadata::{creader, cstore, filesearch}; -use session::{session, session_, OptLevel, No, Less, Default, Aggressive}; +use session::{Session, Session_, OptLevel, No, Less, Default, Aggressive}; use syntax::parse; use syntax::{ast, codemap}; use syntax::attr; @@ -32,7 +32,7 @@ fn source_name(input: input) -> ~str { } } -fn default_configuration(sess: session, argv0: ~str, input: input) -> +fn default_configuration(sess: Session, argv0: ~str, input: input) -> ast::crate_cfg { let libc = match sess.targ_cfg.os { session::os_win32 => ~"msvcrt.dll", @@ -70,7 +70,7 @@ fn append_configuration(cfg: ast::crate_cfg, name: ~str) -> ast::crate_cfg { } } -fn build_configuration(sess: session, argv0: ~str, input: input) -> +fn build_configuration(sess: Session, argv0: ~str, input: input) -> ast::crate_cfg { // Combine the configuration requested by the session (command line) with // some default and generated configuration items @@ -106,7 +106,7 @@ enum input { str_input(~str) } -fn parse_input(sess: session, cfg: ast::crate_cfg, input: input) +fn parse_input(sess: Session, cfg: ast::crate_cfg, input: input) -> @ast::crate { match input { file_input(file) => { @@ -145,7 +145,7 @@ impl compile_upto : cmp::Eq { pure fn ne(other: &compile_upto) -> bool { !self.eq(other) } } -fn compile_upto(sess: session, cfg: ast::crate_cfg, +fn compile_upto(sess: Session, cfg: ast::crate_cfg, input: input, upto: compile_upto, outputs: Option) -> {crate: @ast::crate, tcx: Option} { @@ -277,7 +277,7 @@ fn compile_upto(sess: session, cfg: ast::crate_cfg, return {crate: crate, tcx: Some(ty_cx)}; } -fn compile_input(sess: session, cfg: ast::crate_cfg, input: input, +fn compile_input(sess: Session, cfg: ast::crate_cfg, input: input, outdir: &Option, output: &Option) { let upto = if sess.opts.parse_only { cu_parse } @@ -287,7 +287,7 @@ fn compile_input(sess: session, cfg: 
ast::crate_cfg, input: input, compile_upto(sess, cfg, input, upto, Some(outputs)); } -fn pretty_print_input(sess: session, cfg: ast::crate_cfg, input: input, +fn pretty_print_input(sess: Session, cfg: ast::crate_cfg, input: input, ppm: pp_mode) { fn ann_paren_for_expr(node: pprust::ann_node) { match node { @@ -571,7 +571,7 @@ fn build_session_options(binary: ~str, } fn build_session(sopts: @session::options, - demitter: diagnostic::emitter) -> session { + demitter: diagnostic::emitter) -> Session { let codemap = codemap::new_codemap(); let diagnostic_handler = diagnostic::mk_handler(Some(demitter)); @@ -581,11 +581,10 @@ fn build_session(sopts: @session::options, } fn build_session_(sopts: @session::options, - cm: codemap::codemap, + cm: codemap::CodeMap, demitter: diagnostic::emitter, span_diagnostic_handler: diagnostic::span_handler) - -> session { - + -> Session { let target_cfg = build_target_config(sopts, demitter); let p_s = parse::new_parse_sess_special_handler(span_diagnostic_handler, cm); @@ -595,7 +594,7 @@ fn build_session_(sopts: @session::options, sopts.target_triple, sopts.addl_lib_search_paths); let lint_settings = lint::mk_lint_settings(); - session_(@{targ_cfg: target_cfg, + Session_(@{targ_cfg: target_cfg, opts: sopts, cstore: cstore, parse_sess: p_s, @@ -609,7 +608,7 @@ fn build_session_(sopts: @session::options, lint_settings: lint_settings}) } -fn parse_pretty(sess: session, &&name: ~str) -> pp_mode { +fn parse_pretty(sess: Session, &&name: ~str) -> pp_mode { match name { ~"normal" => ppm_normal, ~"expanded" => ppm_expanded, @@ -652,7 +651,7 @@ type output_filenames = @{out_filename:Path, obj_filename:Path}; fn build_output_filenames(input: input, odir: &Option, ofile: &Option, - sess: session) + sess: Session) -> output_filenames { let obj_path; let out_path; @@ -728,7 +727,7 @@ fn early_error(emitter: diagnostic::emitter, msg: ~str) -> ! { fail; } -fn list_metadata(sess: session, path: &Path, out: io::Writer) { +fn list_metadata(sess: Session, path: &Path, out: io::Writer) { metadata::loader::list_file_metadata( sess.parse_sess.interner, session::sess_os_to_meta_os(sess.targ_cfg.os), path, out); diff --git a/src/rustc/driver/rustc.rs b/src/rustc/driver/rustc.rs index 5833723ec101b..59d4e0dfdb84b 100644 --- a/src/rustc/driver/rustc.rs +++ b/src/rustc/driver/rustc.rs @@ -235,7 +235,7 @@ fn monitor(+f: fn~(diagnostic::emitter)) { // The 'diagnostics emitter'. Every error, warning, etc. should // go through this function. 
- let demitter = fn@(cmsp: Option<(codemap::codemap, codemap::span)>, + let demitter = fn@(cmsp: Option<(codemap::CodeMap, codemap::span)>, msg: &str, lvl: diagnostic::level) { if lvl == diagnostic::fatal { comm::send(ch, fatal); diff --git a/src/rustc/driver/session.rs b/src/rustc/driver/session.rs index 550656c23df69..ed73bcb6d7259 100644 --- a/src/rustc/driver/session.rs +++ b/src/rustc/driver/session.rs @@ -127,24 +127,24 @@ type options = type crate_metadata = {name: ~str, data: ~[u8]}; -type session_ = {targ_cfg: @config, +type Session_ = {targ_cfg: @config, opts: @options, - cstore: metadata::cstore::cstore, + cstore: metadata::cstore::CStore, parse_sess: parse_sess, - codemap: codemap::codemap, + codemap: codemap::CodeMap, // For a library crate, this is always none mut main_fn: Option<(node_id, codemap::span)>, span_diagnostic: diagnostic::span_handler, - filesearch: filesearch::filesearch, + filesearch: filesearch::FileSearch, mut building_library: bool, working_dir: Path, lint_settings: lint::lint_settings}; -enum session { - session_(@session_) +enum Session { + Session_(@Session_) } -impl session { +impl Session { fn span_fatal(sp: span, msg: ~str) -> ! { self.span_diagnostic.span_fatal(sp, msg) } @@ -270,7 +270,7 @@ fn basic_options() -> @options { } // Seems out of place, but it uses session, so I'm putting it here -fn expect(sess: session, opt: Option, msg: fn() -> ~str) -> T { +fn expect(sess: Session, opt: Option, msg: fn() -> ~str) -> T { diagnostic::expect(sess.diagnostic(), opt, msg) } diff --git a/src/rustc/front/core_inject.rs b/src/rustc/front/core_inject.rs index f198a2ca79dc3..e9be56e7d4803 100644 --- a/src/rustc/front/core_inject.rs +++ b/src/rustc/front/core_inject.rs @@ -1,4 +1,4 @@ -use driver::session::session; +use driver::session::Session; use syntax::codemap; use syntax::ast; use syntax::ast_util::*; @@ -6,7 +6,7 @@ use syntax::attr; export maybe_inject_libcore_ref; -fn maybe_inject_libcore_ref(sess: session, +fn maybe_inject_libcore_ref(sess: Session, crate: @ast::crate) -> @ast::crate { if use_core(crate) { inject_libcore_ref(sess, crate) @@ -19,7 +19,7 @@ fn use_core(crate: @ast::crate) -> bool { !attr::attrs_contains_name(crate.node.attrs, ~"no_core") } -fn inject_libcore_ref(sess: session, +fn inject_libcore_ref(sess: Session, crate: @ast::crate) -> @ast::crate { fn spanned(x: T) -> @ast::spanned { diff --git a/src/rustc/front/intrinsic_inject.rs b/src/rustc/front/intrinsic_inject.rs index 8fd885e8f8b59..ac74bac3f2f16 100644 --- a/src/rustc/front/intrinsic_inject.rs +++ b/src/rustc/front/intrinsic_inject.rs @@ -1,10 +1,10 @@ -use driver::session::session; +use driver::session::Session; use syntax::parse; use syntax::ast; export inject_intrinsic; -fn inject_intrinsic(sess: session, +fn inject_intrinsic(sess: Session, crate: @ast::crate) -> @ast::crate { let intrinsic_module = @include_str!("intrinsic.rs"); diff --git a/src/rustc/front/test.rs b/src/rustc/front/test.rs index 1a6cc6dd895e0..f0c9de4f2a245 100644 --- a/src/rustc/front/test.rs +++ b/src/rustc/front/test.rs @@ -7,7 +7,7 @@ use syntax::fold; use syntax::print::pprust; use syntax::codemap::span; use driver::session; -use session::session; +use session::Session; use syntax::attr; use dvec::DVec; @@ -19,14 +19,14 @@ type test = {span: span, path: ~[ast::ident], ignore: bool, should_fail: bool}; type test_ctxt = - @{sess: session::session, + @{sess: session::Session, crate: @ast::crate, mut path: ~[ast::ident], testfns: DVec}; // Traverse the crate, collecting all the test functions, eliding any 
// existing main functions, and synthesizing a main test harness -fn modify_for_testing(sess: session::session, +fn modify_for_testing(sess: session::Session, crate: @ast::crate) -> @ast::crate { if sess.opts.test { @@ -36,7 +36,7 @@ fn modify_for_testing(sess: session::session, } } -fn generate_test_harness(sess: session::session, +fn generate_test_harness(sess: session::Session, crate: @ast::crate) -> @ast::crate { let cx: test_ctxt = @{sess: sess, @@ -261,13 +261,13 @@ fn mk_path(cx: test_ctxt, path: ~[ast::ident]) -> ~[ast::ident] { else { vec::append(~[cx.sess.ident_of(~"std")], path) } } -// The ast::ty of ~[std::test::test_desc] -fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::ty { +// The ast::Ty of ~[std::test::test_desc] +fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::Ty { let test_desc_ty_path = path_node(mk_path(cx, ~[cx.sess.ident_of(~"test"), cx.sess.ident_of(~"TestDesc")])); - let test_desc_ty: ast::ty = + let test_desc_ty: ast::Ty = {id: cx.sess.next_node_id(), node: ast::ty_path(test_desc_ty_path, cx.sess.next_node_id()), span: dummy_sp()}; diff --git a/src/rustc/metadata/creader.rs b/src/rustc/metadata/creader.rs index 3ed56a1953e68..3080426e5319c 100644 --- a/src/rustc/metadata/creader.rs +++ b/src/rustc/metadata/creader.rs @@ -7,7 +7,7 @@ use syntax::visit; use syntax::codemap::span; use std::map::HashMap; use syntax::print::pprust; -use filesearch::filesearch; +use filesearch::FileSearch; use common::*; use dvec::DVec; use syntax::parse::token::ident_interner; @@ -17,7 +17,7 @@ export read_crates; // Traverses an AST, reading all the information about use'd crates and extern // libraries necessary for later resolving, typechecking, linking, etc. fn read_crates(diag: span_handler, crate: ast::crate, - cstore: cstore::cstore, filesearch: filesearch, + cstore: cstore::CStore, filesearch: FileSearch, os: loader::os, static: bool, intr: @ident_interner) { let e = @{diag: diag, filesearch: filesearch, @@ -88,8 +88,8 @@ fn warn_if_multiple_versions(e: env, diag: span_handler, } type env = @{diag: span_handler, - filesearch: filesearch, - cstore: cstore::cstore, + filesearch: FileSearch, + cstore: cstore::CStore, os: loader::os, static: bool, crate_cache: DVec, diff --git a/src/rustc/metadata/csearch.rs b/src/rustc/metadata/csearch.rs index ea6bd499a3b5e..d9fccb16de02e 100644 --- a/src/rustc/metadata/csearch.rs +++ b/src/rustc/metadata/csearch.rs @@ -39,18 +39,18 @@ struct ProvidedTraitMethodInfo { def_id: ast::def_id } -fn get_symbol(cstore: cstore::cstore, def: ast::def_id) -> ~str { +fn get_symbol(cstore: cstore::CStore, def: ast::def_id) -> ~str { let cdata = cstore::get_crate_data(cstore, def.crate).data; return decoder::get_symbol(cdata, def.node); } -fn get_type_param_count(cstore: cstore::cstore, def: ast::def_id) -> uint { +fn get_type_param_count(cstore: cstore::CStore, def: ast::def_id) -> uint { let cdata = cstore::get_crate_data(cstore, def.crate).data; return decoder::get_type_param_count(cdata, def.node); } /// Iterates over all the paths in the given crate. 
-fn each_path(cstore: cstore::cstore, cnum: ast::crate_num, +fn each_path(cstore: cstore::CStore, cnum: ast::crate_num, f: fn(decoder::path_entry) -> bool) { let crate_data = cstore::get_crate_data(cstore, cnum); decoder::each_path(cstore.intr, crate_data, f); @@ -91,7 +91,7 @@ fn get_enum_variants(tcx: ty::ctxt, def: ast::def_id) return decoder::get_enum_variants(cstore.intr, cdata, def.node, tcx) } -fn get_impls_for_mod(cstore: cstore::cstore, def: ast::def_id, +fn get_impls_for_mod(cstore: cstore::CStore, def: ast::def_id, name: Option) -> @~[@decoder::_impl] { let cdata = cstore::get_crate_data(cstore, def.crate); @@ -113,14 +113,14 @@ fn get_provided_trait_methods(tcx: ty::ctxt, def: ast::def_id) -> decoder::get_provided_trait_methods(cstore.intr, cdata, def.node, tcx) } -fn get_method_names_if_trait(cstore: cstore::cstore, def: ast::def_id) +fn get_method_names_if_trait(cstore: cstore::CStore, def: ast::def_id) -> Option<@DVec<(ast::ident, ast::self_ty_)>> { let cdata = cstore::get_crate_data(cstore, def.crate); return decoder::get_method_names_if_trait(cstore.intr, cdata, def.node); } -fn get_item_attrs(cstore: cstore::cstore, +fn get_item_attrs(cstore: cstore::CStore, def_id: ast::def_id, f: fn(~[@ast::meta_item])) { @@ -140,7 +140,7 @@ fn get_type(tcx: ty::ctxt, def: ast::def_id) -> ty::ty_param_bounds_and_ty { decoder::get_type(cdata, def.node, tcx) } -fn get_region_param(cstore: metadata::cstore::cstore, +fn get_region_param(cstore: metadata::cstore::CStore, def: ast::def_id) -> Option { let cdata = cstore::get_crate_data(cstore, def.crate); return decoder::get_region_param(cdata, def.node); @@ -177,7 +177,7 @@ fn get_impl_traits(tcx: ty::ctxt, def: ast::def_id) -> ~[ty::t] { decoder::get_impl_traits(cdata, def.node, tcx) } -fn get_impl_method(cstore: cstore::cstore, +fn get_impl_method(cstore: cstore::CStore, def: ast::def_id, mname: ast::ident) -> ast::def_id { let cdata = cstore::get_crate_data(cstore, def.crate); @@ -188,7 +188,7 @@ fn get_impl_method(cstore: cstore::cstore, for their methods (so that get_trait_methods can be reused to get class methods), classes require a slightly different version of get_impl_method. Sigh. */ -fn get_class_method(cstore: cstore::cstore, +fn get_class_method(cstore: cstore::CStore, def: ast::def_id, mname: ast::ident) -> ast::def_id { let cdata = cstore::get_crate_data(cstore, def.crate); @@ -196,7 +196,7 @@ fn get_class_method(cstore: cstore::cstore, } /* If def names a class with a dtor, return it. Otherwise, return none. */ -fn class_dtor(cstore: cstore::cstore, def: ast::def_id) +fn class_dtor(cstore: cstore::CStore, def: ast::def_id) -> Option { let cdata = cstore::get_crate_data(cstore, def.crate); decoder::class_dtor(cdata, def.node) diff --git a/src/rustc/metadata/cstore.rs b/src/rustc/metadata/cstore.rs index 483f7ea06a99a..4bbca3a06050c 100644 --- a/src/rustc/metadata/cstore.rs +++ b/src/rustc/metadata/cstore.rs @@ -6,7 +6,7 @@ use std::map::HashMap; use syntax::{ast, attr}; use syntax::parse::token::ident_interner; -export cstore; +export CStore; export cnum_map; export crate_metadata; export mk_cstore; @@ -49,7 +49,7 @@ type crate_metadata = @{name: ~str, // other modules to access the cstore's private data. This could also be // achieved with an obj, but at the expense of a vtable. Not sure if this is a // good pattern or not. 
-enum cstore { private(cstore_private), } +enum CStore { private(cstore_private), } type cstore_private = @{metas: map::HashMap, @@ -64,11 +64,11 @@ type cstore_private = type use_crate_map = map::HashMap; // Internal method to retrieve the data from the cstore -pure fn p(cstore: cstore) -> cstore_private { +pure fn p(cstore: CStore) -> cstore_private { match cstore { private(p) => p } } -fn mk_cstore(intr: @ident_interner) -> cstore { +fn mk_cstore(intr: @ident_interner) -> CStore { let meta_cache = map::HashMap(); let crate_map = map::HashMap(); let mod_path_map = HashMap(); @@ -81,21 +81,21 @@ fn mk_cstore(intr: @ident_interner) -> cstore { intr: intr}); } -fn get_crate_data(cstore: cstore, cnum: ast::crate_num) -> crate_metadata { +fn get_crate_data(cstore: CStore, cnum: ast::crate_num) -> crate_metadata { return p(cstore).metas.get(cnum); } -fn get_crate_hash(cstore: cstore, cnum: ast::crate_num) -> ~str { +fn get_crate_hash(cstore: CStore, cnum: ast::crate_num) -> ~str { let cdata = get_crate_data(cstore, cnum); return decoder::get_crate_hash(cdata.data); } -fn get_crate_vers(cstore: cstore, cnum: ast::crate_num) -> ~str { +fn get_crate_vers(cstore: CStore, cnum: ast::crate_num) -> ~str { let cdata = get_crate_data(cstore, cnum); return decoder::get_crate_vers(cdata.data); } -fn set_crate_data(cstore: cstore, cnum: ast::crate_num, +fn set_crate_data(cstore: CStore, cnum: ast::crate_num, data: crate_metadata) { p(cstore).metas.insert(cnum, data); for vec::each(decoder::get_crate_module_paths(cstore.intr, data)) |dp| { @@ -105,25 +105,25 @@ fn set_crate_data(cstore: cstore, cnum: ast::crate_num, } } -fn have_crate_data(cstore: cstore, cnum: ast::crate_num) -> bool { +fn have_crate_data(cstore: CStore, cnum: ast::crate_num) -> bool { return p(cstore).metas.contains_key(cnum); } -fn iter_crate_data(cstore: cstore, i: fn(ast::crate_num, crate_metadata)) { +fn iter_crate_data(cstore: CStore, i: fn(ast::crate_num, crate_metadata)) { for p(cstore).metas.each |k,v| { i(k, v);}; } -fn add_used_crate_file(cstore: cstore, lib: &Path) { +fn add_used_crate_file(cstore: CStore, lib: &Path) { if !vec::contains(p(cstore).used_crate_files, lib) { p(cstore).used_crate_files.push(copy *lib); } } -fn get_used_crate_files(cstore: cstore) -> ~[Path] { +fn get_used_crate_files(cstore: CStore) -> ~[Path] { return p(cstore).used_crate_files; } -fn add_used_library(cstore: cstore, lib: ~str) -> bool { +fn add_used_library(cstore: CStore, lib: ~str) -> bool { assert lib != ~""; if vec::contains(p(cstore).used_libraries, &lib) { return false; } @@ -131,31 +131,31 @@ fn add_used_library(cstore: cstore, lib: ~str) -> bool { return true; } -fn get_used_libraries(cstore: cstore) -> ~[~str] { +fn get_used_libraries(cstore: CStore) -> ~[~str] { return p(cstore).used_libraries; } -fn add_used_link_args(cstore: cstore, args: ~str) { +fn add_used_link_args(cstore: CStore, args: ~str) { p(cstore).used_link_args.push_all(str::split_char(args, ' ')); } -fn get_used_link_args(cstore: cstore) -> ~[~str] { +fn get_used_link_args(cstore: CStore) -> ~[~str] { return p(cstore).used_link_args; } -fn add_use_stmt_cnum(cstore: cstore, use_id: ast::node_id, +fn add_use_stmt_cnum(cstore: CStore, use_id: ast::node_id, cnum: ast::crate_num) { p(cstore).use_crate_map.insert(use_id, cnum); } -fn find_use_stmt_cnum(cstore: cstore, +fn find_use_stmt_cnum(cstore: CStore, use_id: ast::node_id) -> Option { p(cstore).use_crate_map.find(use_id) } // returns hashes of crates directly used by this crate. Hashes are // sorted by crate name. 
-fn get_dep_hashes(cstore: cstore) -> ~[~str] { +fn get_dep_hashes(cstore: CStore) -> ~[~str] { type crate_hash = {name: ~str, hash: ~str}; let mut result = ~[]; @@ -175,7 +175,7 @@ fn get_dep_hashes(cstore: cstore) -> ~[~str] { return vec::map(sorted, mapper); } -fn get_path(cstore: cstore, d: ast::def_id) -> ~[~str] { +fn get_path(cstore: CStore, d: ast::def_id) -> ~[~str] { option::map_default(&p(cstore).mod_path_map.find(d), ~[], |ds| str::split_str(**ds, ~"::")) } diff --git a/src/rustc/metadata/encoder.rs b/src/rustc/metadata/encoder.rs index 652af81659a08..0b25f0670b427 100644 --- a/src/rustc/metadata/encoder.rs +++ b/src/rustc/metadata/encoder.rs @@ -52,7 +52,7 @@ type encode_parms = { item_symbols: HashMap, discrim_symbols: HashMap, link_meta: link_meta, - cstore: cstore::cstore, + cstore: cstore::CStore, encode_inlined_item: encode_inlined_item }; @@ -77,7 +77,7 @@ enum encode_ctxt = { item_symbols: HashMap, discrim_symbols: HashMap, link_meta: link_meta, - cstore: cstore::cstore, + cstore: cstore::CStore, encode_inlined_item: encode_inlined_item, type_abbrevs: abbrev_map }; @@ -1035,9 +1035,9 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> ~[attribute] { } fn encode_crate_deps(ecx: @encode_ctxt, ebml_w: ebml::Serializer, - cstore: cstore::cstore) { + cstore: cstore::CStore) { - fn get_ordered_deps(ecx: @encode_ctxt, cstore: cstore::cstore) + fn get_ordered_deps(ecx: @encode_ctxt, cstore: cstore::CStore) -> ~[decoder::crate_dep] { type hashkv = @{key: crate_num, val: cstore::crate_metadata}; diff --git a/src/rustc/metadata/filesearch.rs b/src/rustc/metadata/filesearch.rs index 63370b0932104..b2d20ce56e838 100644 --- a/src/rustc/metadata/filesearch.rs +++ b/src/rustc/metadata/filesearch.rs @@ -3,7 +3,7 @@ // probably just be folded into cstore. 
use result::Result; -export filesearch; +export FileSearch; export mk_filesearch; export pick; export pick_file; @@ -21,7 +21,7 @@ fn pick_file(file: Path, path: &Path) -> Option { else { option::None } } -trait filesearch { +trait FileSearch { fn sysroot() -> Path; fn lib_search_paths() -> ~[Path]; fn get_target_lib_path() -> Path; @@ -30,11 +30,11 @@ trait filesearch { fn mk_filesearch(maybe_sysroot: Option, target_triple: &str, - addl_lib_search_paths: ~[Path]) -> filesearch { + addl_lib_search_paths: ~[Path]) -> FileSearch { type filesearch_impl = {sysroot: Path, addl_lib_search_paths: ~[Path], target_triple: ~str}; - impl filesearch_impl: filesearch { + impl filesearch_impl: FileSearch { fn sysroot() -> Path { self.sysroot } fn lib_search_paths() -> ~[Path] { let mut paths = self.addl_lib_search_paths; @@ -64,10 +64,10 @@ fn mk_filesearch(maybe_sysroot: Option, debug!("using sysroot = %s", sysroot.to_str()); {sysroot: sysroot, addl_lib_search_paths: addl_lib_search_paths, - target_triple: str::from_slice(target_triple)} as filesearch + target_triple: str::from_slice(target_triple)} as FileSearch } -fn search(filesearch: filesearch, pick: pick) -> Option { +fn search(filesearch: FileSearch, pick: pick) -> Option { let mut rslt = None; for filesearch.lib_search_paths().each |lib_search_path| { debug!("searching %s", lib_search_path.to_str()); diff --git a/src/rustc/metadata/loader.rs b/src/rustc/metadata/loader.rs index 0a8354be71f76..61b8bcf9067b5 100644 --- a/src/rustc/metadata/loader.rs +++ b/src/rustc/metadata/loader.rs @@ -5,7 +5,7 @@ use syntax::{ast, attr}; use syntax::print::pprust; use syntax::codemap::span; use lib::llvm::{False, llvm, mk_object_file, mk_section_iter}; -use filesearch::filesearch; +use filesearch::FileSearch; use io::WriterUtil; use syntax::parse::token::ident_interner; @@ -28,7 +28,7 @@ enum os { type ctxt = { diag: span_handler, - filesearch: filesearch, + filesearch: FileSearch, span: span, ident: ast::ident, metas: ~[@ast::meta_item], @@ -66,7 +66,7 @@ fn libname(cx: ctxt) -> {prefix: ~str, suffix: ~str} { fn find_library_crate_aux(cx: ctxt, nn: {prefix: ~str, suffix: ~str}, - filesearch: filesearch::filesearch) -> + filesearch: filesearch::FileSearch) -> Option<{ident: ~str, data: @~[u8]}> { let crate_name = crate_name_from_metas(cx.metas); let prefix: ~str = nn.prefix + crate_name + ~"-"; diff --git a/src/rustc/metadata/tydecode.rs b/src/rustc/metadata/tydecode.rs index 1375ff2d0be08..14aef6db1adbd 100644 --- a/src/rustc/metadata/tydecode.rs +++ b/src/rustc/metadata/tydecode.rs @@ -162,7 +162,7 @@ fn parse_bound_region(st: @pstate) -> ty::bound_region { } } -fn parse_region(st: @pstate) -> ty::region { +fn parse_region(st: @pstate) -> ty::Region { match next(st) { 'b' => { ty::re_bound(parse_bound_region(st)) diff --git a/src/rustc/metadata/tyencode.rs b/src/rustc/metadata/tyencode.rs index 69689b16e1542..941dd35bdf0b4 100644 --- a/src/rustc/metadata/tyencode.rs +++ b/src/rustc/metadata/tyencode.rs @@ -125,7 +125,7 @@ fn enc_substs(w: io::Writer, cx: @ctxt, substs: ty::substs) { w.write_char(']'); } -fn enc_region(w: io::Writer, cx: @ctxt, r: ty::region) { +fn enc_region(w: io::Writer, cx: @ctxt, r: ty::Region) { match r { ty::re_bound(br) => { w.write_char('b'); diff --git a/src/rustc/middle/astencode.rs b/src/rustc/middle/astencode.rs index d1f766dd86724..b47e6d3b151c3 100644 --- a/src/rustc/middle/astencode.rs +++ b/src/rustc/middle/astencode.rs @@ -19,7 +19,7 @@ use middle::{ty, typeck}; use middle::typeck::{method_origin, method_map_entry, 
vtable_res, vtable_origin}; -use driver::session::session; +use driver::session::Session; use middle::freevars::freevar_entry; use c = metadata::common; use e = metadata::encoder; @@ -136,7 +136,7 @@ fn decode_inlined_item(cdata: cstore::crate_metadata, // ______________________________________________________________________ // Enumerating the IDs which appear in an AST -fn reserve_id_range(sess: session, +fn reserve_id_range(sess: Session, from_id_range: ast_util::id_range) -> ast_util::id_range { // Handle the case of an empty range: if ast_util::empty(from_id_range) { return from_id_range; } @@ -379,8 +379,8 @@ impl ty::AutoRef: tr { } } -impl ty::region: tr { - fn tr(xcx: extended_decode_ctxt) -> ty::region { +impl ty::Region: tr { + fn tr(xcx: extended_decode_ctxt) -> ty::Region { match self { ty::re_bound(br) => ty::re_bound(br.tr(xcx)), ty::re_free(id, br) => ty::re_free(xcx.tr_id(id), br.tr(xcx)), diff --git a/src/rustc/middle/borrowck.rs b/src/rustc/middle/borrowck.rs index 02fd2998f4dba..db0e092ed83e0 100644 --- a/src/rustc/middle/borrowck.rs +++ b/src/rustc/middle/borrowck.rs @@ -229,7 +229,6 @@ use result::{Result, Ok, Err}; use syntax::print::pprust; use util::common::indenter; use ty::to_str; -use driver::session::session; use dvec::DVec; use mem_categorization::*; @@ -319,8 +318,8 @@ enum bckerr_code { err_mut_variant, err_root_not_permitted, err_mutbl(ast::mutability), - err_out_of_root_scope(ty::region, ty::region), // superscope, subscope - err_out_of_scope(ty::region, ty::region) // superscope, subscope + err_out_of_root_scope(ty::Region, ty::Region), // superscope, subscope + err_out_of_scope(ty::Region, ty::Region) // superscope, subscope } impl bckerr_code : cmp::Eq { @@ -436,7 +435,7 @@ fn root_map() -> root_map { // Misc impl borrowck_ctxt { - fn is_subregion_of(r_sub: ty::region, r_sup: ty::region) -> bool { + fn is_subregion_of(r_sub: ty::Region, r_sup: ty::Region) -> bool { region::is_subregion_of(self.tcx.region_map, r_sub, r_sup) } diff --git a/src/rustc/middle/borrowck/gather_loans.rs b/src/rustc/middle/borrowck/gather_loans.rs index e8d11fd1708f9..e0eb5519d4da3 100644 --- a/src/rustc/middle/borrowck/gather_loans.rs +++ b/src/rustc/middle/borrowck/gather_loans.rs @@ -260,7 +260,7 @@ impl gather_loan_ctxt { fn guarantee_valid(&self, cmt: cmt, req_mutbl: ast::mutability, - scope_r: ty::region) { + scope_r: ty::Region) { self.bccx.guaranteed_paths += 1; @@ -390,7 +390,7 @@ impl gather_loan_ctxt { fn add_loans(&self, cmt: cmt, req_mutbl: ast::mutability, - scope_r: ty::region, + scope_r: ty::Region, +loans: ~[Loan]) { if loans.len() == 0 { return; diff --git a/src/rustc/middle/borrowck/loan.rs b/src/rustc/middle/borrowck/loan.rs index 5d3ccc392139e..7f4f857dae83f 100644 --- a/src/rustc/middle/borrowck/loan.rs +++ b/src/rustc/middle/borrowck/loan.rs @@ -7,7 +7,7 @@ use result::{Result, Ok, Err}; impl borrowck_ctxt { fn loan(cmt: cmt, - scope_region: ty::region, + scope_region: ty::Region, mutbl: ast::mutability) -> bckres<~[Loan]> { let lc = LoanContext { bccx: self, @@ -28,7 +28,7 @@ struct LoanContext { bccx: borrowck_ctxt, // the region scope for which we must preserve the memory - scope_region: ty::region, + scope_region: ty::Region, // accumulated list of loans that will be required mut loans: ~[Loan] @@ -39,7 +39,7 @@ impl LoanContext { fn issue_loan(&self, cmt: cmt, - scope_ub: ty::region, + scope_ub: ty::Region, req_mutbl: ast::mutability) -> bckres<()> { if self.bccx.is_subregion_of(self.scope_region, scope_ub) { match req_mutbl { diff --git 
a/src/rustc/middle/borrowck/preserve.rs b/src/rustc/middle/borrowck/preserve.rs index 7e1d47eed6944..556ea7867cfca 100644 --- a/src/rustc/middle/borrowck/preserve.rs +++ b/src/rustc/middle/borrowck/preserve.rs @@ -23,7 +23,7 @@ impl preserve_condition { impl borrowck_ctxt { fn preserve(cmt: cmt, - scope_region: ty::region, + scope_region: ty::Region, item_ub: ast::node_id, root_ub: ast::node_id) -> bckres { @@ -41,7 +41,7 @@ enum preserve_ctxt = { bccx: borrowck_ctxt, // the region scope for which we must preserve the memory - scope_region: ty::region, + scope_region: ty::Region, // the scope for the body of the enclosing fn/method item item_ub: ast::node_id, @@ -277,7 +277,7 @@ priv impl &preserve_ctxt { /// Checks that the scope for which the value must be preserved /// is a subscope of `scope_ub`; if so, success. fn compare_scope(cmt: cmt, - scope_ub: ty::region) -> bckres { + scope_ub: ty::Region) -> bckres { if self.bccx.is_subregion_of(self.scope_region, scope_ub) { Ok(pc_ok) } else { diff --git a/src/rustc/middle/capture.rs b/src/rustc/middle/capture.rs index 618d43e121a30..563ea8f84be76 100644 --- a/src/rustc/middle/capture.rs +++ b/src/rustc/middle/capture.rs @@ -1,5 +1,4 @@ use syntax::{ast, ast_util}; -use driver::session::session; use syntax::codemap::span; use std::map; use std::map::HashMap; diff --git a/src/rustc/middle/check_alt.rs b/src/rustc/middle/check_alt.rs index aab470f6907da..fc040ecc4cd3a 100644 --- a/src/rustc/middle/check_alt.rs +++ b/src/rustc/middle/check_alt.rs @@ -7,7 +7,6 @@ use syntax::print::pprust::pat_to_str; use util::ppaux::ty_to_str; use pat_util::*; use syntax::visit; -use driver::session::session; use middle::ty; use middle::ty::*; use std::map::HashMap; diff --git a/src/rustc/middle/check_const.rs b/src/rustc/middle/check_const.rs index bd3abe2013495..bdc042fb764e9 100644 --- a/src/rustc/middle/check_const.rs +++ b/src/rustc/middle/check_const.rs @@ -1,10 +1,10 @@ use syntax::ast::*; use syntax::{visit, ast_util, ast_map}; -use driver::session::session; +use driver::session::Session; use std::map::HashMap; use dvec::DVec; -fn check_crate(sess: session, crate: @crate, ast_map: ast_map::map, +fn check_crate(sess: Session, crate: @crate, ast_map: ast_map::map, def_map: resolve::DefMap, method_map: typeck::method_map, tcx: ty::ctxt) { visit::visit_crate(*crate, false, visit::mk_vt(@{ @@ -17,7 +17,7 @@ fn check_crate(sess: session, crate: @crate, ast_map: ast_map::map, sess.abort_if_errors(); } -fn check_item(sess: session, ast_map: ast_map::map, +fn check_item(sess: Session, ast_map: ast_map::map, def_map: resolve::DefMap, it: @item, &&_is_const: bool, v: visit::vt) { match it.node { @@ -55,7 +55,7 @@ fn check_pat(p: @pat, &&_is_const: bool, v: visit::vt) { } } -fn check_expr(sess: session, def_map: resolve::DefMap, +fn check_expr(sess: Session, def_map: resolve::DefMap, method_map: typeck::method_map, tcx: ty::ctxt, e: @expr, &&is_const: bool, v: visit::vt) { if is_const { @@ -142,12 +142,12 @@ fn check_expr(sess: session, def_map: resolve::DefMap, // Make sure a const item doesn't recursively refer to itself // FIXME: Should use the dependency graph when it's available (#1356) -fn check_item_recursion(sess: session, ast_map: ast_map::map, +fn check_item_recursion(sess: Session, ast_map: ast_map::map, def_map: resolve::DefMap, it: @item) { type env = { root_it: @item, - sess: session, + sess: Session, ast_map: ast_map::map, def_map: resolve::DefMap, idstack: @DVec, diff --git a/src/rustc/middle/check_loop.rs b/src/rustc/middle/check_loop.rs index 
3cd26f3039b4c..3fa7f34fb33e2 100644 --- a/src/rustc/middle/check_loop.rs +++ b/src/rustc/middle/check_loop.rs @@ -1,6 +1,5 @@ use syntax::ast::*; use syntax::visit; -use driver::session::session; type ctx = {in_loop: bool, can_ret: bool}; diff --git a/src/rustc/middle/kind.rs b/src/rustc/middle/kind.rs index 36a05d6650615..e4dc9e8330e88 100644 --- a/src/rustc/middle/kind.rs +++ b/src/rustc/middle/kind.rs @@ -1,8 +1,7 @@ use syntax::{visit, ast_util}; use syntax::ast::*; use syntax::codemap::span; -use ty::{kind, kind_copyable, kind_noncopyable, kind_const}; -use driver::session::session; +use middle::ty::{Kind, kind_copyable, kind_noncopyable, kind_const}; use std::map::HashMap; use util::ppaux::{ty_to_str, tys_to_str}; use syntax::print::pprust::expr_to_str; @@ -40,7 +39,7 @@ use lint::{non_implicitly_copyable_typarams,implicit_copies}; const try_adding: &str = "Try adding a move"; -fn kind_to_str(k: kind) -> ~str { +fn kind_to_str(k: Kind) -> ~str { let mut kinds = ~[]; if ty::kind_lteq(kind_const(), k) { @@ -387,7 +386,7 @@ fn check_stmt(stmt: @stmt, cx: ctx, v: visit::vt) { visit::visit_stmt(stmt, cx, v); } -fn check_ty(aty: @ty, cx: ctx, v: visit::vt) { +fn check_ty(aty: @Ty, cx: ctx, v: visit::vt) { match aty.node { ty_path(_, id) => { do option::iter(&cx.tcx.node_type_substs.find(id)) |ts| { diff --git a/src/rustc/middle/lang_items.rs b/src/rustc/middle/lang_items.rs index 7cb2c9eb9cf19..383fe2db3231c 100644 --- a/src/rustc/middle/lang_items.rs +++ b/src/rustc/middle/lang_items.rs @@ -9,7 +9,7 @@ // // * Functions called by the compiler itself. -use driver::session::session; +use driver::session::Session; use metadata::csearch::{each_path, get_item_attrs}; use metadata::cstore::{iter_crate_data}; use metadata::decoder::{dl_def, dl_field, dl_impl}; @@ -50,7 +50,7 @@ struct LanguageItems { mut log_type_fn: Option } -mod LanguageItems { +mod language_items { #[legacy_exports]; fn make() -> LanguageItems { LanguageItems { @@ -83,7 +83,7 @@ mod LanguageItems { } } -fn LanguageItemCollector(crate: @crate, session: session, +fn LanguageItemCollector(crate: @crate, session: Session, items: &r/LanguageItems) -> LanguageItemCollector/&r { @@ -127,7 +127,7 @@ struct LanguageItemCollector { items: &LanguageItems, crate: @crate, - session: session, + session: Session, item_refs: HashMap<~str,&mut Option>, } @@ -239,8 +239,8 @@ impl LanguageItemCollector { } } -fn collect_language_items(crate: @crate, session: session) -> LanguageItems { - let items = LanguageItems::make(); +fn collect_language_items(crate: @crate, session: Session) -> LanguageItems { + let items = language_items::make(); let collector = LanguageItemCollector(crate, session, &items); collector.collect(); copy items diff --git a/src/rustc/middle/lint.rs b/src/rustc/middle/lint.rs index 0f31f2056a14a..0768a09252247 100644 --- a/src/rustc/middle/lint.rs +++ b/src/rustc/middle/lint.rs @@ -1,5 +1,5 @@ use driver::session; -use driver::session::session; +use driver::session::Session; use middle::ty; use syntax::{ast, ast_util, visit}; use syntax::attr; @@ -244,7 +244,7 @@ fn clone_lint_modes(modes: lint_modes) -> lint_modes { type ctxt_ = {dict: lint_dict, curr: lint_modes, is_default: bool, - sess: session}; + sess: Session}; enum ctxt { ctxt_(ctxt_) @@ -355,7 +355,7 @@ fn build_settings_item(i: @ast::item, &&cx: ctxt, v: visit::vt) { } } -fn build_settings_crate(sess: session::session, crate: @ast::crate) { +fn build_settings_crate(sess: session::Session, crate: @ast::crate) { let cx = ctxt_({dict: get_lint_dict(), curr: 
std::smallintmap::mk(), diff --git a/src/rustc/middle/liveness.rs b/src/rustc/middle/liveness.rs index a0a422bc027b7..89d5c842a9f62 100644 --- a/src/rustc/middle/liveness.rs +++ b/src/rustc/middle/liveness.rs @@ -99,7 +99,6 @@ use syntax::print::pprust::{expr_to_str}; use visit::vt; use syntax::codemap::span; use syntax::ast::*; -use driver::session::session; use io::WriterUtil; use capture::{cap_move, cap_drop, cap_copy, cap_ref}; diff --git a/src/rustc/middle/mem_categorization.rs b/src/rustc/middle/mem_categorization.rs index dc5874ea2cfae..a61cb28c16b1a 100644 --- a/src/rustc/middle/mem_categorization.rs +++ b/src/rustc/middle/mem_categorization.rs @@ -122,7 +122,7 @@ impl categorization : cmp::Eq { enum ptr_kind { uniq_ptr, gc_ptr, - region_ptr(ty::region), + region_ptr(ty::Region), unsafe_ptr } @@ -993,7 +993,7 @@ impl &mem_categorization_ctxt { } } - fn region_to_str(r: ty::region) -> ~str { + fn region_to_str(r: ty::Region) -> ~str { region_to_str(self.tcx, r) } } diff --git a/src/rustc/middle/region.rs b/src/rustc/middle/region.rs index eb0bf8796f075..5c70cd3e2795e 100644 --- a/src/rustc/middle/region.rs +++ b/src/rustc/middle/region.rs @@ -7,7 +7,7 @@ region parameterized. */ -use driver::session::session; +use driver::session::Session; use middle::ty; use syntax::{ast, visit}; use syntax::codemap::span; @@ -41,7 +41,7 @@ Encodes the bounding lifetime for a given AST node: type region_map = HashMap; struct ctxt { - sess: session, + sess: Session, def_map: resolve::DefMap, // Generated maps: @@ -108,8 +108,8 @@ fn scope_contains(region_map: region_map, superscope: ast::node_id, /// intended to run *after inference* and sadly the logic is somewhat /// duplicated with the code in infer.rs. fn is_subregion_of(region_map: region_map, - sub_region: ty::region, - super_region: ty::region) -> bool { + sub_region: ty::Region, + super_region: ty::Region) -> bool { sub_region == super_region || match (sub_region, super_region) { (_, ty::re_static) => { @@ -328,7 +328,7 @@ fn resolve_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk, visit::visit_fn(fk, decl, body, sp, id, fn_cx, visitor); } -fn resolve_crate(sess: session, def_map: resolve::DefMap, +fn resolve_crate(sess: Session, def_map: resolve::DefMap, crate: @ast::crate) -> region_map { let cx: ctxt = ctxt {sess: sess, def_map: def_map, @@ -382,7 +382,7 @@ impl region_dep : cmp::Eq { } type determine_rp_ctxt_ = { - sess: session, + sess: Session, ast_map: ast_map::map, def_map: resolve::DefMap, region_paramd_items: region_paramd_items, @@ -599,7 +599,7 @@ fn determine_rp_in_ty_method(ty_m: ast::ty_method, } } -fn determine_rp_in_ty(ty: @ast::ty, +fn determine_rp_in_ty(ty: @ast::Ty, &&cx: determine_rp_ctxt, visitor: visit::vt) { @@ -755,7 +755,7 @@ fn determine_rp_in_struct_field(cm: @ast::struct_field, } } -fn determine_rp_in_crate(sess: session, +fn determine_rp_in_crate(sess: Session, ast_map: ast_map::map, def_map: resolve::DefMap, crate: @ast::crate) -> region_paramd_items { diff --git a/src/rustc/middle/resolve.rs b/src/rustc/middle/resolve.rs index 4f170fd050be8..81ea6daf19581 100644 --- a/src/rustc/middle/resolve.rs +++ b/src/rustc/middle/resolve.rs @@ -1,4 +1,4 @@ -use driver::session::session; +use driver::session::Session; use metadata::csearch::{each_path, get_method_names_if_trait}; use metadata::cstore::find_use_stmt_cnum; use metadata::decoder::{def_like, dl_def, dl_field, dl_impl}; @@ -35,7 +35,7 @@ use syntax::ast::{pat_box, pat_lit, pat_range, pat_rec, pat_struct}; use syntax::ast::{pat_tup, pat_uniq, pat_wild, 
private, provided, public}; use syntax::ast::{required, rem, self_ty_, shl, shr, stmt_decl}; use syntax::ast::{struct_field, struct_variant_kind, sty_static, subtract}; -use syntax::ast::{trait_ref, tuple_variant_kind, ty, ty_bool, ty_char}; +use syntax::ast::{trait_ref, tuple_variant_kind, Ty, ty_bool, ty_char}; use syntax::ast::{ty_f, ty_f32, ty_f64, ty_float, ty_i, ty_i16, ty_i32}; use syntax::ast::{ty_i64, ty_i8, ty_int, ty_param, ty_path, ty_str, ty_u}; use syntax::ast::{ty_u16, ty_u32, ty_u64, ty_u8, ty_uint, type_value_ns}; @@ -115,7 +115,6 @@ impl PatternBindingMode : cmp::Eq { enum Namespace { - ModuleNS, TypeNS, ValueNS } @@ -166,19 +165,8 @@ enum CaptureClause { type ResolveVisitor = vt<()>; -enum ModuleDef { - NoModuleDef, // Does not define a module. - ModuleDef(Privacy, @Module), // Defines a module. -} - -impl ModuleDef { - pure fn is_none() -> bool { - match self { NoModuleDef => true, _ => false } - } -} - enum ImportDirectiveNS { - ModuleNSOnly, + TypeNSOnly, AnyNS } @@ -363,7 +351,6 @@ struct ImportResolution { mut outstanding_references: uint, - mut module_target: Option, mut value_target: Option, mut type_target: Option, @@ -375,7 +362,6 @@ fn ImportResolution(privacy: Privacy, span: span) -> ImportResolution { privacy: privacy, span: span, outstanding_references: 0u, - module_target: None, value_target: None, type_target: None, used: false @@ -385,7 +371,6 @@ fn ImportResolution(privacy: Privacy, span: span) -> ImportResolution { impl ImportResolution { fn target_for_namespace(namespace: Namespace) -> Option { match namespace { - ModuleNS => return copy self.module_target, TypeNS => return copy self.type_target, ValueNS => return copy self.value_target } @@ -482,7 +467,7 @@ pure fn is_none(x: Option) -> bool { } } -fn unused_import_lint_level(session: session) -> level { +fn unused_import_lint_level(session: Session) -> level { for session.opts.lint_opts.each |lint_option_pair| { let (lint_type, lint_level) = *lint_option_pair; if lint_type == unused_imports { @@ -504,8 +489,14 @@ impl Privacy : cmp::Eq { pure fn ne(other: &Privacy) -> bool { !self.eq(other) } } -// Records a possibly-private definition. -struct Definition { +// Records a possibly-private type definition. +enum TypeNsDef { + ModuleDef(Privacy, @Module), + TypeDef(Privacy, def) +} + +// Records a possibly-private value definition. +struct ValueNsDef { privacy: Privacy, def: def, } @@ -513,13 +504,11 @@ struct Definition { // Records the definitions (at most one for each namespace) that a name is // bound to. struct NameBindings { - mut module_def: ModuleDef, //< Meaning in module namespace. - mut type_def: Option, //< Meaning in type namespace. - mut value_def: Option, //< Meaning in value namespace. + mut type_def: Option, //< Meaning in type namespace. + mut value_def: Option, //< Meaning in value namespace. // For error reporting - // XXX: Merge me into Definition. - mut module_span: Option, + // XXX: Merge me into TypeDef and ValueDef. mut type_span: Option, mut value_span: Option, } @@ -532,30 +521,30 @@ impl NameBindings { def_id: Option, legacy_exports: bool, sp: span) { - if self.module_def.is_none() { + if self.type_def.is_none() { let module_ = @Module(parent_link, def_id, legacy_exports); - self.module_def = ModuleDef(privacy, module_); - self.module_span = Some(sp); + self.type_def = Some(ModuleDef(privacy, module_)); + self.type_span = Some(sp); } } /// Records a type definition. 
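
The NameBindings change above removes the separate module namespace: a module is now just one flavour of type-namespace definition, so a name carries at most a type_def and a value_def. A simplified sketch of the new shape in modern Rust syntax (Privacy, Def and Module are stand-ins, not the compiler's real types):

    #[derive(Clone, Copy)] enum Privacy { Public, Private }
    #[derive(Clone, Copy)] struct Def(u32);     // stand-in for the compiler's `def`
    #[derive(Clone, Copy)] struct Module(u32);  // stand-in for @Module

    // Meaning of a name in the type namespace: either a module or a type.
    #[derive(Clone, Copy)]
    enum TypeNsDef { ModuleDef(Privacy, Module), TypeDef(Privacy, Def) }

    // Meaning of a name in the value namespace.
    #[derive(Clone, Copy)]
    struct ValueNsDef { privacy: Privacy, def: Def }

    struct NameBindings {
        type_def: Option<TypeNsDef>,
        value_def: Option<ValueNsDef>,
    }

    impl NameBindings {
        // Mirrors get_module_if_available(): only a ModuleDef carries a module.
        fn module_if_available(&self) -> Option<Module> {
            match self.type_def {
                Some(TypeNsDef::ModuleDef(_, m)) => Some(m),
                _ => None,
            }
        }
    }

    fn main() {
        let b = NameBindings {
            type_def: Some(TypeNsDef::ModuleDef(Privacy::Public, Module(0))),
            value_def: None,
        };
        assert!(b.module_if_available().is_some() && b.value_def.is_none());
    }

Folding modules into the type namespace this way is what lets the rest of the patch delete ModuleNS and every place that previously had to thread a third namespace through import resolution.
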
fn define_type(privacy: Privacy, def: def, sp: span) { - self.type_def = Some(Definition { privacy: privacy, def: def }); + self.type_def = Some(TypeDef(privacy, def)); self.type_span = Some(sp); } /// Records a value definition. fn define_value(privacy: Privacy, def: def, sp: span) { - self.value_def = Some(Definition { privacy: privacy, def: def }); + self.value_def = Some(ValueNsDef { privacy: privacy, def: def }); self.value_span = Some(sp); } /// Returns the module node if applicable. fn get_module_if_available() -> Option<@Module> { - match self.module_def { - NoModuleDef => return None, - ModuleDef(_privacy, module_) => return Some(module_) + match self.type_def { + Some(ModuleDef(_, module_)) => return Some(module_), + None | Some(TypeDef(_, _)) => return None, } } @@ -564,70 +553,76 @@ impl NameBindings { * definition. */ fn get_module() -> @Module { - match self.module_def { - NoModuleDef => { - fail - ~"get_module called on a node with no module definition!"; - } - ModuleDef(_, module_) => { - return module_; + match self.type_def { + None | Some(TypeDef(*)) => { + fail ~"get_module called on a node with no module \ + definition!" } + Some(ModuleDef(_, module_)) => module_ } } fn defined_in_namespace(namespace: Namespace) -> bool { match namespace { - ModuleNS => { - match self.module_def { - NoModuleDef => false, - _ => true - } - } TypeNS => return self.type_def.is_some(), ValueNS => return self.value_def.is_some() } } - fn def_for_namespace(namespace: Namespace) -> Option { + fn def_for_namespace(namespace: Namespace) -> Option { match namespace { - TypeNS => return self.type_def, - ValueNS => return self.value_def, - ModuleNS => match self.module_def { - NoModuleDef => return None, - ModuleDef(privacy, module_) => - match module_.def_id { - None => return None, - Some(def_id) => { - return Some(Definition { - privacy: privacy, - def: def_mod(def_id) - }); + TypeNS => { + match self.type_def { + None => None, + Some(ModuleDef(_, module_)) => { + module_.def_id.map(|def_id| def_mod(*def_id)) } + Some(TypeDef(_, def)) => Some(def) } - } + } + ValueNS => { + match self.value_def { + None => None, + Some(value_def) => Some(value_def.def) + } + } + } + } + + fn privacy_for_namespace(namespace: Namespace) -> Option { + match namespace { + TypeNS => { + match self.type_def { + None => None, + Some(ModuleDef(privacy, _)) | Some(TypeDef(privacy, _)) => + Some(privacy) + } + } + ValueNS => { + match self.value_def { + None => None, + Some(value_def) => Some(value_def.privacy) + } + } } } fn span_for_namespace(namespace: Namespace) -> Option { - match self.def_for_namespace(namespace) { - Some(_) => { + if self.defined_in_namespace(namespace) { match namespace { - TypeNS => self.type_span, - ValueNS => self.value_span, - ModuleNS => self.module_span + TypeNS => self.type_span, + ValueNS => self.value_span, } - } - None => None + } else { + None } } } fn NameBindings() -> NameBindings { NameBindings { - module_def: NoModuleDef, type_def: None, value_def: None, - module_span: None, type_span: None, value_span: None } @@ -675,9 +670,8 @@ fn PrimitiveTypeTable(intr: @ident_interner) -> PrimitiveTypeTable { fn namespace_to_str(ns: Namespace) -> ~str { match ns { - TypeNS => ~"type", - ValueNS => ~"value", - ModuleNS => ~"module" + TypeNS => ~"type", + ValueNS => ~"value", } } @@ -693,9 +687,8 @@ fn has_legacy_export_attr(attrs: &[syntax::ast::attribute]) -> bool { return false; } -fn Resolver(session: session, lang_items: LanguageItems, +fn Resolver(session: Session, lang_items: 
LanguageItems, crate: @crate) -> Resolver { - let graph_root = @NameBindings(); (*graph_root).define_module(Public, @@ -735,7 +728,7 @@ fn Resolver(session: session, lang_items: LanguageItems, primitive_type_table: @PrimitiveTypeTable(session. parse_sess.interner), - namespaces: ~[ ModuleNS, TypeNS, ValueNS ], + namespaces: ~[ TypeNS, ValueNS ], def_map: HashMap(), export_map2: HashMap(), @@ -749,7 +742,7 @@ fn Resolver(session: session, lang_items: LanguageItems, /// The main resolver class. struct Resolver { - session: session, + session: Session, lang_items: LanguageItems, crate: @crate, @@ -992,14 +985,14 @@ impl Resolver { match item.node { item_mod(module_) => { - let legacy = has_legacy_export_attr(item.attrs); - let (name_bindings, new_parent) = self.add_child(ident, parent, - ~[ModuleNS], sp); + let legacy = has_legacy_export_attr(item.attrs); + let (name_bindings, new_parent) = + self.add_child(ident, parent, ~[TypeNS], sp); let parent_link = self.get_parent_link(new_parent, ident); let def_id = { crate: 0, node: item.id }; - (*name_bindings).define_module(privacy, parent_link, - Some(def_id), legacy, sp); + (*name_bindings).define_module(privacy, parent_link, + Some(def_id), legacy, sp); let new_parent = ModuleReducedGraphParent((*name_bindings).get_module()); @@ -1007,25 +1000,30 @@ impl Resolver { visit_mod(module_, sp, item.id, new_parent, visitor); } item_foreign_mod(fm) => { - let legacy = has_legacy_export_attr(item.attrs); - let new_parent = match fm.sort { - named => { - let (name_bindings, new_parent) = self.add_child(ident, - parent, ~[ModuleNS], sp); + let legacy = has_legacy_export_attr(item.attrs); + let new_parent = match fm.sort { + named => { + let (name_bindings, new_parent) = + self.add_child(ident, parent, ~[TypeNS], sp); - let parent_link = self.get_parent_link(new_parent, ident); - let def_id = { crate: 0, node: item.id }; - (*name_bindings).define_module(privacy, parent_link, - Some(def_id), legacy, sp); + let parent_link = self.get_parent_link(new_parent, + ident); + let def_id = { crate: 0, node: item.id }; + (*name_bindings).define_module(privacy, + parent_link, + Some(def_id), + legacy, + sp); + + ModuleReducedGraphParent(name_bindings.get_module()) + } - ModuleReducedGraphParent((*name_bindings).get_module()) - } - // For anon foreign mods, the contents just go in the - // current scope - anonymous => parent - }; + // For anon foreign mods, the contents just go in the + // current scope + anonymous => parent + }; - visit_item(item, new_parent, visitor); + visit_item(item, new_parent, visitor); } // These items live in the value namespace. @@ -1226,7 +1224,7 @@ impl Resolver { match view_path.node { view_path_simple(binding, full_path, ns, _) => { let ns = match ns { - module_ns => ModuleNSOnly, + module_ns => TypeNSOnly, type_value_ns => AnyNS }; @@ -1326,8 +1324,7 @@ impl Resolver { match find_use_stmt_cnum(self.session.cstore, node_id) { Some(crate_id) => { let (child_name_bindings, new_parent) = - // should this be in ModuleNS? 
--tjc - self.add_child(name, parent, ~[ModuleNS], + self.add_child(name, parent, ~[TypeNS], view_item.span); let def_id = { crate: crate_id, node: 0 }; @@ -1410,8 +1407,8 @@ impl Resolver { ident: ident, new_parent: ReducedGraphParent) { match def { def_mod(def_id) | def_foreign_mod(def_id) => { - match copy child_name_bindings.module_def { - NoModuleDef => { + match copy child_name_bindings.type_def { + None => { debug!("(building reduced graph for \ external crate) building module \ %s", final_ident); @@ -1441,10 +1438,8 @@ impl Resolver { fail ~"can't happen"; } ModuleParentLink(parent_module, ident) => { - let name_bindings = parent_module.children.get(ident); - - resolution.module_target = + resolution.type_target = Some(Target(parent_module, name_bindings)); } } @@ -1456,13 +1451,16 @@ impl Resolver { } } } - ModuleDef(_priv, module_) => { + Some(ModuleDef(_, module_)) => { debug!("(building reduced graph for \ external crate) already created \ module"); module_.def_id = Some(def_id); modules.insert(def_id, module_); } + Some(TypeDef(*)) => { + self.session.bug(~"external module def overwriting type def"); + } } } def_fn(*) | def_static_method(*) | def_const(*) | @@ -1553,8 +1551,8 @@ impl Resolver { ~[], dummy_sp()); // Define or reuse the module node. - match child_name_bindings.module_def { - NoModuleDef => { + match child_name_bindings.type_def { + None => { debug!("(building reduced graph for external crate) \ autovivifying %s", *ident_str); let parent_link = self.get_parent_link(new_parent, @@ -1564,7 +1562,7 @@ impl Resolver { None, false, dummy_sp()); } - ModuleDef(*) => { /* Fall through. */ } + Some(_) => { /* Fall through. */ } } current_module = (*child_name_bindings).get_module(); @@ -1807,7 +1805,7 @@ impl Resolver { target, source); } - SingleImport(target, source, ModuleNSOnly) => { + SingleImport(target, source, TypeNSOnly) => { resolution_result = self.resolve_single_module_import (module_, containing_module, target, @@ -1876,12 +1874,11 @@ impl Resolver { return Failed; } - // We need to resolve all four namespaces for this to succeed. + // We need to resolve both namespaces for this to succeed. // // XXX: See if there's some way of handling namespaces in a more - // generic way. We have four of them; it seems worth doing... + // generic way. We have two of them; it seems worth doing... - let mut module_result = UnknownResult; let mut value_result = UnknownResult; let mut type_result = UnknownResult; @@ -1891,10 +1888,6 @@ impl Resolver { // Continue. } Some(child_name_bindings) => { - if (*child_name_bindings).defined_in_namespace(ModuleNS) { - module_result = BoundResult(containing_module, - child_name_bindings); - } if (*child_name_bindings).defined_in_namespace(ValueNS) { value_result = BoundResult(containing_module, child_name_bindings); @@ -1906,11 +1899,10 @@ impl Resolver { } } - // Unless we managed to find a result in all four namespaces - // (exceedingly unlikely), search imports as well. - - match (module_result, value_result, type_result) { - (BoundResult(*), BoundResult(*), BoundResult(*)) => { + // Unless we managed to find a result in both namespaces (unlikely), + // search imports as well. + match (value_result, type_result) { + (BoundResult(*), BoundResult(*)) => { // Continue. } _ => { @@ -1934,9 +1926,6 @@ impl Resolver { // therefore accurately report that the names are // unbound. 
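
As the comment above says, a single import now has to consider only the two remaining namespaces: the resolver records an optional value target and an optional type target, and further down in this hunk the import fails outright only when both are missing. A condensed sketch of that control flow in modern Rust syntax, with hypothetical lookup helpers standing in for the real per-namespace search:

    #[derive(Clone, Copy)]
    struct Target(u32);                        // stand-in for the resolver's Target

    enum ResolveResult { Failed, Indeterminate, Success }

    struct ImportResolution {
        value_target: Option<Target>,          // binding found in the value namespace
        type_target: Option<Target>,           // binding found in the type namespace
    }

    // Hypothetical per-namespace lookups standing in for the real search.
    fn lookup_value(_name: &str) -> Option<Target> { None }
    fn lookup_type(_name: &str) -> Option<Target> { Some(Target(1)) }

    fn resolve_single_import(name: &str, res: &mut ImportResolution) -> ResolveResult {
        // Try to bind the name in each namespace independently.
        res.value_target = lookup_value(name);
        res.type_target = lookup_type(name);

        // Only when *both* namespaces come up empty is the import unresolved.
        match (res.value_target, res.type_target) {
            (None, None) => ResolveResult::Failed,
            _ => ResolveResult::Success,
        }
    }

    fn main() {
        let mut res = ImportResolution { value_target: None, type_target: None };
        assert!(matches!(resolve_single_import("Foo", &mut res), ResolveResult::Success));
    }
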
- if module_result.is_unknown() { - module_result = UnboundResult; - } if value_result.is_unknown() { value_result = UnboundResult; } @@ -1973,11 +1962,6 @@ impl Resolver { // The name is an import which has been fully // resolved. We can, therefore, just follow it. - - if module_result.is_unknown() { - module_result = get_binding(import_resolution, - ModuleNS); - } if value_result.is_unknown() { value_result = get_binding(import_resolution, ValueNS); @@ -2001,20 +1985,6 @@ impl Resolver { assert module_.import_resolutions.contains_key(target); let import_resolution = module_.import_resolutions.get(target); - match module_result { - BoundResult(target_module, name_bindings) => { - debug!("(resolving single import) found module binding"); - import_resolution.module_target = - Some(Target(target_module, name_bindings)); - } - UnboundResult => { - debug!("(resolving single import) didn't find module \ - binding"); - } - UnknownResult => { - fail ~"module result should be known at this point"; - } - } match value_result { BoundResult(target_module, name_bindings) => { import_resolution.value_target = @@ -2037,12 +2007,10 @@ impl Resolver { } let i = import_resolution; - match (i.module_target, i.value_target, i.type_target) { - /* - If this name wasn't found in any of the four namespaces, it's - definitely unresolved - */ - (None, None, None) => { return Failed; } + match (i.value_target, i.type_target) { + // If this name wasn't found in either namespace, it's definitely + // unresolved. + (None, None) => { return Failed; } _ => {} } @@ -2081,7 +2049,7 @@ impl Resolver { // Continue. } Some(child_name_bindings) => { - if (*child_name_bindings).defined_in_namespace(ModuleNS) { + if (*child_name_bindings).defined_in_namespace(TypeNS) { module_result = BoundResult(containing_module, child_name_bindings); } @@ -2125,8 +2093,8 @@ impl Resolver { // resolved. We can, therefore, just follow it. if module_result.is_unknown() { - match (*import_resolution). - target_for_namespace(ModuleNS) { + match (*import_resolution).target_for_namespace( + TypeNS) { None => { module_result = UnboundResult; } @@ -2156,7 +2124,7 @@ impl Resolver { match module_result { BoundResult(target_module, name_bindings) => { debug!("(resolving single import) found module binding"); - import_resolution.module_target = + import_resolution.type_target = Some(Target(target_module, name_bindings)); } UnboundResult => { @@ -2169,8 +2137,8 @@ impl Resolver { } let i = import_resolution; - if i.module_target.is_none() { - // If this name wasn't found in the module namespace, it's + if i.type_target.is_none() { + // If this name wasn't found in the type namespace, it's // definitely unresolved. return Failed; } @@ -2222,7 +2190,7 @@ impl Resolver { debug!("(resolving glob import) writing module resolution \ %? into `%s`", - is_none(target_import_resolution.module_target), + is_none(target_import_resolution.type_target), self.module_to_str(module_)); // Here we merge two import resolutions. @@ -2232,8 +2200,6 @@ impl Resolver { let new_import_resolution = @ImportResolution(privacy, target_import_resolution.span); - new_import_resolution.module_target = - copy target_import_resolution.module_target; new_import_resolution.value_target = copy target_import_resolution.value_target; new_import_resolution.type_target = @@ -2246,15 +2212,6 @@ impl Resolver { // Merge the two import resolutions at a finer-grained // level. - match copy target_import_resolution.module_target { - None => { - // Continue. 
- } - Some(module_target) => { - dest_import_resolution.module_target = - Some(copy module_target); - } - } match copy target_import_resolution.value_target { None => { // Continue. @@ -2307,11 +2264,6 @@ impl Resolver { self.module_to_str(module_)); // Merge the child item into the import resolution. - if (*name_bindings).defined_in_namespace(ModuleNS) { - debug!("(resolving glob import) ... for module target"); - dest_import_resolution.module_target = - Some(Target(containing_module, name_bindings)); - } if (*name_bindings).defined_in_namespace(ValueNS) { debug!("(resolving glob import) ... for value target"); dest_import_resolution.value_target = @@ -2345,9 +2297,8 @@ impl Resolver { while index < module_path_len { let name = (*module_path).get_elt(index); - match self.resolve_name_in_module(search_module, name, ModuleNS, - xray) { - + match self.resolve_name_in_module(search_module, name, TypeNS, + xray) { Failed => { self.session.span_err(span, ~"unresolved name"); return Failed; @@ -2359,8 +2310,8 @@ impl Resolver { return Indeterminate; } Success(target) => { - match target.bindings.module_def { - NoModuleDef => { + match target.bindings.type_def { + None | Some(TypeDef(*)) => { // Not a module. self.session.span_err(span, fmt!("not a module: %s", @@ -2368,7 +2319,7 @@ impl Resolver { str_of(name))); return Failed; } - ModuleDef(_, copy module_) => { + Some(ModuleDef(_, copy module_)) => { search_module = module_; } } @@ -2443,7 +2394,6 @@ impl Resolver { match module_.children.find(name) { Some(name_bindings) if (*name_bindings).defined_in_namespace(namespace) => { - return Success(Target(module_, name_bindings)); } Some(_) | None => { /* Not found; continue. */ } @@ -2516,15 +2466,15 @@ impl Resolver { fn resolve_module_in_lexical_scope(module_: @Module, name: ident) -> ResolveResult<@Module> { - match self.resolve_item_in_lexical_scope(module_, name, ModuleNS) { + match self.resolve_item_in_lexical_scope(module_, name, TypeNS) { Success(target) => { - match target.bindings.module_def { - NoModuleDef => { + match target.bindings.type_def { + None | Some(TypeDef(*)) => { error!("!!! 
(resolving module in lexical scope) module wasn't actually a module!"); return Failed; } - ModuleDef(_, module_) => { + Some(ModuleDef(_, module_)) => { return Success(module_); } } @@ -2661,8 +2611,7 @@ impl Resolver { debug!("(resolving one-level naming result) searching for module"); match self.resolve_item_in_lexical_scope(module_, source_name, - ModuleNS) { - + TypeNS) { Failed => { debug!("(resolving one-level renaming import) didn't find \ module result"); @@ -2682,7 +2631,7 @@ impl Resolver { let mut value_result; let mut type_result; - if allowable_namespaces == ModuleNSOnly { + if allowable_namespaces == TypeNSOnly { value_result = None; type_result = None; } else { @@ -2772,7 +2721,6 @@ impl Resolver { self.session.str_of(target_name), self.module_to_str(module_)); - import_resolution.module_target = module_result; import_resolution.value_target = value_result; import_resolution.type_target = type_result; @@ -2885,18 +2833,19 @@ impl Resolver { ident: ident, namebindings: @NameBindings, reexport: bool) { - for [ModuleNS, TypeNS, ValueNS].each |ns| { - match namebindings.def_for_namespace(*ns) { - Some(d) if d.privacy == Public => { + for [ TypeNS, ValueNS ].each |ns| { + match (namebindings.def_for_namespace(*ns), + namebindings.privacy_for_namespace(*ns)) { + (Some(d), Some(Public)) => { debug!("(computing exports) YES: %s '%s' \ => %?", if reexport { ~"reexport" } else { ~"export"}, self.session.str_of(ident), - def_id_of_def(d.def)); + def_id_of_def(d)); exports2.push(Export2 { reexport: reexport, name: self.session.str_of(ident), - def_id: def_id_of_def(d.def) + def_id: def_id_of_def(d) }); } _ => () @@ -2914,12 +2863,13 @@ impl Resolver { } for module_.import_resolutions.each_ref |ident, importresolution| { - for [ModuleNS, TypeNS, ValueNS].each |ns| { + for [ TypeNS, ValueNS ].each |ns| { match importresolution.target_for_namespace(*ns) { Some(target) => { debug!("(computing exports) maybe reexport '%s'", self.session.str_of(*ident)); - self.add_exports_of_namebindings(exports2, *ident, + self.add_exports_of_namebindings(exports2, + *ident, target.bindings, true) } @@ -3666,7 +3616,7 @@ impl Resolver { span: span, type_parameters: ~[ty_param], opt_trait_reference: Option<@trait_ref>, - self_type: @ty, + self_type: @Ty, methods: ~[@method], visitor: ResolveVisitor) { @@ -3864,7 +3814,7 @@ impl Resolver { debug!("(resolving block) leaving block"); } - fn resolve_type(ty: @ty, visitor: ResolveVisitor) { + fn resolve_type(ty: @Ty, visitor: ResolveVisitor) { match ty.node { // Like path expressions, the interpretation of path types depends // on whether the path has multiple elements in it or not. @@ -3872,42 +3822,44 @@ impl Resolver { ty_path(path, path_id) => { // This is a path in the type namespace. Walk through scopes // scopes looking for it. + let mut result_def = None; - let mut result_def; - match self.resolve_path(path, TypeNS, true, visitor) { - Some(def) => { - debug!("(resolving type) resolved `%s` to type", - self.session.str_of(path.idents.last())); - result_def = Some(def); - } - None => { - result_def = None; + // First, check to see whether the name is a primitive type. + if path.idents.len() == 1u { + let name = path.idents.last(); + + match self.primitive_type_table + .primitive_types + .find(name) { + + Some(primitive_type) => { + result_def = + Some(def_prim_ty(primitive_type)); + } + None => { + // Continue. + } } } match result_def { - Some(_) => { - // Continue. - } None => { - // Check to see whether the name is a primitive type. 
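
The replacement code earlier in this hunk reorders type-path resolution: for a single-segment path the primitive-type table is consulted first, and ordinary path resolution in the type namespace runs only if that lookup misses. Roughly, in modern Rust syntax with simplified stand-ins for the resolver's tables:

    use std::collections::HashMap;

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Def { PrimTy(&'static str), Item(u32) }   // simplified stand-in for `def`

    // Hypothetical fallback: ordinary resolution in the type namespace.
    fn resolve_path_in_type_ns(_path: &[&str]) -> Option<Def> {
        Some(Def::Item(42))
    }

    fn resolve_type_path(
        path: &[&str],
        primitives: &HashMap<&'static str, &'static str>,
    ) -> Option<Def> {
        // First, check whether a one-segment path names a primitive type.
        if path.len() == 1 {
            if let Some(&prim) = primitives.get(path[0]) {
                return Some(Def::PrimTy(prim));
            }
        }
        // Otherwise fall back to ordinary path resolution.
        resolve_path_in_type_ns(path)
    }

    fn main() {
        let mut prims = HashMap::new();
        prims.insert("int", "int");
        assert_eq!(resolve_type_path(&["int"], &prims), Some(Def::PrimTy("int")));
        assert_eq!(resolve_type_path(&["foo", "Bar"], &prims), Some(Def::Item(42)));
    }
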
- if path.idents.len() == 1u { - let name = path.idents.last(); - - match self.primitive_type_table - .primitive_types - .find(name) { - - Some(primitive_type) => { - result_def = - Some(def_prim_ty(primitive_type)); - } - None => { - // Continue. - } + match self.resolve_path(path, TypeNS, true, visitor) { + Some(def) => { + debug!("(resolving type) resolved `%s` to \ + type", + self.session.str_of( + path.idents.last())); + result_def = Some(def); + } + None => { + result_def = None; } } } + Some(_) => { + // Continue. + } } match copy result_def { @@ -4223,12 +4175,17 @@ impl Resolver { // First, search children. match containing_module.children.find(name) { Some(child_name_bindings) => { - match (*child_name_bindings).def_for_namespace(namespace) { - Some(def) if def.privacy == Public || xray == Xray => { + match (child_name_bindings.def_for_namespace(namespace), + child_name_bindings.privacy_for_namespace(namespace)) { + (Some(def), Some(Public)) => { // Found it. Stop the search here. - return ChildNameDefinition(def.def); + return ChildNameDefinition(def); } - Some(_) | None => { + (Some(def), _) if xray == Xray => { + // Found it. Stop the search here. + return ChildNameDefinition(def); + } + (Some(_), _) | (None, _) => { // Continue. } } @@ -4244,14 +4201,15 @@ impl Resolver { xray == Xray => { match (*import_resolution).target_for_namespace(namespace) { Some(target) => { - match (*target.bindings) - .def_for_namespace(namespace) { - Some(def) if def.privacy == Public => { + match (target.bindings.def_for_namespace(namespace), + target.bindings.privacy_for_namespace( + namespace)) { + (Some(def), Some(Public)) => { // Found it. import_resolution.used = true; - return ImportNameDefinition(def.def); + return ImportNameDefinition(def); } - Some(_) | None => { + (Some(_), _) | (None, _) => { // This can happen with external impls, due to // the imperfect way we read the metadata. @@ -4391,9 +4349,6 @@ impl Resolver { search_result = self.search_ribs(self.type_ribs, ident, span, AllowCapturingSelf); } - ModuleNS => { - fail ~"module namespaces do not have local ribs"; - } } match copy search_result { @@ -4413,23 +4368,22 @@ impl Resolver { fn resolve_item_by_identifier_in_lexical_scope(ident: ident, namespace: Namespace) -> Option { - // Check the items. match self.resolve_item_in_lexical_scope(self.current_module, ident, namespace) { - Success(target) => { match (*target.bindings).def_for_namespace(namespace) { None => { - fail ~"resolved name in a namespace to a set of name \ - bindings with no def for that namespace?!"; + // This can happen if we were looking for a type and + // found a module instead. Modules don't have defs. + return None; } Some(def) => { debug!("(resolving item path in lexical scope) \ resolved `%s` to item", self.session.str_of(ident)); - return Some(def.def); + return Some(def); } } } @@ -4703,7 +4657,7 @@ impl Resolver { for search_module.children.each |_name, child_name_bindings| { match child_name_bindings.def_for_namespace(TypeNS) { Some(def) => { - match def.def { + match def { def_ty(trait_def_id) => { self.add_trait_info_if_containing_method( found_traits, trait_def_id, name); @@ -4730,7 +4684,7 @@ impl Resolver { Some(target) => { match target.bindings.def_for_namespace(TypeNS) { Some(def) => { - match def.def { + match def { def_ty(trait_def_id) => { self. 
add_trait_info_if_containing_method( @@ -4937,15 +4891,6 @@ impl Resolver { debug!("Import resolutions:"); for module_.import_resolutions.each |name, import_resolution| { - let mut module_repr; - match (*import_resolution).target_for_namespace(ModuleNS) { - None => { module_repr = ~""; } - Some(_) => { - module_repr = ~" module:?"; - // XXX - } - } - let mut value_repr; match (*import_resolution).target_for_namespace(ValueNS) { None => { value_repr = ~""; } @@ -4964,15 +4909,14 @@ impl Resolver { } } - debug!("* %s:%s%s%s", - self.session.str_of(name), - module_repr, value_repr, type_repr); + debug!("* %s:%s%s", self.session.str_of(name), + value_repr, type_repr); } } } /// Entry point to crate resolution. -fn resolve_crate(session: session, lang_items: LanguageItems, crate: @crate) +fn resolve_crate(session: Session, lang_items: LanguageItems, crate: @crate) -> { def_map: DefMap, exp_map2: ExportMap2, trait_map: TraitMap } { diff --git a/src/rustc/middle/trans/alt.rs b/src/rustc/middle/trans/alt.rs index 50ea80a134cb1..d760bc349073d 100644 --- a/src/rustc/middle/trans/alt.rs +++ b/src/rustc/middle/trans/alt.rs @@ -99,7 +99,6 @@ * */ -use driver::session::session; use lib::llvm::llvm; use lib::llvm::{ValueRef, BasicBlockRef}; use pat_util::*; diff --git a/src/rustc/middle/trans/base.rs b/src/rustc/middle/trans/base.rs index 93e8435d3e957..4c9a006007e75 100644 --- a/src/rustc/middle/trans/base.rs +++ b/src/rustc/middle/trans/base.rs @@ -17,7 +17,7 @@ use libc::{c_uint, c_ulonglong}; use std::{map, time, list}; use std::map::HashMap; use driver::session; -use session::session; +use session::Session; use syntax::attr; use back::{link, abi, upcall}; use syntax::{ast, ast_util, codemap, ast_map}; @@ -2377,7 +2377,7 @@ fn create_module_map(ccx: @crate_ctxt) -> ValueRef { } -fn decl_crate_map(sess: session::session, mapmeta: link_meta, +fn decl_crate_map(sess: session::Session, mapmeta: link_meta, llmod: ModuleRef) -> ValueRef { let targ_cfg = sess.targ_cfg; let int_type = T_int(targ_cfg); @@ -2482,7 +2482,7 @@ fn write_abi_version(ccx: @crate_ctxt) { false); } -fn trans_crate(sess: session::session, +fn trans_crate(sess: session::Session, crate: @ast::crate, tcx: ty::ctxt, output: &Path, diff --git a/src/rustc/middle/trans/build.rs b/src/rustc/middle/trans/build.rs index 69de8a2cca3e2..dfcc66adc3ac6 100644 --- a/src/rustc/middle/trans/build.rs +++ b/src/rustc/middle/trans/build.rs @@ -6,7 +6,6 @@ use codemap::span; use lib::llvm::{ValueRef, TypeRef, BasicBlockRef, BuilderRef, ModuleRef}; use lib::llvm::{Opcode, IntPredicate, RealPredicate, True, False, CallConv, TypeKind, AtomicBinOp, AtomicOrdering}; -use driver::session::session; use common::*; fn B(cx: block) -> BuilderRef { diff --git a/src/rustc/middle/trans/common.rs b/src/rustc/middle/trans/common.rs index 0fa22dd65ba7b..931e82d5be985 100644 --- a/src/rustc/middle/trans/common.rs +++ b/src/rustc/middle/trans/common.rs @@ -8,7 +8,7 @@ use vec::raw::to_ptr; use std::map::{HashMap,Set}; use syntax::{ast, ast_map}; use driver::session; -use session::session; +use session::Session; use middle::ty; use back::{link, abi, upcall}; use syntax::codemap::span; @@ -110,7 +110,7 @@ fn BuilderRef_res(B: BuilderRef) -> BuilderRef_res { // Crate context. Every crate we compile has one of these. 
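
In the resolver hunks above, the old Definition record (a def plus its privacy) is replaced by two per-namespace queries, so the export pass now pairs def_for_namespace with privacy_for_namespace and exports a name only when both agree it is defined and public. A small sketch of that pairing in modern Rust syntax, using stand-in types:

    #[derive(Clone, Copy)] enum Privacy { Public, Private }
    #[derive(Clone, Copy, Debug, PartialEq)] struct Def(u32);   // stand-in for `def`
    #[derive(Clone, Copy)] enum Namespace { TypeNS, ValueNS }

    // Stand-in for NameBindings with its two per-namespace accessors.
    struct NameBindings {
        type_def: Option<(Privacy, Def)>,
        value_def: Option<(Privacy, Def)>,
    }

    impl NameBindings {
        fn def_for_namespace(&self, ns: Namespace) -> Option<Def> {
            match ns {
                Namespace::TypeNS => self.type_def.map(|(_, d)| d),
                Namespace::ValueNS => self.value_def.map(|(_, d)| d),
            }
        }
        fn privacy_for_namespace(&self, ns: Namespace) -> Option<Privacy> {
            match ns {
                Namespace::TypeNS => self.type_def.map(|(p, _)| p),
                Namespace::ValueNS => self.value_def.map(|(p, _)| p),
            }
        }
    }

    // A name is exported in a namespace only if it is defined there *and* public.
    fn exported_def(b: &NameBindings, ns: Namespace) -> Option<Def> {
        match (b.def_for_namespace(ns), b.privacy_for_namespace(ns)) {
            (Some(d), Some(Privacy::Public)) => Some(d),
            _ => None,
        }
    }

    fn main() {
        let b = NameBindings {
            type_def: Some((Privacy::Public, Def(1))),
            value_def: Some((Privacy::Private, Def(2))),
        };
        assert_eq!(exported_def(&b, Namespace::TypeNS), Some(Def(1)));
        assert_eq!(exported_def(&b, Namespace::ValueNS), None);
    }
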
type crate_ctxt = { - sess: session::session, + sess: session::Session, llmod: ModuleRef, td: target_data, tn: type_names, @@ -605,7 +605,7 @@ fn block_parent(cx: block) -> block { impl block { pure fn ccx() -> @crate_ctxt { self.fcx.ccx } pure fn tcx() -> ty::ctxt { self.fcx.ccx.tcx } - pure fn sess() -> session { self.fcx.ccx.sess } + pure fn sess() -> Session { self.fcx.ccx.sess } fn node_id_to_str(id: ast::node_id) -> ~str { ast_map::node_id_to_str(self.tcx().items, id, self.sess().intr()) @@ -1191,7 +1191,7 @@ fn align_to(cx: block, off: ValueRef, align: ValueRef) -> ValueRef { return build::And(cx, bumped, build::Not(cx, mask)); } -fn path_str(sess: session::session, p: path) -> ~str { +fn path_str(sess: session::Session, p: path) -> ~str { let mut r = ~"", first = true; for vec::each(p) |e| { match *e { diff --git a/src/rustc/middle/trans/debuginfo.rs b/src/rustc/middle/trans/debuginfo.rs index 068ec49d6c7ec..2db0dd59cf918 100644 --- a/src/rustc/middle/trans/debuginfo.rs +++ b/src/rustc/middle/trans/debuginfo.rs @@ -9,7 +9,7 @@ use middle::ty; use syntax::{ast, codemap, ast_util, ast_map}; use syntax::parse::token::ident_interner; use codemap::span; -use ast::ty; +use ast::Ty; use pat_util::*; use util::ppaux::ty_to_str; use driver::session::session; @@ -229,7 +229,7 @@ fn create_file(cx: @crate_ctxt, full_path: ~str) -> @metadata { return mdval; } -fn line_from_span(cm: codemap::codemap, sp: span) -> uint { +fn line_from_span(cm: codemap::CodeMap, sp: span) -> uint { codemap::lookup_char_pos(cm, sp.lo).line } @@ -469,7 +469,7 @@ fn create_composite_type(type_tag: int, name: ~str, file: ValueRef, line: int, } fn create_vec(cx: @crate_ctxt, vec_t: ty::t, elem_t: ty::t, - vec_ty_span: codemap::span, elem_ty: @ast::ty) + vec_ty_span: codemap::span, elem_ty: @ast::Ty) -> @metadata { let fname = filename_from_span(cx, vec_ty_span); let file_node = create_file(cx, fname); @@ -492,7 +492,7 @@ fn create_vec(cx: @crate_ctxt, vec_t: ty::t, elem_t: ty::t, return @{node: llnode, data: {hash: ty::type_id(vec_t)}}; } -fn create_ty(_cx: @crate_ctxt, _t: ty::t, _ty: @ast::ty) +fn create_ty(_cx: @crate_ctxt, _t: ty::t, _ty: @ast::Ty) -> @metadata { /*let cache = get_cache(cx); match cached_metadata::<@metadata>( diff --git a/src/rustc/middle/trans/foreign.rs b/src/rustc/middle/trans/foreign.rs index 5a6260ae27008..8a03884f415ca 100644 --- a/src/rustc/middle/trans/foreign.rs +++ b/src/rustc/middle/trans/foreign.rs @@ -1,7 +1,7 @@ // The classification code for the x86_64 ABI is taken from the clay language // https://github.com/jckarter/clay/blob/master/compiler/src/externals.cpp -use driver::session::{session, arch_x86_64}; +use driver::session::arch_x86_64; use syntax::codemap::span; use libc::c_uint; use syntax::{attr, ast_map}; diff --git a/src/rustc/middle/trans/reachable.rs b/src/rustc/middle/trans/reachable.rs index bfb8de76a6c58..a99ef96b2544a 100644 --- a/src/rustc/middle/trans/reachable.rs +++ b/src/rustc/middle/trans/reachable.rs @@ -128,7 +128,7 @@ fn mk_ty_visitor() -> visit::vt { visit::mk_vt(@{visit_ty: traverse_ty, ..*visit::default_visitor()}) } -fn traverse_ty(ty: @ty, cx: ctx, v: visit::vt) { +fn traverse_ty(ty: @Ty, cx: ctx, v: visit::vt) { if cx.rmap.contains_key(ty.id) { return; } cx.rmap.insert(ty.id, ()); diff --git a/src/rustc/middle/trans/reflect.rs b/src/rustc/middle/trans/reflect.rs index c105caecaebbe..18a25888bb46e 100644 --- a/src/rustc/middle/trans/reflect.rs +++ b/src/rustc/middle/trans/reflect.rs @@ -1,5 +1,4 @@ use std::map::HashMap; -use 
driver::session::session; use lib::llvm::{TypeRef, ValueRef}; use syntax::ast; use back::abi; diff --git a/src/rustc/middle/trans/tvec.rs b/src/rustc/middle/trans/tvec.rs index b78314a67478c..149c6ea532d49 100644 --- a/src/rustc/middle/trans/tvec.rs +++ b/src/rustc/middle/trans/tvec.rs @@ -1,5 +1,4 @@ use syntax::ast; -use driver::session::session; use lib::llvm::{ValueRef, TypeRef}; use back::abi; use syntax::codemap::span; diff --git a/src/rustc/middle/trans/type_use.rs b/src/rustc/middle/trans/type_use.rs index 8ccc8a28de3a2..8b2efacd4d16a 100644 --- a/src/rustc/middle/trans/type_use.rs +++ b/src/rustc/middle/trans/type_use.rs @@ -20,7 +20,6 @@ use std::map::HashMap; use std::list; use std::list::{List, Cons, Nil}; -use driver::session::session; use metadata::csearch; use syntax::ast::*, syntax::ast_util, syntax::visit; use syntax::ast_map; diff --git a/src/rustc/middle/ty.rs b/src/rustc/middle/ty.rs index a0ca46ee01782..84510c7161e39 100644 --- a/src/rustc/middle/ty.rs +++ b/src/rustc/middle/ty.rs @@ -5,7 +5,7 @@ use std::{map, smallintmap}; use result::Result; use std::map::HashMap; use driver::session; -use session::session; +use session::Session; use syntax::{ast, ast_map}; use syntax::ast_util; use syntax::ast_util::{is_local, local_def}; @@ -103,7 +103,7 @@ export ty_infer, mk_infer, type_is_ty_var, mk_var, mk_int_var; export InferTy, TyVar, IntVar; export ty_self, mk_self, type_has_self; export ty_class; -export region, bound_region, encl_region; +export Region, bound_region, encl_region; export re_bound, re_free, re_scope, re_static, re_var; export br_self, br_anon, br_named, br_cap_avoid; export get, type_has_params, type_needs_infer, type_has_regions; @@ -114,7 +114,7 @@ export ty_var_id; export ty_to_def_id; export ty_fn_args; export ty_region; -export kind, kind_implicitly_copyable, kind_send_copy, kind_copyable; +export Kind, kind_implicitly_copyable, kind_send_copy, kind_copyable; export kind_noncopyable, kind_const; export kind_can_be_copied, kind_can_be_sent, kind_can_be_implicitly_copied; export kind_is_safe_for_default_mode; @@ -219,7 +219,7 @@ enum vstore { vstore_fixed(uint), vstore_uniq, vstore_box, - vstore_slice(region) + vstore_slice(Region) } type field_ty = { @@ -302,7 +302,7 @@ type AutoAdjustment = { #[auto_deserialize] type AutoRef = { kind: AutoRefKind, - region: region, + region: Region, mutbl: ast::mutability }; @@ -327,8 +327,8 @@ type ctxt = mut next_id: uint, vecs_implicitly_copyable: bool, legacy_modes: bool, - cstore: metadata::cstore::cstore, - sess: session::session, + cstore: metadata::cstore::CStore, + sess: session::Session, def_map: resolve::DefMap, region_map: middle::region::region_map, @@ -354,8 +354,8 @@ type ctxt = short_names_cache: HashMap, needs_drop_cache: HashMap, needs_unwind_cleanup_cache: HashMap, - kind_cache: HashMap, - ast_ty_to_ty_cache: HashMap<@ast::ty, ast_ty_to_ty_cache_entry>, + kind_cache: HashMap, + ast_ty_to_ty_cache: HashMap<@ast::Ty, ast_ty_to_ty_cache_entry>, enum_var_cache: HashMap, trait_method_cache: HashMap, ty_param_bounds: HashMap, @@ -519,7 +519,7 @@ impl param_ty : to_bytes::IterBytes { /// Representation of regions: #[auto_serialize] #[auto_deserialize] -enum region { +enum Region { /// Bound regions are found (primarily) in function types. 
They indicate /// region parameters that have yet to be replaced with actual regions /// (analogous to type parameters, except that due to the monomorphic @@ -570,7 +570,7 @@ enum bound_region { br_cap_avoid(ast::node_id, @bound_region), } -type opt_region = Option; +type opt_region = Option; /** * The type substs represents the kinds of things that can be substituted to @@ -610,7 +610,7 @@ enum sty { ty_uniq(mt), ty_evec(mt, vstore), ty_ptr(mt), - ty_rptr(region, mt), + ty_rptr(Region, mt), ty_rec(~[field]), ty_fn(FnTy), ty_trait(def_id, substs, vstore), @@ -656,9 +656,9 @@ enum type_err { terr_record_fields(expected_found), terr_arg_count, terr_mode_mismatch(expected_found), - terr_regions_does_not_outlive(region, region), - terr_regions_not_same(region, region), - terr_regions_no_overlap(region, region), + terr_regions_does_not_outlive(Region, Region), + terr_regions_not_same(Region, Region), + terr_regions_no_overlap(Region, Region), terr_vstores_differ(terr_vstore_kind, expected_found), terr_in_field(@type_err, ast::ident), terr_sorts(expected_found), @@ -783,7 +783,7 @@ impl FnVid : to_bytes::IterBytes { } } -fn param_bounds_to_kind(bounds: param_bounds) -> kind { +fn param_bounds_to_kind(bounds: param_bounds) -> Kind { let mut kind = kind_noncopyable(); for vec::each(*bounds) |bound| { match *bound { @@ -834,7 +834,7 @@ fn new_ty_hash() -> map::HashMap { map::HashMap() } -fn mk_ctxt(s: session::session, +fn mk_ctxt(s: session::Session, dm: resolve::DefMap, amap: ast_map::map, freevars: freevars::freevar_map, @@ -904,7 +904,7 @@ fn mk_t_with_id(cx: ctxt, +st: sty, o_def_id: Option) -> t { _ => () } let mut flags = 0u; - fn rflags(r: region) -> uint { + fn rflags(r: Region) -> uint { (has_regions as uint) | { match r { ty::re_var(_) => needs_infer as uint, @@ -1018,12 +1018,12 @@ fn mk_imm_uniq(cx: ctxt, ty: t) -> t { mk_uniq(cx, {ty: ty, fn mk_ptr(cx: ctxt, tm: mt) -> t { mk_t(cx, ty_ptr(tm)) } -fn mk_rptr(cx: ctxt, r: region, tm: mt) -> t { mk_t(cx, ty_rptr(r, tm)) } +fn mk_rptr(cx: ctxt, r: Region, tm: mt) -> t { mk_t(cx, ty_rptr(r, tm)) } -fn mk_mut_rptr(cx: ctxt, r: region, ty: t) -> t { +fn mk_mut_rptr(cx: ctxt, r: Region, ty: t) -> t { mk_rptr(cx, r, {ty: ty, mutbl: ast::m_mutbl}) } -fn mk_imm_rptr(cx: ctxt, r: region, ty: t) -> t { +fn mk_imm_rptr(cx: ctxt, r: Region, ty: t) -> t { mk_rptr(cx, r, {ty: ty, mutbl: ast::m_imm}) } @@ -1148,7 +1148,7 @@ fn default_arg_mode_for_ty(tcx: ctxt, ty: ty::t) -> ast::rmode { // Returns the narrowest lifetime enclosing the evaluation of the expression // with id `id`. 
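
The helper that follows this comment maps an expression's node id to its narrowest enclosing lifetime: the scope recorded in the region map when one exists, otherwise the static region. In outline, in modern Rust syntax (the Region stand-in below keeps only the two variants this helper produces):

    use std::collections::HashMap;

    type NodeId = u32;

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Region { ReScope(NodeId), ReStatic }   // stand-ins for re_scope / re_static

    // Mirrors encl_region(): consult the region map, defaulting to 'static.
    fn encl_region(region_map: &HashMap<NodeId, NodeId>, id: NodeId) -> Region {
        match region_map.get(&id) {
            Some(&encl_scope) => Region::ReScope(encl_scope),
            None => Region::ReStatic,
        }
    }

    fn main() {
        let mut map = HashMap::new();
        map.insert(10, 3);   // expression 10 is enclosed by scope 3
        assert_eq!(encl_region(&map, 10), Region::ReScope(3));
        assert_eq!(encl_region(&map, 99), Region::ReStatic);
    }
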
-fn encl_region(cx: ctxt, id: ast::node_id) -> ty::region { +fn encl_region(cx: ctxt, id: ast::node_id) -> ty::Region { match cx.region_map.find(id) { Some(encl_scope) => ty::re_scope(encl_scope), None => ty::re_static @@ -1265,7 +1265,7 @@ fn fold_ty(cx: ctxt, t0: t, fldop: fn(t) -> t) -> t { fn walk_regions_and_ty( cx: ctxt, ty: t, - walkr: fn(r: region), + walkr: fn(r: Region), walkt: fn(t: t) -> bool) { if (walkt(ty)) { @@ -1280,13 +1280,13 @@ fn walk_regions_and_ty( fn fold_regions_and_ty( cx: ctxt, ty: t, - fldr: fn(r: region) -> region, + fldr: fn(r: Region) -> Region, fldfnt: fn(t: t) -> t, fldt: fn(t: t) -> t) -> t { fn fold_substs( substs: &substs, - fldr: fn(r: region) -> region, + fldr: fn(r: Region) -> Region, fldt: fn(t: t) -> t) -> substs { {self_r: substs.self_r.map(|r| fldr(*r)), @@ -1351,10 +1351,10 @@ fn fold_regions_and_ty( fn fold_regions( cx: ctxt, ty: t, - fldr: fn(r: region, in_fn: bool) -> region) -> t { + fldr: fn(r: Region, in_fn: bool) -> Region) -> t { fn do_fold(cx: ctxt, ty: t, in_fn: bool, - fldr: fn(region, bool) -> region) -> t { + fldr: fn(Region, bool) -> Region) -> t { if !type_has_regions(ty) { return ty; } fold_regions_and_ty( cx, ty, @@ -1365,9 +1365,9 @@ fn fold_regions( do_fold(cx, ty, false, fldr) } -fn fold_region(cx: ctxt, t0: t, fldop: fn(region, bool) -> region) -> t { +fn fold_region(cx: ctxt, t0: t, fldop: fn(Region, bool) -> Region) -> t { fn do_fold(cx: ctxt, t0: t, under_r: bool, - fldop: fn(region, bool) -> region) -> t { + fldop: fn(Region, bool) -> Region) -> t { let tb = get(t0); if !tbox_has_flag(tb, has_regions) { return t0; } match tb.sty { @@ -1777,7 +1777,7 @@ fn type_needs_unwind_cleanup_(cx: ctxt, ty: t, return needs_unwind_cleanup; } -enum kind { kind_(u32) } +enum Kind { kind_(u32) } /// can be copied (implicitly or explicitly) const KIND_MASK_COPY : u32 = 0b000000000000000000000000001_u32; @@ -1797,92 +1797,92 @@ const KIND_MASK_IMPLICIT : u32 = 0b000000000000000000000010000_u32; /// safe for default mode (subset of KIND_MASK_IMPLICIT) const KIND_MASK_DEFAULT_MODE : u32 = 0b000000000000000000000100000_u32; -fn kind_noncopyable() -> kind { +fn kind_noncopyable() -> Kind { kind_(0u32) } -fn kind_copyable() -> kind { +fn kind_copyable() -> Kind { kind_(KIND_MASK_COPY) } -fn kind_implicitly_copyable() -> kind { +fn kind_implicitly_copyable() -> Kind { kind_(KIND_MASK_IMPLICIT | KIND_MASK_COPY) } -fn kind_safe_for_default_mode() -> kind { +fn kind_safe_for_default_mode() -> Kind { // similar to implicit copy, but always includes vectors and strings kind_(KIND_MASK_DEFAULT_MODE | KIND_MASK_IMPLICIT | KIND_MASK_COPY) } -fn kind_implicitly_sendable() -> kind { +fn kind_implicitly_sendable() -> Kind { kind_(KIND_MASK_IMPLICIT | KIND_MASK_COPY | KIND_MASK_SEND) } -fn kind_safe_for_default_mode_send() -> kind { +fn kind_safe_for_default_mode_send() -> Kind { // similar to implicit copy, but always includes vectors and strings kind_(KIND_MASK_DEFAULT_MODE | KIND_MASK_IMPLICIT | KIND_MASK_COPY | KIND_MASK_SEND) } -fn kind_send_copy() -> kind { +fn kind_send_copy() -> Kind { kind_(KIND_MASK_COPY | KIND_MASK_SEND) } -fn kind_send_only() -> kind { +fn kind_send_only() -> Kind { kind_(KIND_MASK_SEND) } -fn kind_const() -> kind { +fn kind_const() -> Kind { kind_(KIND_MASK_CONST) } -fn kind_owned() -> kind { +fn kind_owned() -> Kind { kind_(KIND_MASK_OWNED) } -fn kind_top() -> kind { +fn kind_top() -> Kind { kind_(0xffffffffu32) } -fn remove_const(k: kind) -> kind { +fn remove_const(k: Kind) -> Kind { k - kind_const() } -fn remove_implicit(k: 
kind) -> kind { +fn remove_implicit(k: Kind) -> Kind { k - kind_(KIND_MASK_IMPLICIT | KIND_MASK_DEFAULT_MODE) } -fn remove_send(k: kind) -> kind { +fn remove_send(k: Kind) -> Kind { k - kind_(KIND_MASK_SEND) } -fn remove_owned_send(k: kind) -> kind { +fn remove_owned_send(k: Kind) -> Kind { k - kind_(KIND_MASK_OWNED) - kind_(KIND_MASK_SEND) } -fn remove_copyable(k: kind) -> kind { +fn remove_copyable(k: Kind) -> Kind { k - kind_(KIND_MASK_COPY | KIND_MASK_DEFAULT_MODE) } -impl kind : ops::BitAnd { - pure fn bitand(other: &kind) -> kind { +impl Kind : ops::BitAnd { + pure fn bitand(other: &Kind) -> Kind { unsafe { lower_kind(self, (*other)) } } } -impl kind : ops::BitOr { - pure fn bitor(other: &kind) -> kind { +impl Kind : ops::BitOr { + pure fn bitor(other: &Kind) -> Kind { unsafe { raise_kind(self, (*other)) } } } -impl kind : ops::Sub { - pure fn sub(other: &kind) -> kind { +impl Kind : ops::Sub { + pure fn sub(other: &Kind) -> Kind { unsafe { kind_(*self & !*(*other)) } @@ -1892,27 +1892,27 @@ impl kind : ops::Sub { // Using these query functions is preferable to direct comparison or matching // against the kind constants, as we may modify the kind hierarchy in the // future. -pure fn kind_can_be_implicitly_copied(k: kind) -> bool { +pure fn kind_can_be_implicitly_copied(k: Kind) -> bool { *k & KIND_MASK_IMPLICIT == KIND_MASK_IMPLICIT } -pure fn kind_is_safe_for_default_mode(k: kind) -> bool { +pure fn kind_is_safe_for_default_mode(k: Kind) -> bool { *k & KIND_MASK_DEFAULT_MODE == KIND_MASK_DEFAULT_MODE } -pure fn kind_can_be_copied(k: kind) -> bool { +pure fn kind_can_be_copied(k: Kind) -> bool { *k & KIND_MASK_COPY == KIND_MASK_COPY } -pure fn kind_can_be_sent(k: kind) -> bool { +pure fn kind_can_be_sent(k: Kind) -> bool { *k & KIND_MASK_SEND == KIND_MASK_SEND } -pure fn kind_is_owned(k: kind) -> bool { +pure fn kind_is_owned(k: Kind) -> bool { *k & KIND_MASK_OWNED == KIND_MASK_OWNED } -fn meta_kind(p: FnMeta) -> kind { +fn meta_kind(p: FnMeta) -> Kind { match p.proto { // XXX consider the kind bounds! proto_vstore(vstore_slice(_)) => kind_noncopyable() | kind_(KIND_MASK_DEFAULT_MODE), @@ -1927,15 +1927,15 @@ fn meta_kind(p: FnMeta) -> kind { } } -fn kind_lteq(a: kind, b: kind) -> bool { +fn kind_lteq(a: Kind, b: Kind) -> bool { *a & *b == *a } -fn lower_kind(a: kind, b: kind) -> kind { +fn lower_kind(a: Kind, b: Kind) -> Kind { kind_(*a & *b) } -fn raise_kind(a: kind, b: kind) -> kind { +fn raise_kind(a: Kind, b: Kind) -> Kind { kind_(*a | *b) } @@ -1960,7 +1960,7 @@ fn test_kinds() { // with the given mutability can have. // This is used to prevent objects containing mutable state from being // implicitly copied and to compute whether things have const kind. 
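
The kind operations being renamed here treat a Kind as a bit set of capabilities: lower_kind intersects two kinds, raise_kind unions them, and kind_lteq(a, b) checks that every bit of a is also set in b. A compact sketch of that lattice in modern Rust syntax; the bit positions are illustrative rather than the patch's exact mask constants:

    // A kind is a bit set of capabilities; only two masks are shown here.
    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Kind(u32);

    const KIND_MASK_COPY: u32 = 1 << 0;
    const KIND_MASK_SEND: u32 = 1 << 1;

    fn kind_copyable() -> Kind { Kind(KIND_MASK_COPY) }
    fn kind_send_copy() -> Kind { Kind(KIND_MASK_COPY | KIND_MASK_SEND) }

    // Meet and join on the capability lattice, and the induced ordering.
    fn lower_kind(a: Kind, b: Kind) -> Kind { Kind(a.0 & b.0) }
    fn raise_kind(a: Kind, b: Kind) -> Kind { Kind(a.0 | b.0) }
    fn kind_lteq(a: Kind, b: Kind) -> bool { a.0 & b.0 == a.0 }

    fn main() {
        // Something both copyable and sendable is at least copyable...
        assert!(kind_lteq(kind_copyable(), kind_send_copy()));
        // ...intersecting the two keeps only the shared capability...
        assert_eq!(lower_kind(kind_send_copy(), kind_copyable()), kind_copyable());
        // ...and union adds capabilities back.
        assert_eq!(raise_kind(kind_copyable(), Kind(KIND_MASK_SEND)), kind_send_copy());
    }
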
-fn mutability_kind(m: mutability) -> kind { +fn mutability_kind(m: mutability) -> Kind { match (m) { m_mutbl => remove_const(remove_implicit(kind_top())), m_const => remove_implicit(kind_top()), @@ -1968,11 +1968,11 @@ fn mutability_kind(m: mutability) -> kind { } } -fn mutable_type_kind(cx: ctxt, ty: mt) -> kind { +fn mutable_type_kind(cx: ctxt, ty: mt) -> Kind { lower_kind(mutability_kind(ty.mutbl), type_kind(cx, ty.ty)) } -fn type_kind(cx: ctxt, ty: t) -> kind { +fn type_kind(cx: ctxt, ty: t) -> Kind { match cx.kind_cache.find(ty) { Some(result) => return result, None => {/* fall through */ } @@ -2550,7 +2550,7 @@ impl bound_region : to_bytes::IterBytes { } } -impl region : to_bytes::IterBytes { +impl Region : to_bytes::IterBytes { pure fn iter_bytes(+lsb0: bool, f: to_bytes::Cb) { match self { re_bound(ref br) => @@ -2763,7 +2763,7 @@ fn is_fn_ty(fty: t) -> bool { } } -fn ty_region(ty: t) -> region { +fn ty_region(ty: t) -> Region { match get(ty).sty { ty_rptr(r, _) => r, s => fail fmt!("ty_region() invoked on non-rptr: %?", s) @@ -4084,8 +4084,8 @@ impl RegionVid : cmp::Eq { pure fn ne(other: &RegionVid) -> bool { *self != *(*other) } } -impl region : cmp::Eq { - pure fn eq(other: ®ion) -> bool { +impl Region : cmp::Eq { + pure fn eq(other: &Region) -> bool { match self { re_bound(e0a) => { match (*other) { @@ -4119,7 +4119,7 @@ impl region : cmp::Eq { } } } - pure fn ne(other: ®ion) -> bool { !self.eq(other) } + pure fn ne(other: &Region) -> bool { !self.eq(other) } } impl bound_region : cmp::Eq { @@ -4367,9 +4367,9 @@ impl param_bound : cmp::Eq { pure fn ne(other: ¶m_bound) -> bool { !self.eq(other) } } -impl kind : cmp::Eq { - pure fn eq(other: &kind) -> bool { *self == *(*other) } - pure fn ne(other: &kind) -> bool { *self != *(*other) } +impl Kind : cmp::Eq { + pure fn eq(other: &Kind) -> bool { *self == *(*other) } + pure fn ne(other: &Kind) -> bool { *self != *(*other) } } diff --git a/src/rustc/middle/typeck.rs b/src/rustc/middle/typeck.rs index 077d34700b8d4..8d10343d78ee0 100644 --- a/src/rustc/middle/typeck.rs +++ b/src/rustc/middle/typeck.rs @@ -46,7 +46,6 @@ use syntax::ast_map::node_id_to_str; use syntax::ast_util::{local_def, respan, split_trait_methods}; use syntax::visit; use metadata::csearch; -use driver::session::session; use util::common::may_break; use syntax::codemap::span; use pat_util::{pat_is_variant, pat_id_map, PatIdMap}; diff --git a/src/rustc/middle/typeck/astconv.rs b/src/rustc/middle/typeck/astconv.rs index 389c1adb016bc..b8ff637f7dd37 100644 --- a/src/rustc/middle/typeck/astconv.rs +++ b/src/rustc/middle/typeck/astconv.rs @@ -58,7 +58,7 @@ trait ast_conv { fn get_region_reporting_err(tcx: ty::ctxt, span: span, - res: Result) -> ty::region { + res: Result) -> ty::Region { match res { result::Ok(r) => r, @@ -70,7 +70,7 @@ fn get_region_reporting_err(tcx: ty::ctxt, } fn ast_region_to_region( - self: AC, rscope: RS, span: span, a_r: @ast::region) -> ty::region { + self: AC, rscope: RS, span: span, a_r: @ast::region) -> ty::Region { let res = match a_r.node { ast::re_static => Ok(ty::re_static), @@ -155,7 +155,7 @@ const NO_TPS: uint = 2u; // internal notion of a type. 
`getter` is a function that returns the type // corresponding to a definition ID: fn ast_ty_to_ty( - self: AC, rscope: RS, &&ast_ty: @ast::ty) -> ty::t { + self: AC, rscope: RS, &&ast_ty: @ast::Ty) -> ty::t { fn ast_mt_to_mt( self: AC, rscope: RS, mt: ast::mt) -> ty::mt { diff --git a/src/rustc/middle/typeck/check.rs b/src/rustc/middle/typeck/check.rs index 9a7f2192cb144..6de249ebc68f2 100644 --- a/src/rustc/middle/typeck/check.rs +++ b/src/rustc/middle/typeck/check.rs @@ -166,20 +166,20 @@ fn blank_fn_ctxt(ccx: @crate_ctxt, rty: ty::t, } // a list of mapping from in-scope-region-names ("isr") to the -// corresponding ty::region -type isr_alist = @List<(ty::bound_region, ty::region)>; +// corresponding ty::Region +type isr_alist = @List<(ty::bound_region, ty::Region)>; trait get_and_find_region { - fn get(br: ty::bound_region) -> ty::region; - fn find(br: ty::bound_region) -> Option; + fn get(br: ty::bound_region) -> ty::Region; + fn find(br: ty::bound_region) -> Option; } impl isr_alist: get_and_find_region { - fn get(br: ty::bound_region) -> ty::region { + fn get(br: ty::bound_region) -> ty::Region { self.find(br).get() } - fn find(br: ty::bound_region) -> Option { + fn find(br: ty::bound_region) -> Option { for list::each(self) |isr| { let (isr_br, isr_r) = *isr; if isr_br == br { return Some(isr_r); } @@ -563,7 +563,7 @@ impl @fn_ctxt: ast_conv { impl @fn_ctxt { fn search_in_scope_regions(br: ty::bound_region) - -> Result + -> Result { match self.in_scope_regions.find(br) { Some(r) => result::Ok(r), @@ -581,13 +581,13 @@ impl @fn_ctxt { } impl @fn_ctxt: region_scope { - fn anon_region(span: span) -> Result { + fn anon_region(span: span) -> Result { result::Ok(self.infcx().next_region_var_nb(span)) } - fn self_region(_span: span) -> Result { + fn self_region(_span: span) -> Result { self.search_in_scope_regions(ty::br_self) } - fn named_region(_span: span, id: ast::ident) -> Result { + fn named_region(_span: span, id: ast::ident) -> Result { self.search_in_scope_regions(ty::br_named(id)) } } @@ -600,7 +600,7 @@ impl @fn_ctxt { pprust::expr_to_str(expr, self.tcx().sess.intr())) } - fn block_region() -> ty::region { + fn block_region() -> ty::Region { ty::re_scope(self.region_lb) } @@ -645,7 +645,7 @@ impl @fn_ctxt { self.write_ty(node_id, ty::mk_bot(self.tcx())); } - fn to_ty(ast_t: @ast::ty) -> ty::t { + fn to_ty(ast_t: @ast::Ty) -> ty::t { ast_ty_to_ty(self, self, ast_t) } @@ -736,7 +736,7 @@ impl @fn_ctxt { } fn mk_subr(a_is_expected: bool, span: span, - sub: ty::region, sup: ty::region) -> Result<(), ty::type_err> { + sub: ty::Region, sup: ty::Region) -> Result<(), ty::type_err> { infer::mk_subr(self.infcx(), a_is_expected, span, sub, sup) } @@ -760,8 +760,8 @@ impl @fn_ctxt { fn region_var_if_parameterized(rp: Option, span: span, - lower_bound: ty::region) - -> Option + lower_bound: ty::Region) + -> Option { rp.map( |_rp| self.infcx().next_region_var_with_lb(span, lower_bound)) @@ -1359,7 +1359,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, // Check field access expressions fn check_field(fcx: @fn_ctxt, expr: @ast::expr, is_callee: bool, - base: @ast::expr, field: ast::ident, tys: ~[@ast::ty]) + base: @ast::expr, field: ast::ident, tys: ~[@ast::Ty]) -> bool { let tcx = fcx.ccx.tcx; @@ -2443,7 +2443,7 @@ fn instantiate_path(fcx: @fn_ctxt, tpt: ty_param_bounds_and_ty, span: span, node_id: ast::node_id, - region_lb: ty::region) { + region_lb: ty::Region) { let ty_param_count = vec::len(*tpt.bounds); let ty_substs_len = vec::len(pth.types); diff --git 
a/src/rustc/middle/typeck/check/alt.rs b/src/rustc/middle/typeck/check/alt.rs index 24bcc2281fb9a..caace6051982e 100644 --- a/src/rustc/middle/typeck/check/alt.rs +++ b/src/rustc/middle/typeck/check/alt.rs @@ -112,8 +112,8 @@ fn check_legality_of_move_bindings(fcx: @fn_ctxt, type pat_ctxt = { fcx: @fn_ctxt, map: PatIdMap, - alt_region: ty::region, // Region for the alt as a whole - block_region: ty::region, // Region for the block of the arm + alt_region: ty::Region, // Region for the alt as a whole + block_region: ty::Region, // Region for the block of the arm }; fn check_pat_variant(pcx: pat_ctxt, pat: @ast::pat, path: @ast::path, diff --git a/src/rustc/middle/typeck/check/method.rs b/src/rustc/middle/typeck/check/method.rs index 04be004754828..eaf1a45afa9c0 100644 --- a/src/rustc/middle/typeck/check/method.rs +++ b/src/rustc/middle/typeck/check/method.rs @@ -654,7 +654,7 @@ impl LookupContext { kind: AutoRefKind, autoderefs: uint, mutbls: &[ast::mutability], - mk_autoref_ty: &fn(ast::mutability, ty::region) -> ty::t) + mk_autoref_ty: &fn(ast::mutability, ty::Region) -> ty::t) -> Option { // This is hokey. We should have mutability inference as a @@ -930,7 +930,7 @@ impl LookupContext { } fn transform_self_type_for_method(tcx: ty::ctxt, - self_region: Option, + self_region: Option, impl_ty: ty::t, self_type: ast::self_ty_) -> ty::t diff --git a/src/rustc/middle/typeck/check/regionck.rs b/src/rustc/middle/typeck/check/regionck.rs index 0b258da5672db..932cdd994da7d 100644 --- a/src/rustc/middle/typeck/check/regionck.rs +++ b/src/rustc/middle/typeck/check/regionck.rs @@ -32,7 +32,7 @@ use middle::ty::{vstore_uniq}; enum rcx { rcx_({fcx: @fn_ctxt, mut errors_reported: uint}) } type rvt = visit::vt<@rcx>; -fn encl_region_of_def(fcx: @fn_ctxt, def: ast::def) -> ty::region { +fn encl_region_of_def(fcx: @fn_ctxt, def: ast::def) -> ty::Region { let tcx = fcx.tcx(); match def { def_local(node_id, _) | def_arg(node_id, _) | def_self(node_id) | @@ -335,7 +335,7 @@ fn constrain_auto_ref( fn constrain_free_variables( rcx: @rcx, - region: ty::region, + region: ty::Region, expr: @ast::expr) { /*! 
@@ -373,7 +373,7 @@ fn constrain_free_variables( fn constrain_regions_in_type_of_node( rcx: @rcx, id: ast::node_id, - encl_region: ty::region, + encl_region: ty::Region, span: span) -> bool { let tcx = rcx.fcx.tcx(); @@ -395,7 +395,7 @@ fn constrain_regions_in_type_of_node( fn constrain_regions_in_type( rcx: @rcx, - encl_region: ty::region, + encl_region: ty::Region, span: span, ty: ty::t) -> bool { @@ -417,9 +417,9 @@ fn constrain_regions_in_type( return (e == rcx.errors_reported); fn constrain_region(rcx: @rcx, - encl_region: ty::region, + encl_region: ty::Region, span: span, - region: ty::region) { + region: ty::Region) { let tcx = rcx.fcx.ccx.tcx; debug!("constrain_region(encl_region=%?, region=%?)", diff --git a/src/rustc/middle/typeck/check/regionmanip.rs b/src/rustc/middle/typeck/check/regionmanip.rs index 4afb3ad78a635..806b234540cdc 100644 --- a/src/rustc/middle/typeck/check/regionmanip.rs +++ b/src/rustc/middle/typeck/check/regionmanip.rs @@ -10,7 +10,7 @@ fn replace_bound_regions_in_fn_ty( isr: isr_alist, self_info: Option, fn_ty: &ty::FnTy, - mapf: fn(ty::bound_region) -> ty::region) -> + mapf: fn(ty::bound_region) -> ty::Region) -> {isr: isr_alist, self_info: Option, fn_ty: ty::FnTy} { // Take self_info apart; the self_ty part is the only one we want @@ -83,7 +83,7 @@ fn replace_bound_regions_in_fn_ty( tcx: ty::ctxt, isr: isr_alist, tys: ~[ty::t], - to_r: fn(ty::bound_region) -> ty::region) -> isr_alist { + to_r: fn(ty::bound_region) -> ty::Region) -> isr_alist { // Takes `isr` (described above), `to_r` (described above), // and `r`, a region. If `r` is anything other than a bound @@ -93,8 +93,8 @@ fn replace_bound_regions_in_fn_ty( // updated isr_alist that now contains a mapping from `r` to // the result of calling `to_r` on it. fn append_isr(isr: isr_alist, - to_r: fn(ty::bound_region) -> ty::region, - r: ty::region) -> isr_alist { + to_r: fn(ty::bound_region) -> ty::Region, + r: ty::Region) -> isr_alist { match r { ty::re_free(_, _) | ty::re_static | ty::re_scope(_) | ty::re_var(_) => { diff --git a/src/rustc/middle/typeck/coherence.rs b/src/rustc/middle/typeck/coherence.rs index 9a9a8dda6e4d8..189e7377d9c30 100644 --- a/src/rustc/middle/typeck/coherence.rs +++ b/src/rustc/middle/typeck/coherence.rs @@ -6,7 +6,7 @@ use metadata::csearch::{ProvidedTraitMethodInfo, each_path, get_impl_traits}; use metadata::csearch::{get_impls_for_mod}; -use metadata::cstore::{cstore, iter_crate_data}; +use metadata::cstore::{CStore, iter_crate_data}; use metadata::decoder::{dl_def, dl_field, dl_impl}; use middle::resolve::{Impl, MethodInfo}; use middle::ty::{ProvidedMethodSource, get, lookup_item_type, subst, t}; @@ -595,7 +595,7 @@ impl CoherenceChecker { fn create_impl_from_item(item: @item) -> @Impl { fn add_provided_methods(all_methods: &mut ~[@MethodInfo], all_provided_methods: ~[@ProvidedMethodInfo], - sess: driver::session::session) { + sess: driver::session::Session) { for all_provided_methods.each |provided_method| { debug!( "(creating impl) adding provided method `%s` to impl", @@ -694,7 +694,7 @@ impl CoherenceChecker { // External crate handling fn add_impls_for_module(impls_seen: HashMap, - crate_store: cstore, + crate_store: CStore, module_def_id: def_id) { let implementations = get_impls_for_mod(crate_store, diff --git a/src/rustc/middle/typeck/collect.rs b/src/rustc/middle/typeck/collect.rs index 9e51225f172f2..a5390d8f293af 100644 --- a/src/rustc/middle/typeck/collect.rs +++ b/src/rustc/middle/typeck/collect.rs @@ -76,7 +76,7 @@ fn collect_item_types(ccx: @crate_ctxt, 
crate: @ast::crate) { impl @crate_ctxt { fn to_ty( - rs: RS, ast_ty: @ast::ty) -> ty::t { + rs: RS, ast_ty: @ast::Ty) -> ty::t { ast_ty_to_ty(self, rs, ast_ty) } @@ -345,7 +345,7 @@ fn compare_impl_method(tcx: ty::ctxt, sp: span, // Replaces bound references to the self region with `with_r`. fn replace_bound_self(tcx: ty::ctxt, ty: ty::t, - with_r: ty::region) -> ty::t { + with_r: ty::Region) -> ty::t { do ty::fold_regions(tcx, ty) |r, _in_fn| { if r == ty::re_bound(ty::br_self) {with_r} else {r} } diff --git a/src/rustc/middle/typeck/infer.rs b/src/rustc/middle/typeck/infer.rs index 96849bf918d4f..e0465b22c931f 100644 --- a/src/rustc/middle/typeck/infer.rs +++ b/src/rustc/middle/typeck/infer.rs @@ -258,7 +258,6 @@ use util::ppaux::{ty_to_str, mt_to_str}; use result::{Result, Ok, Err, map_vec, map_vec2, iter_vec2}; use ty::{mk_fn, type_is_bot}; use check::regionmanip::{replace_bound_regions_in_fn_ty}; -use driver::session::session; use util::common::{indent, indenter}; use ast::{unsafe_fn, impure_fn, pure_fn, extern_fn}; use ast::{m_const, m_imm, m_mutbl}; @@ -275,7 +274,7 @@ use unify::{vals_and_bindings, root}; use integral::{int_ty_set, int_ty_set_all}; use combine::{combine_fields, eq_tys}; use assignment::Assign; -use to_str::to_str; +use to_str::ToStr; use sub::Sub; use lub::Lub; @@ -385,7 +384,7 @@ fn can_mk_subty(cx: infer_ctxt, a: ty::t, b: ty::t) -> ures { } fn mk_subr(cx: infer_ctxt, a_is_expected: bool, span: span, - a: ty::region, b: ty::region) -> ures { + a: ty::Region, b: ty::Region) -> ures { debug!("mk_subr(%s <: %s)", a.to_str(cx), b.to_str(cx)); do indent { do cx.commit { @@ -431,8 +430,8 @@ fn resolve_type(cx: infer_ctxt, a: ty::t, modes: uint) resolver(cx, modes).resolve_type_chk(a) } -fn resolve_region(cx: infer_ctxt, r: ty::region, modes: uint) - -> fres { +fn resolve_region(cx: infer_ctxt, r: ty::Region, modes: uint) + -> fres { resolver(cx, modes).resolve_region_chk(r) } @@ -628,12 +627,12 @@ impl infer_ctxt { ty::mk_int_var(self.tcx, self.next_int_var_id()) } - fn next_region_var_nb(span: span) -> ty::region { + fn next_region_var_nb(span: span) -> ty::Region { ty::re_var(self.region_vars.new_region_var(span)) } fn next_region_var_with_lb(span: span, - lb_region: ty::region) -> ty::region { + lb_region: ty::Region) -> ty::Region { let region_var = self.next_region_var_nb(span); // add lb_region as a lower bound on the newly built variable @@ -644,7 +643,7 @@ impl infer_ctxt { return region_var; } - fn next_region_var(span: span, scope_id: ast::node_id) -> ty::region { + fn next_region_var(span: span, scope_id: ast::node_id) -> ty::Region { self.next_region_var_with_lb(span, ty::re_scope(scope_id)) } diff --git a/src/rustc/middle/typeck/infer/assignment.rs b/src/rustc/middle/typeck/infer/assignment.rs index 53731551df57c..a5af58904dd77 100644 --- a/src/rustc/middle/typeck/infer/assignment.rs +++ b/src/rustc/middle/typeck/infer/assignment.rs @@ -48,7 +48,7 @@ // A. But this upper-bound might be stricter than what is truly // needed. 
-use to_str::to_str; +use to_str::ToStr; use combine::combine_fields; fn to_ares(+c: cres) -> ares { @@ -190,7 +190,7 @@ priv impl Assign { a: ty::t, nr_b: ty::t, m: ast::mutability, - r_b: ty::region) -> ares { + r_b: ty::Region) -> ares { debug!("try_assign(a=%s, nr_b=%s, m=%?, r_b=%s)", a.to_str(self.infcx), diff --git a/src/rustc/middle/typeck/infer/combine.rs b/src/rustc/middle/typeck/infer/combine.rs index bdda45c1168ad..342a2ce2b76cd 100644 --- a/src/rustc/middle/typeck/infer/combine.rs +++ b/src/rustc/middle/typeck/infer/combine.rs @@ -44,7 +44,7 @@ // terms of error reporting, although we do not do that properly right // now. -use to_str::to_str; +use to_str::ToStr; use ty::{FnTyBase, FnMeta, FnSig}; trait combine { @@ -72,8 +72,8 @@ trait combine { fn protos(p1: ty::fn_proto, p2: ty::fn_proto) -> cres; fn ret_styles(r1: ret_style, r2: ret_style) -> cres; fn purities(a: purity, b: purity) -> cres; - fn contraregions(a: ty::region, b: ty::region) -> cres; - fn regions(a: ty::region, b: ty::region) -> cres; + fn contraregions(a: ty::Region, b: ty::Region) -> cres; + fn regions(a: ty::Region, b: ty::Region) -> cres; fn vstores(vk: ty::terr_vstore_kind, a: ty::vstore, b: ty::vstore) -> cres; } @@ -103,7 +103,7 @@ fn eq_tys(self: &C, a: ty::t, b: ty::t) -> ures { } } -fn eq_regions(self: &C, a: ty::region, b: ty::region) -> ures { +fn eq_regions(self: &C, a: ty::Region, b: ty::Region) -> ures { debug!("eq_regions(%s, %s)", a.to_str(self.infcx()), b.to_str(self.infcx())); @@ -127,8 +127,8 @@ fn eq_regions(self: &C, a: ty::region, b: ty::region) -> ures { fn eq_opt_regions( self: &C, - a: Option, - b: Option) -> cres> { + a: Option, + b: Option) -> cres> { match (a, b) { (None, None) => { @@ -160,9 +160,9 @@ fn super_substs( fn relate_region_param( self: &C, did: ast::def_id, - a: Option, - b: Option) - -> cres> + a: Option, + b: Option) + -> cres> { let polyty = ty::lookup_item_type(self.infcx().tcx, did); match (polyty.region_param, a, b) { diff --git a/src/rustc/middle/typeck/infer/glb.rs b/src/rustc/middle/typeck/infer/glb.rs index a8676a63b8895..77e753fa2204f 100644 --- a/src/rustc/middle/typeck/infer/glb.rs +++ b/src/rustc/middle/typeck/infer/glb.rs @@ -1,6 +1,6 @@ use combine::*; use lattice::*; -use to_str::to_str; +use to_str::ToStr; enum Glb = combine_fields; // "greatest lower bound" (common subtype) @@ -109,7 +109,7 @@ impl Glb: combine { } } - fn regions(a: ty::region, b: ty::region) -> cres { + fn regions(a: ty::Region, b: ty::Region) -> cres { debug!("%s.regions(%?, %?)", self.tag(), a.to_str(self.infcx), @@ -120,7 +120,7 @@ impl Glb: combine { } } - fn contraregions(a: ty::region, b: ty::region) -> cres { + fn contraregions(a: ty::Region, b: ty::Region) -> cres { Lub(*self).regions(a, b) } diff --git a/src/rustc/middle/typeck/infer/integral.rs b/src/rustc/middle/typeck/infer/integral.rs index 168709596dc43..1b23cb52b20c8 100644 --- a/src/rustc/middle/typeck/infer/integral.rs +++ b/src/rustc/middle/typeck/infer/integral.rs @@ -4,7 +4,7 @@ Code related to integral type inference. 
*/ -use to_str::to_str; +use to_str::ToStr; // Bitvector to represent sets of integral types enum int_ty_set = uint; diff --git a/src/rustc/middle/typeck/infer/lattice.rs b/src/rustc/middle/typeck/infer/lattice.rs index 04133cab9d76c..699613e8ae655 100644 --- a/src/rustc/middle/typeck/infer/lattice.rs +++ b/src/rustc/middle/typeck/infer/lattice.rs @@ -1,6 +1,6 @@ use combine::*; use unify::*; -use to_str::to_str; +use to_str::ToStr; // ______________________________________________________________________ // Lattice operations on variables diff --git a/src/rustc/middle/typeck/infer/lub.rs b/src/rustc/middle/typeck/infer/lub.rs index 093da5caec872..dcff863a126f0 100644 --- a/src/rustc/middle/typeck/infer/lub.rs +++ b/src/rustc/middle/typeck/infer/lub.rs @@ -1,6 +1,6 @@ use combine::*; use lattice::*; -use to_str::to_str; +use to_str::ToStr; enum Lub = combine_fields; // "subtype", "subregion" etc @@ -88,11 +88,11 @@ impl Lub: combine { } } - fn contraregions(a: ty::region, b: ty::region) -> cres { + fn contraregions(a: ty::Region, b: ty::Region) -> cres { return Glb(*self).regions(a, b); } - fn regions(a: ty::region, b: ty::region) -> cres { + fn regions(a: ty::Region, b: ty::Region) -> cres { debug!("%s.regions(%?, %?)", self.tag(), a.to_str(self.infcx), diff --git a/src/rustc/middle/typeck/infer/region_var_bindings.rs b/src/rustc/middle/typeck/infer/region_var_bindings.rs index 8bbdab74d230e..86a872341f561 100644 --- a/src/rustc/middle/typeck/infer/region_var_bindings.rs +++ b/src/rustc/middle/typeck/infer/region_var_bindings.rs @@ -312,10 +312,10 @@ use std::map::HashMap; use std::cell::{Cell, empty_cell}; use std::list::{List, Nil, Cons}; -use ty::{region, RegionVid}; use region::is_subregion_of; +use ty::{Region, RegionVid}; use syntax::codemap; -use to_str::to_str; +use to_str::ToStr; use util::ppaux::note_and_explain_region; export RegionVarBindings; @@ -325,8 +325,8 @@ export glb_regions; enum Constraint { ConstrainVarSubVar(RegionVid, RegionVid), - ConstrainRegSubVar(region, RegionVid), - ConstrainVarSubReg(RegionVid, region) + ConstrainRegSubVar(Region, RegionVid), + ConstrainVarSubReg(RegionVid, Region) } impl Constraint : cmp::Eq { @@ -365,8 +365,8 @@ impl Constraint : to_bytes::IterBytes { } struct TwoRegions { - a: region, - b: region, + a: Region, + b: Region, } impl TwoRegions : cmp::Eq { @@ -394,7 +394,7 @@ type CombineMap = HashMap; struct RegionVarBindings { tcx: ty::ctxt, var_spans: DVec, - values: Cell<~[ty::region]>, + values: Cell<~[ty::Region]>, constraints: HashMap, lubs: CombineMap, glbs: CombineMap, @@ -501,7 +501,7 @@ impl RegionVarBindings { } } - fn make_subregion(span: span, sub: region, sup: region) -> cres<()> { + fn make_subregion(span: span, sub: Region, sup: Region) -> cres<()> { // cannot add constraints once regions are resolved assert self.values.is_empty(); @@ -529,7 +529,7 @@ impl RegionVarBindings { } } - fn lub_regions(span: span, a: region, b: region) -> cres { + fn lub_regions(span: span, a: Region, b: Region) -> cres { // cannot add constraints once regions are resolved assert self.values.is_empty(); @@ -551,7 +551,7 @@ impl RegionVarBindings { } } - fn glb_regions(span: span, a: region, b: region) -> cres { + fn glb_regions(span: span, a: Region, b: Region) -> cres { // cannot add constraints once regions are resolved assert self.values.is_empty(); @@ -574,7 +574,7 @@ impl RegionVarBindings { } } - fn resolve_var(rid: RegionVid) -> ty::region { + fn resolve_var(rid: RegionVid) -> ty::Region { debug!("RegionVarBindings: resolve_var(%?=%u)", rid, 
*rid); if self.values.is_empty() { self.tcx.sess.span_bug( @@ -586,9 +586,9 @@ impl RegionVarBindings { self.values.with_ref(|values| values[*rid]) } - fn combine_vars(combines: CombineMap, a: region, b: region, span: span, - relate: fn(old_r: region, new_r: region) -> cres<()>) - -> cres { + fn combine_vars(combines: CombineMap, a: Region, b: Region, span: span, + relate: fn(old_r: Region, new_r: Region) -> cres<()>) + -> cres { let vars = TwoRegions { a: a, b: b }; match combines.find(vars) { @@ -623,11 +623,11 @@ impl RegionVarBindings { } priv impl RegionVarBindings { - fn is_subregion_of(sub: region, sup: region) -> bool { + fn is_subregion_of(sub: Region, sup: Region) -> bool { is_subregion_of(self.tcx.region_map, sub, sup) } - fn lub_concrete_regions(+a: region, +b: region) -> region { + fn lub_concrete_regions(+a: Region, +b: Region) -> Region { match (a, b) { (ty::re_static, _) | (_, ty::re_static) => { ty::re_static // nothing lives longer than static @@ -682,7 +682,7 @@ priv impl RegionVarBindings { } } - fn glb_concrete_regions(+a: region, +b: region) -> cres { + fn glb_concrete_regions(+a: Region, +b: Region) -> cres { match (a, b) { (ty::re_static, r) | (r, ty::re_static) => { // static lives longer than everything else @@ -771,7 +771,7 @@ impl Classification : cmp::Eq { pure fn ne(other: &Classification) -> bool { !self.eq(other) } } -enum GraphNodeValue { NoValue, Value(region), ErrorValue } +enum GraphNodeValue { NoValue, Value(Region), ErrorValue } struct GraphNode { span: span, @@ -792,7 +792,7 @@ struct Graph { } struct SpannedRegion { - region: region, + region: Region, span: span, } @@ -803,7 +803,7 @@ fn TwoRegionsMap() -> TwoRegionsMap { } impl RegionVarBindings { - fn infer_variable_values() -> ~[region] { + fn infer_variable_values() -> ~[Region] { let graph = self.construct_graph(); self.expansion(&graph); self.contraction(&graph); @@ -895,7 +895,7 @@ impl RegionVarBindings { } } - fn expand_node(a_region: region, + fn expand_node(a_region: Region, b_vid: RegionVid, b_node: &GraphNode) -> bool { debug!("expand_node(%?, %? == %?)", @@ -955,7 +955,7 @@ impl RegionVarBindings { fn contract_node(a_vid: RegionVid, a_node: &GraphNode, - b_region: region) -> bool { + b_region: Region) -> bool { debug!("contract_node(%? == %?/%?, %?)", a_vid, a_node.value, a_node.classification, b_region); @@ -985,8 +985,8 @@ impl RegionVarBindings { fn check_node(self: &RegionVarBindings, a_vid: RegionVid, a_node: &GraphNode, - a_region: region, - b_region: region) -> bool { + a_region: Region, + b_region: Region) -> bool { if !self.is_subregion_of(a_region, b_region) { debug!("Setting %? to ErrorValue: %? 
not subregion of %?", a_vid, a_region, b_region); @@ -998,8 +998,8 @@ impl RegionVarBindings { fn adjust_node(self: &RegionVarBindings, a_vid: RegionVid, a_node: &GraphNode, - a_region: region, - b_region: region) -> bool { + a_region: Region, + b_region: Region) -> bool { match self.glb_concrete_regions(a_region, b_region) { Ok(glb) => { if glb == a_region { @@ -1040,7 +1040,7 @@ impl RegionVarBindings { debug!("---- %s Complete after %u iteration(s)", tag, iteration); } - fn extract_regions_and_report_errors(graph: &Graph) -> ~[region] { + fn extract_regions_and_report_errors(graph: &Graph) -> ~[Region] { let dup_map = TwoRegionsMap(); graph.nodes.mapi(|idx, node| { match node.value { @@ -1073,8 +1073,8 @@ impl RegionVarBindings { // Used to suppress reporting the same basic error over and over fn is_reported(dup_map: TwoRegionsMap, - r_a: region, - r_b: region) -> bool { + r_a: Region, + r_b: Region) -> bool { let key = TwoRegions { a: r_a, b: r_b }; !dup_map.insert(key, ()) } diff --git a/src/rustc/middle/typeck/infer/resolve.rs b/src/rustc/middle/typeck/infer/resolve.rs index 2a851a5f7bb26..5a55fbf9a5dfc 100644 --- a/src/rustc/middle/typeck/infer/resolve.rs +++ b/src/rustc/middle/typeck/infer/resolve.rs @@ -35,7 +35,7 @@ // probably better off writing `resolve_all - resolve_ivar`. use integral::*; -use to_str::to_str; +use to_str::ToStr; const resolve_nested_tvar: uint = 0b00000001; const resolve_rvar: uint = 0b00000010; @@ -98,7 +98,7 @@ impl resolve_state { } } - fn resolve_region_chk(orig: ty::region) -> fres { + fn resolve_region_chk(orig: ty::Region) -> fres { self.err = None; let resolved = indent(|| self.resolve_region(orig) ); match self.err { @@ -145,7 +145,7 @@ impl resolve_state { } } - fn resolve_region(orig: ty::region) -> ty::region { + fn resolve_region(orig: ty::Region) -> ty::Region { debug!("Resolve_region(%s)", orig.to_str(self.infcx)); match orig { ty::re_var(rid) => self.resolve_region_var(rid), @@ -153,14 +153,14 @@ impl resolve_state { } } - fn resolve_region_var(rid: RegionVid) -> ty::region { + fn resolve_region_var(rid: RegionVid) -> ty::Region { if !self.should(resolve_rvar) { return ty::re_var(rid) } self.infcx.region_vars.resolve_var(rid) } - fn assert_not_rvar(rid: RegionVid, r: ty::region) { + fn assert_not_rvar(rid: RegionVid, r: ty::Region) { match r { ty::re_var(rid2) => { self.err = Some(region_var_bound_by_region_var(rid, rid2)); diff --git a/src/rustc/middle/typeck/infer/sub.rs b/src/rustc/middle/typeck/infer/sub.rs index e6bcdf3e71ff1..0aba993512bb4 100644 --- a/src/rustc/middle/typeck/infer/sub.rs +++ b/src/rustc/middle/typeck/infer/sub.rs @@ -1,6 +1,6 @@ use combine::*; use unify::*; -use to_str::to_str; +use to_str::ToStr; enum Sub = combine_fields; // "subtype", "subregion" etc @@ -20,14 +20,14 @@ impl Sub: combine { Sub(opp).tys(b, a) } - fn contraregions(a: ty::region, b: ty::region) -> cres { + fn contraregions(a: ty::Region, b: ty::Region) -> cres { let opp = combine_fields { a_is_expected: !self.a_is_expected,.. 
*self }; Sub(opp).regions(b, a) } - fn regions(a: ty::region, b: ty::region) -> cres { + fn regions(a: ty::Region, b: ty::Region) -> cres { debug!("%s.regions(%s, %s)", self.tag(), a.to_str(self.infcx), diff --git a/src/rustc/middle/typeck/infer/to_str.rs b/src/rustc/middle/typeck/infer/to_str.rs index 7acfdcac424de..c98a217a7464e 100644 --- a/src/rustc/middle/typeck/infer/to_str.rs +++ b/src/rustc/middle/typeck/infer/to_str.rs @@ -1,29 +1,29 @@ use integral::{int_ty_set}; use unify::{var_value, redirect, root}; -trait to_str { +trait ToStr { fn to_str(cx: infer_ctxt) -> ~str; } -impl ty::t: to_str { +impl ty::t: ToStr { fn to_str(cx: infer_ctxt) -> ~str { ty_to_str(cx.tcx, self) } } -impl ty::mt: to_str { +impl ty::mt: ToStr { fn to_str(cx: infer_ctxt) -> ~str { mt_to_str(cx.tcx, self) } } -impl ty::region: to_str { +impl ty::Region: ToStr { fn to_str(cx: infer_ctxt) -> ~str { util::ppaux::region_to_str(cx.tcx, self) } } -impl bound: to_str { +impl bound: ToStr { fn to_str(cx: infer_ctxt) -> ~str { match self { Some(v) => v.to_str(cx), @@ -32,7 +32,7 @@ impl bound: to_str { } } -impl bounds: to_str { +impl bounds: ToStr { fn to_str(cx: infer_ctxt) -> ~str { fmt!("{%s <: %s}", self.lb.to_str(cx), @@ -40,7 +40,7 @@ impl bounds: to_str { } } -impl int_ty_set: to_str { +impl int_ty_set: ToStr { fn to_str(_cx: infer_ctxt) -> ~str { match self { int_ty_set(v) => uint::to_str(v, 10u) @@ -48,7 +48,7 @@ impl int_ty_set: to_str { } } -impl var_value: to_str { +impl var_value: ToStr { fn to_str(cx: infer_ctxt) -> ~str { match self { redirect(vid) => fmt!("redirect(%s)", vid.to_str()), diff --git a/src/rustc/middle/typeck/infer/unify.rs b/src/rustc/middle/typeck/infer/unify.rs index 7ccbaa40ada19..f865705563c60 100644 --- a/src/rustc/middle/typeck/infer/unify.rs +++ b/src/rustc/middle/typeck/infer/unify.rs @@ -1,6 +1,6 @@ use combine::combine; use integral::*; -use to_str::to_str; +use to_str::ToStr; use std::smallintmap::SmallIntMap; enum var_value { @@ -46,7 +46,7 @@ impl infer_ctxt { } } - fn set( + fn set( vb: &vals_and_bindings, vid: V, +new_v: var_value) { diff --git a/src/rustc/middle/typeck/rscope.rs b/src/rustc/middle/typeck/rscope.rs index 9b9695088f379..d379607d6a81f 100644 --- a/src/rustc/middle/typeck/rscope.rs +++ b/src/rustc/middle/typeck/rscope.rs @@ -2,21 +2,21 @@ use result::Result; use syntax::parse::token::special_idents; trait region_scope { - fn anon_region(span: span) -> Result; - fn self_region(span: span) -> Result; - fn named_region(span: span, id: ast::ident) -> Result; + fn anon_region(span: span) -> Result; + fn self_region(span: span) -> Result; + fn named_region(span: span, id: ast::ident) -> Result; } enum empty_rscope { empty_rscope } impl empty_rscope: region_scope { - fn anon_region(_span: span) -> Result { + fn anon_region(_span: span) -> Result { result::Ok(ty::re_static) } - fn self_region(_span: span) -> Result { + fn self_region(_span: span) -> Result { result::Err(~"only the static region is allowed here") } fn named_region(_span: span, _id: ast::ident) - -> Result + -> Result { result::Err(~"only the static region is allowed here") } @@ -24,17 +24,17 @@ impl empty_rscope: region_scope { enum type_rscope = Option; impl type_rscope: region_scope { - fn anon_region(_span: span) -> Result { + fn anon_region(_span: span) -> Result { match *self { Some(_) => result::Ok(ty::re_bound(ty::br_self)), None => result::Err(~"to use region types here, the containing \ type must be declared with a region bound") } } - fn self_region(span: span) -> Result { + fn 
self_region(span: span) -> Result { self.anon_region(span) } - fn named_region(span: span, id: ast::ident) -> Result { + fn named_region(span: span, id: ast::ident) -> Result { do empty_rscope.named_region(span, id).chain_err |_e| { result::Err(~"named regions other than `self` are not \ allowed as part of a type declaration") @@ -42,26 +42,26 @@ impl type_rscope: region_scope { } } -fn bound_self_region(rp: Option) -> Option { +fn bound_self_region(rp: Option) -> Option { match rp { Some(_) => Some(ty::re_bound(ty::br_self)), None => None } } -enum anon_rscope = {anon: ty::region, base: region_scope}; -fn in_anon_rscope(self: RS, r: ty::region) +enum anon_rscope = {anon: ty::Region, base: region_scope}; +fn in_anon_rscope(self: RS, r: ty::Region) -> @anon_rscope { @anon_rscope({anon: r, base: self as region_scope}) } impl @anon_rscope: region_scope { - fn anon_region(_span: span) -> Result { + fn anon_region(_span: span) -> Result { result::Ok(self.anon) } - fn self_region(span: span) -> Result { + fn self_region(span: span) -> Result { self.base.self_region(span) } - fn named_region(span: span, id: ast::ident) -> Result { + fn named_region(span: span, id: ast::ident) -> Result { self.base.named_region(span, id) } } @@ -76,15 +76,15 @@ fn in_binding_rscope(self: RS) @binding_rscope { base: base, anon_bindings: 0 } } impl @binding_rscope: region_scope { - fn anon_region(_span: span) -> Result { + fn anon_region(_span: span) -> Result { let idx = self.anon_bindings; self.anon_bindings += 1; result::Ok(ty::re_bound(ty::br_anon(idx))) } - fn self_region(span: span) -> Result { + fn self_region(span: span) -> Result { self.base.self_region(span) } - fn named_region(span: span, id: ast::ident) -> Result { + fn named_region(span: span, id: ast::ident) -> Result { do self.base.named_region(span, id).chain_err |_e| { result::Ok(ty::re_bound(ty::br_named(id))) } diff --git a/src/rustc/util/common.rs b/src/rustc/util/common.rs index e314a12a6765b..0c6ec267da812 100644 --- a/src/rustc/util/common.rs +++ b/src/rustc/util/common.rs @@ -1,6 +1,5 @@ use std::map::HashMap; use syntax::ast; -use ast::{ty, pat}; use syntax::codemap::{span}; use syntax::visit; use syntax::print; diff --git a/src/rustc/util/ppaux.rs b/src/rustc/util/ppaux.rs index 3f8ca0f6e6a64..27ace283fa0ce 100644 --- a/src/rustc/util/ppaux.rs +++ b/src/rustc/util/ppaux.rs @@ -6,7 +6,7 @@ use middle::ty::{bound_copy, bound_const, bound_owned, bound_send, use middle::ty::{bound_region, br_anon, br_named, br_self, br_cap_avoid}; use middle::ty::{ck_block, ck_box, ck_uniq, ctxt, field, method}; use middle::ty::{mt, t, param_bound}; -use middle::ty::{re_bound, re_free, re_scope, re_var, re_static, region}; +use middle::ty::{re_bound, re_free, re_scope, re_var, re_static, Region}; use middle::ty::{ty_bool, ty_bot, ty_box, ty_class, ty_enum}; use middle::ty::{ty_estr, ty_evec, ty_float, ty_fn, ty_trait, ty_int}; use middle::ty::{ty_nil, ty_opaque_box, ty_opaque_closure_ptr, ty_param}; @@ -21,11 +21,10 @@ use syntax::print::pprust::{path_to_str, proto_to_str, mode_to_str, purity_to_str}; use syntax::{ast, ast_util}; use syntax::ast_map; -use driver::session::session; fn note_and_explain_region(cx: ctxt, prefix: ~str, - region: ty::region, + region: ty::Region, suffix: ~str) { match explain_region_and_span(cx, region) { (str, Some(span)) => { @@ -42,13 +41,13 @@ fn note_and_explain_region(cx: ctxt, /// Returns a string like "the block at 27:31" that attempts to explain a /// lifetime in a way it might plausibly be understood. 
-fn explain_region(cx: ctxt, region: ty::region) -> ~str { +fn explain_region(cx: ctxt, region: ty::Region) -> ~str { let (res, _) = explain_region_and_span(cx, region); return res; } -fn explain_region_and_span(cx: ctxt, region: ty::region) +fn explain_region_and_span(cx: ctxt, region: ty::Region) -> (~str, Option) { return match region { @@ -172,7 +171,7 @@ fn re_scope_id_to_str(cx: ctxt, node_id: ast::node_id) -> ~str { // In general, if you are giving a region error message, // you should use `explain_region()` or, better yet, // `note_and_explain_region()` -fn region_to_str(cx: ctxt, region: region) -> ~str { +fn region_to_str(cx: ctxt, region: Region) -> ~str { if cx.sess.verbose() { return fmt!("&%?", region); } @@ -381,7 +380,7 @@ fn ty_to_str(cx: ctxt, typ: t) -> ~str { fn parameterized(cx: ctxt, base: ~str, - self_r: Option, + self_r: Option, tps: ~[ty::t]) -> ~str { let r_str = match self_r { diff --git a/src/rustdoc/astsrv.rs b/src/rustdoc/astsrv.rs index 27d4d51a010f3..7b2c6fe5f0cbc 100644 --- a/src/rustdoc/astsrv.rs +++ b/src/rustdoc/astsrv.rs @@ -10,7 +10,7 @@ non-sendableness. use std::map::HashMap; use rustc::driver::session; use session::{basic_options, options}; -use session::session; +use session::Session; use rustc::driver::driver; use syntax::diagnostic; use syntax::diagnostic::handler; @@ -35,7 +35,7 @@ type Ctxt = { type SrvOwner = fn(srv: Srv) -> T; type CtxtHandler = fn~(ctxt: Ctxt) -> T; -type Parser = fn~(session, ~str) -> @ast::crate; +type Parser = fn~(Session, ~str) -> @ast::crate; enum Msg { HandleRequest(fn~(Ctxt)), @@ -101,7 +101,7 @@ fn exec( comm::recv(po) } -fn build_ctxt(sess: session, +fn build_ctxt(sess: Session, ast: @ast::crate) -> Ctxt { use rustc::front::config; @@ -118,7 +118,7 @@ fn build_ctxt(sess: session, } } -fn build_session() -> session { +fn build_session() -> Session { let sopts: @options = basic_options(); let codemap = codemap::new_codemap(); let error_handlers = build_error_handlers(codemap); @@ -137,7 +137,7 @@ type ErrorHandlers = { // Build a custom error handler that will allow us to ignore non-fatal // errors fn build_error_handlers( - codemap: codemap::codemap + codemap: codemap::CodeMap ) -> ErrorHandlers { type DiagnosticHandler = { @@ -156,13 +156,13 @@ fn build_error_handlers( fn note(msg: &str) { self.inner.note(msg) } fn bug(msg: &str) -> ! { self.inner.bug(msg) } fn unimpl(msg: &str) -> ! 
{ self.inner.unimpl(msg) } - fn emit(cmsp: Option<(codemap::codemap, codemap::span)>, + fn emit(cmsp: Option<(codemap::CodeMap, codemap::span)>, msg: &str, lvl: diagnostic::level) { self.inner.emit(cmsp, msg, lvl) } } - let emitter = fn@(cmsp: Option<(codemap::codemap, codemap::span)>, + let emitter = fn@(cmsp: Option<(codemap::CodeMap, codemap::span)>, msg: &str, lvl: diagnostic::level) { diagnostic::emit(cmsp, msg, lvl); }; diff --git a/src/rustdoc/parse.rs b/src/rustdoc/parse.rs index 59d64f18d59af..7fc17dfe83861 100644 --- a/src/rustdoc/parse.rs +++ b/src/rustdoc/parse.rs @@ -20,16 +20,16 @@ fn from_str(source: ~str) -> @ast::crate { ~"-", @source, ~[], parse::new_parse_sess(None)) } -fn from_file_sess(sess: session::session, file: &Path) -> @ast::crate { +fn from_file_sess(sess: session::Session, file: &Path) -> @ast::crate { parse::parse_crate_from_file( file, cfg(sess, file_input(*file)), sess.parse_sess) } -fn from_str_sess(sess: session::session, source: ~str) -> @ast::crate { +fn from_str_sess(sess: session::Session, source: ~str) -> @ast::crate { parse::parse_crate_from_source_str( ~"-", @source, cfg(sess, str_input(source)), sess.parse_sess) } -fn cfg(sess: session::session, input: driver::input) -> ast::crate_cfg { +fn cfg(sess: session::Session, input: driver::input) -> ast::crate_cfg { driver::default_configuration(sess, ~"rustdoc", input) } diff --git a/src/test/run-pass/issue-2930.rs b/src/test/run-pass/issue-2930.rs index c480d382adc03..bccaeeaf18ddc 100644 --- a/src/test/run-pass/issue-2930.rs +++ b/src/test/run-pass/issue-2930.rs @@ -1,6 +1,6 @@ proto! stream ( - stream:send { - send(T) -> stream + Stream:send { + send(T) -> Stream } ) diff --git a/src/test/run-pass/pipe-select.rs b/src/test/run-pass/pipe-select.rs index 627cdbee9cabd..23588de2ecafc 100644 --- a/src/test/run-pass/pipe-select.rs +++ b/src/test/run-pass/pipe-select.rs @@ -14,8 +14,8 @@ proto! oneshot ( ) proto! stream ( - stream:send { - send(T) -> stream + Stream:send { + send(T) -> Stream } ) From 7d845056540237e54bf8f02ab8f5e477f71a7edd Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Mon, 15 Oct 2012 15:29:45 -0700 Subject: [PATCH 14/40] Error out in resolve if structs try to capture type parameters Closes #3214 --- src/rustc/middle/resolve.rs | 4 ++-- src/test/compile-fail/issue-3214.rs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/rustc/middle/resolve.rs b/src/rustc/middle/resolve.rs index 81ea6daf19581..6f7230fe4c1aa 100644 --- a/src/rustc/middle/resolve.rs +++ b/src/rustc/middle/resolve.rs @@ -3522,8 +3522,8 @@ impl Resolver { let outer_type_parameter_count = (*type_parameters).len(); let borrowed_type_parameters: &~[ty_param] = &*type_parameters; do self.with_type_parameter_rib(HasTypeParameters - (borrowed_type_parameters, id, 0u, - NormalRibKind)) { + (borrowed_type_parameters, id, 0, + OpaqueFunctionRibKind)) { // Resolve the type parameters. 
self.resolve_type_parameters(*type_parameters, visitor); diff --git a/src/test/compile-fail/issue-3214.rs b/src/test/compile-fail/issue-3214.rs index 3c783b99232d9..7008f8c4ce293 100644 --- a/src/test/compile-fail/issue-3214.rs +++ b/src/test/compile-fail/issue-3214.rs @@ -1,7 +1,7 @@ -// xfail-test fn foo() { struct foo { - mut x: T, //~ ERROR quux + mut x: T, //~ ERROR attempt to use a type argument out of scope + //~^ ERROR use of undeclared type name drop { } } } From d29328617da4704063fca25792bda6811e18ad1b Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Mon, 15 Oct 2012 17:09:05 -0700 Subject: [PATCH 15/40] Add test case for Issue 2895 --- src/test/run-pass/issue-2895.rs | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 src/test/run-pass/issue-2895.rs diff --git a/src/test/run-pass/issue-2895.rs b/src/test/run-pass/issue-2895.rs new file mode 100644 index 0000000000000..b51e6360e524e --- /dev/null +++ b/src/test/run-pass/issue-2895.rs @@ -0,0 +1,16 @@ +use sys::size_of; +extern mod std; + +struct Cat { + x: int +} + +struct Kitty { + x: int, + drop {} +} + +fn main() { + assert (size_of::() == 8 as uint); + assert (size_of::() == 16 as uint); +} From bbc46d527d70ac0bcab6c2b0763f059efc27a142 Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Mon, 15 Oct 2012 17:12:42 -0700 Subject: [PATCH 16/40] Add test for Issue 2823 --- src/test/compile-fail/issue-2823.rs | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 src/test/compile-fail/issue-2823.rs diff --git a/src/test/compile-fail/issue-2823.rs b/src/test/compile-fail/issue-2823.rs new file mode 100644 index 0000000000000..fd2c17584581c --- /dev/null +++ b/src/test/compile-fail/issue-2823.rs @@ -0,0 +1,12 @@ +struct C { + x: int, + drop { + #error("dropping: %?", self.x); + } +} + +fn main() { + let c = C{ x: 2}; + let d = copy c; //~ ERROR copying a noncopyable value + #error("%?", d.x); +} \ No newline at end of file From b38092e9a2b5965ab8b8e73580e4d39ed701330a Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Mon, 15 Oct 2012 17:18:45 -0700 Subject: [PATCH 17/40] In ty::normalize_ty, don't replace self_regions with None Instead, replace with re_static. This was causing ty::subst to fail when called from trans::type_of::type_of. Already discussed with nmatsakis and it's a small change, so no review. Closes #3447 --- src/rustc/middle/ty.rs | 14 ++++++++++---- src/test/run-pass/issue-3447.rs | 5 ++--- 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/src/rustc/middle/ty.rs b/src/rustc/middle/ty.rs index 84510c7161e39..90527e88bc850 100644 --- a/src/rustc/middle/ty.rs +++ b/src/rustc/middle/ty.rs @@ -1473,7 +1473,10 @@ fn subst(cx: ctxt, fold_regions_and_ty( cx, typ, |r| match r { - re_bound(br_self) => substs.self_r.get(), + re_bound(br_self) => substs.self_r.expect( + #fmt("ty::subst: \ + Reference to self region when given substs with no \ + self region, ty = %s", ty_to_str(cx, typ))), _ => r }, |t| do_subst(cx, substs, t), @@ -3910,9 +3913,11 @@ fn normalize_ty(cx: ctxt, t: t) -> t { ty_enum(did, r) => match r.self_r { Some(_) => - // This enum has a self region. Get rid of it + // Use re_static since trans doesn't care about regions mk_enum(cx, did, - {self_r: None, self_ty: None, tps: r.tps}), + {self_r: Some(ty::re_static), + self_ty: None, + tps: r.tps}), None => t }, @@ -3921,7 +3926,8 @@ fn normalize_ty(cx: ctxt, t: t) -> t { match r.self_r { Some(_) => // Ditto. 
- mk_class(cx, did, {self_r: None, self_ty: None, tps: r.tps}), + mk_class(cx, did, {self_r: Some(ty::re_static), self_ty: None, + tps: r.tps}), None => t }, diff --git a/src/test/run-pass/issue-3447.rs b/src/test/run-pass/issue-3447.rs index fab92ea6dcad5..7302163a312c4 100644 --- a/src/test/run-pass/issue-3447.rs +++ b/src/test/run-pass/issue-3447.rs @@ -1,4 +1,3 @@ -// xfail-test struct list { element: &self/T, mut next: Option<@list> @@ -11,13 +10,13 @@ impl list{ next: option::None }; - self.next = Some(@newList); + self.next = Some(@(move newList)); } } fn main() { let s = @"str"; - let ls: list<@str> = list { + let ls = list { element: &s, next: option::None }; From 4dc67c5e6af76eab83b93e5b494d3073d78229db Mon Sep 17 00:00:00 2001 From: Patrick Walton Date: Mon, 15 Oct 2012 18:04:15 -0700 Subject: [PATCH 18/40] rustc: Implement intra-crate static methods on anonymous trait implementations. --- src/rustc/middle/resolve.rs | 406 +++++++++++++----- src/test/run-pass/anon-trait-static-method.rs | 15 + 2 files changed, 312 insertions(+), 109 deletions(-) create mode 100644 src/test/run-pass/anon-trait-static-method.rs diff --git a/src/rustc/middle/resolve.rs b/src/rustc/middle/resolve.rs index 6f7230fe4c1aa..f522cca7a9957 100644 --- a/src/rustc/middle/resolve.rs +++ b/src/rustc/middle/resolve.rs @@ -9,9 +9,8 @@ use syntax::ast::{_mod, add, arm}; use syntax::ast::{bind_by_ref, bind_by_implicit_ref, bind_by_value}; use syntax::ast::{bitand, bitor, bitxor}; use syntax::ast::{blk, bound_const, bound_copy, bound_owned, bound_send}; -use syntax::ast::{bound_trait, binding_mode, - capture_clause, class_ctor, class_dtor}; -use syntax::ast::{crate, crate_num, decl_item}; +use syntax::ast::{bound_trait, binding_mode, capture_clause, class_ctor}; +use syntax::ast::{class_dtor, crate, crate_num, decl_item}; use syntax::ast::{def, def_arg, def_binding, def_class, def_const, def_fn}; use syntax::ast::{def_foreign_mod, def_id, def_label, def_local, def_mod}; use syntax::ast::{def_prim_ty, def_region, def_self, def_ty, def_ty_param}; @@ -294,6 +293,35 @@ enum EnumVariantOrConstResolution { EnumVariantOrConstNotFound } +// Specifies how duplicates should be handled when adding a child item if +// another item exists with the same name in some namespace. +enum DuplicateCheckingMode { + ForbidDuplicateModules, + ForbidDuplicateTypes, + ForbidDuplicateValues, + ForbidDuplicateTypesAndValues, + OverwriteDuplicates +} + +impl DuplicateCheckingMode : cmp::Eq { + pure fn eq(other: &DuplicateCheckingMode) -> bool { + (self as uint) == (*other as uint) + } + pure fn ne(other: &DuplicateCheckingMode) -> bool { !self.eq(other) } +} + +// Returns the namespace associated with the given duplicate checking mode, +// or fails for OverwriteDuplicates. This is used for error messages. +fn namespace_for_duplicate_checking_mode(mode: DuplicateCheckingMode) -> + Namespace { + match mode { + ForbidDuplicateModules | ForbidDuplicateTypes | + ForbidDuplicateTypesAndValues => TypeNS, + ForbidDuplicateValues => ValueNS, + OverwriteDuplicates => fail ~"OverwriteDuplicates has no namespace" + } +} + /// One local scope. struct Rib { bindings: HashMap, @@ -490,9 +518,10 @@ impl Privacy : cmp::Eq { } // Records a possibly-private type definition. -enum TypeNsDef { - ModuleDef(Privacy, @Module), - TypeDef(Privacy, def) +struct TypeNsDef { + mut privacy: Privacy, + mut module_def: Option<@Module>, + mut type_def: Option } // Records a possibly-private value definition. 
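// A minimal sketch of what the reworked TypeNsDef enables; it mirrors the
// run-pass test added at the end of this patch rather than introducing any
// behaviour of its own. Because a single name can now hold both a type
// definition and a module definition, the static methods of an anonymous
// impl can live in a synthesized module named after the type and be
// resolved by path:

struct Foo {
    x: int
}

impl Foo {
    // A static method on an anonymous (trait-less) impl.
    static fn new() -> Foo {
        Foo { x: 3 }
    }
}

fn main() {
    // `Foo::new` is found through the module synthesized for `Foo`.
    let x = Foo::new();
    io::println(x.x.to_str());
}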
@@ -508,7 +537,7 @@ struct NameBindings { mut value_def: Option, //< Meaning in value namespace. // For error reporting - // XXX: Merge me into TypeDef and ValueDef. + // FIXME (#3783): Merge me into TypeNsDef and ValueNsDef. mut type_span: Option, mut value_span: Option, } @@ -521,16 +550,46 @@ impl NameBindings { def_id: Option, legacy_exports: bool, sp: span) { - if self.type_def.is_none() { - let module_ = @Module(parent_link, def_id, legacy_exports); - self.type_def = Some(ModuleDef(privacy, module_)); - self.type_span = Some(sp); + // Merges the module with the existing type def or creates a new one. + let module_ = @Module(parent_link, def_id, legacy_exports); + match self.type_def { + None => { + self.type_def = Some(TypeNsDef { + privacy: privacy, + module_def: Some(module_), + type_def: None + }); + } + Some(copy type_def) => { + self.type_def = Some(TypeNsDef { + privacy: privacy, + module_def: Some(module_), + .. type_def + }); + } } + self.type_span = Some(sp); } /// Records a type definition. fn define_type(privacy: Privacy, def: def, sp: span) { - self.type_def = Some(TypeDef(privacy, def)); + // Merges the type with the existing type def or creates a new one. + match self.type_def { + None => { + self.type_def = Some(TypeNsDef { + privacy: privacy, + module_def: None, + type_def: Some(def) + }); + } + Some(copy type_def) => { + self.type_def = Some(TypeNsDef { + privacy: privacy, + type_def: Some(def), + .. type_def + }); + } + } self.type_span = Some(sp); } @@ -543,8 +602,8 @@ impl NameBindings { /// Returns the module node if applicable. fn get_module_if_available() -> Option<@Module> { match self.type_def { - Some(ModuleDef(_, module_)) => return Some(module_), - None | Some(TypeDef(_, _)) => return None, + Some(type_def) => type_def.module_def, + None => None } } @@ -553,12 +612,12 @@ impl NameBindings { * definition. */ fn get_module() -> @Module { - match self.type_def { - None | Some(TypeDef(*)) => { + match self.get_module_if_available() { + None => { fail ~"get_module called on a node with no module \ definition!" } - Some(ModuleDef(_, module_)) => module_ + Some(module_def) => module_def } } @@ -574,10 +633,23 @@ impl NameBindings { TypeNS => { match self.type_def { None => None, - Some(ModuleDef(_, module_)) => { - module_.def_id.map(|def_id| def_mod(*def_id)) + Some(type_def) => { + // FIXME (#3784): This is reallllly questionable. + // Perhaps the right thing to do is to merge def_mod + // and def_ty. 
+ match type_def.type_def { + Some(type_def) => Some(type_def), + None => { + match type_def.module_def { + Some(module_def) => { + module_def.def_id.map(|def_id| + def_mod(*def_id)) + } + None => None + } + } + } } - Some(TypeDef(_, def)) => Some(def) } } ValueNS => { @@ -594,8 +666,7 @@ impl NameBindings { TypeNS => { match self.type_def { None => None, - Some(ModuleDef(privacy, _)) | Some(TypeDef(privacy, _)) => - Some(privacy) + Some(type_def) => Some(type_def.privacy) } } ValueNS => { @@ -882,9 +953,7 @@ impl Resolver { */ fn add_child(name: ident, reduced_graph_parent: ReducedGraphParent, - // Pass in the namespaces for the child item so that we can - // check for duplicate items in the same namespace - ns: ~[Namespace], + duplicate_checking_mode: DuplicateCheckingMode, // For printing errors sp: span) -> (@NameBindings, ReducedGraphParent) { @@ -904,29 +973,67 @@ impl Resolver { let new_parent = ModuleReducedGraphParent(module_); match module_.children.find(name) { None => { - let child = @NameBindings(); - module_.children.insert(name, child); - return (child, new_parent); + let child = @NameBindings(); + module_.children.insert(name, child); + return (child, new_parent); } Some(child) => { - // We don't want to complain if the multiple definitions - // are in different namespaces. - match ns.find(|n| child.defined_in_namespace(n)) { - Some(ns) => { - self.session.span_err(sp, - fmt!("Duplicate definition of %s %s", - namespace_to_str(ns), - self.session.str_of(name))); - do child.span_for_namespace(ns).iter() |sp| { - self.session.span_note(*sp, - fmt!("First definition of %s %s here:", - namespace_to_str(ns), - self.session.str_of(name))); - } + // Enforce the duplicate checking mode. If we're requesting + // duplicate module checking, check that there isn't a module + // in the module with the same name. If we're requesting + // duplicate type checking, check that there isn't a type in + // the module with the same name. If we're requesting + // duplicate value checking, check that there isn't a value in + // the module with the same name. If we're requesting + // duplicate type checking and duplicate value checking, check + // that there isn't a duplicate type and a duplicate value + // with the same name. If no duplicate checking was requested + // at all, do nothing. + + let mut is_duplicate = false; + match duplicate_checking_mode { + ForbidDuplicateModules => { + is_duplicate = + child.get_module_if_available().is_some(); + } + ForbidDuplicateTypes => { + match child.def_for_namespace(TypeNS) { + Some(def_mod(_)) | None => {} + Some(_) => is_duplicate = true + } + } + ForbidDuplicateValues => { + is_duplicate = child.defined_in_namespace(ValueNS); + } + ForbidDuplicateTypesAndValues => { + match child.def_for_namespace(TypeNS) { + Some(def_mod(_)) | None => {} + Some(_) => is_duplicate = true + }; + if child.defined_in_namespace(ValueNS) { + is_duplicate = true; + } + } + OverwriteDuplicates => {} + } + if duplicate_checking_mode != OverwriteDuplicates && + is_duplicate { + // Return an error here by looking up the namespace that + // had the duplicate. 
+ let ns = namespace_for_duplicate_checking_mode( + duplicate_checking_mode); + self.session.span_err(sp, + fmt!("duplicate definition of %s %s", + namespace_to_str(ns), + self.session.str_of(name))); + do child.span_for_namespace(ns).iter() |sp| { + self.session.span_note(*sp, + fmt!("first definition of %s %s here:", + namespace_to_str(ns), + self.session.str_of(name))); + } } - _ => {} - } - return (child, new_parent); + return (child, new_parent); } } } @@ -987,7 +1094,7 @@ impl Resolver { item_mod(module_) => { let legacy = has_legacy_export_attr(item.attrs); let (name_bindings, new_parent) = - self.add_child(ident, parent, ~[TypeNS], sp); + self.add_child(ident, parent, ForbidDuplicateModules, sp); let parent_link = self.get_parent_link(new_parent, ident); let def_id = { crate: 0, node: item.id }; @@ -999,12 +1106,14 @@ impl Resolver { visit_mod(module_, sp, item.id, new_parent, visitor); } + item_foreign_mod(fm) => { let legacy = has_legacy_export_attr(item.attrs); let new_parent = match fm.sort { named => { let (name_bindings, new_parent) = - self.add_child(ident, parent, ~[TypeNS], sp); + self.add_child(ident, parent, + ForbidDuplicateModules, sp); let parent_link = self.get_parent_link(new_parent, ident); @@ -1028,15 +1137,15 @@ impl Resolver { // These items live in the value namespace. item_const(*) => { - let (name_bindings, _) = self.add_child(ident, parent, - ~[ValueNS], sp); + let (name_bindings, _) = + self.add_child(ident, parent, ForbidDuplicateValues, sp); (*name_bindings).define_value (privacy, def_const(local_def(item.id)), sp); } item_fn(_, purity, _, _) => { - let (name_bindings, new_parent) = self.add_child(ident, parent, - ~[ValueNS], sp); + let (name_bindings, new_parent) = + self.add_child(ident, parent, ForbidDuplicateValues, sp); let def = def_fn(local_def(item.id), purity); (*name_bindings).define_value(privacy, def, sp); @@ -1045,17 +1154,16 @@ impl Resolver { // These items live in the type namespace. item_ty(*) => { - let (name_bindings, _) = self.add_child(ident, parent, - ~[TypeNS], sp); + let (name_bindings, _) = + self.add_child(ident, parent, ForbidDuplicateTypes, sp); (*name_bindings).define_type (privacy, def_ty(local_def(item.id)), sp); } item_enum(enum_definition, _) => { - - let (name_bindings, new_parent) = self.add_child(ident, parent, - ~[TypeNS], sp); + let (name_bindings, new_parent) = + self.add_child(ident, parent, ForbidDuplicateTypes, sp); (*name_bindings).define_type (privacy, def_ty(local_def(item.id)), sp); @@ -1072,7 +1180,7 @@ impl Resolver { // These items live in both the type and value namespaces. item_class(*) => { let (name_bindings, new_parent) = - self.add_child(ident, parent, ~[TypeNS], sp); + self.add_child(ident, parent, ForbidDuplicateTypes, sp); (*name_bindings).define_type (privacy, def_ty(local_def(item.id)), sp); @@ -1083,13 +1191,75 @@ impl Resolver { visit_item(item, new_parent, visitor); } - item_impl(*) => { + item_impl(_, trait_ref_opt, ty, methods) => { + // If this implements an anonymous trait and it has static + // methods, then add all the static methods within to a new + // module, if the type was defined within this module. + // + // FIXME (#3785): This is quite unsatisfactory. Perhaps we + // should modify anonymous traits to only be implementable in + // the same module that declared the type. + + // Bail out early if there are no static methods. 
+ let mut has_static_methods = false; + for methods.each |method| { + match method.self_ty.node { + sty_static => has_static_methods = true, + _ => {} + } + } + + // If there are static methods, then create the module + // and add them. + match (trait_ref_opt, ty) { + (None, @{ id: _, node: ty_path(path, _), span: _ }) if + has_static_methods && path.idents.len() == 1 => { + // Create the module. + let name = path_to_ident(path); + let (name_bindings, new_parent) = + self.add_child(name, + parent, + ForbidDuplicateModules, + sp); + + let parent_link = self.get_parent_link(new_parent, + ident); + let def_id = local_def(item.id); + name_bindings.define_module(privacy, parent_link, + Some(def_id), false, sp); + + let new_parent = ModuleReducedGraphParent( + name_bindings.get_module()); + + // For each static method... + for methods.each |method| { + match method.self_ty.node { + sty_static => { + // Add the static method to the module. + let ident = method.ident; + let (method_name_bindings, _) = + self.add_child(ident, + new_parent, + ForbidDuplicateValues, + method.span); + let def = def_fn(local_def(method.id), + method.purity); + method_name_bindings.define_value( + Public, def, method.span); + } + _ => {} + } + } + } + _ => {} + } + visit_item(item, parent, visitor); } item_trait(_, _, methods) => { - let (name_bindings, new_parent) = self.add_child(ident, parent, - ~[TypeNS], sp); + let (name_bindings, new_parent) = + self.add_child(ident, parent, ForbidDuplicateTypes, sp); // Add the names of all the methods to the trait info. let method_names = @HashMap(); @@ -1103,8 +1273,8 @@ impl Resolver { sty_static => { // which parent to use?? let (method_name_bindings, _) = - self.add_child(ident, new_parent, ~[ValueNS], - ty_m.span); + self.add_child(ident, new_parent, + ForbidDuplicateValues, ty_m.span); let def = def_static_method(local_def(ty_m.id), local_def(item.id), ty_m.purity); @@ -1142,7 +1312,7 @@ impl Resolver { &&visitor: vt) { let ident = variant.node.name; - let (child, _) = self.add_child(ident, parent, ~[ValueNS], + let (child, _) = self.add_child(ident, parent, ForbidDuplicateValues, variant.span); let privacy; @@ -1324,7 +1494,7 @@ impl Resolver { match find_use_stmt_cnum(self.session.cstore, node_id) { Some(crate_id) => { let (child_name_bindings, new_parent) = - self.add_child(name, parent, ~[TypeNS], + self.add_child(name, parent, ForbidDuplicateTypes, view_item.span); let def_id = { crate: crate_id, node: 0 }; @@ -1355,7 +1525,8 @@ impl Resolver { let name = foreign_item.ident; let (name_bindings, new_parent) = - self.add_child(name, parent, ~[ValueNS], foreign_item.span); + self.add_child(name, parent, ForbidDuplicateValues, + foreign_item.span); match foreign_item.node { foreign_item_fn(_, purity, type_parameters) => { @@ -1408,7 +1579,13 @@ impl Resolver { match def { def_mod(def_id) | def_foreign_mod(def_id) => { match copy child_name_bindings.type_def { - None => { + Some(TypeNsDef { module_def: Some(copy module_def), _ }) => { + debug!("(building reduced graph for external crate) \ + already created module"); + module_def.def_id = Some(def_id); + modules.insert(def_id, module_def); + } + Some(_) | None => { debug!("(building reduced graph for \ external crate) building module \ %s", final_ident); @@ -1451,16 +1628,6 @@ impl Resolver { } } } - Some(ModuleDef(_, module_)) => { - debug!("(building reduced graph for \ - external crate) already created \ - module"); - module_.def_id = Some(def_id); - modules.insert(def_id, module_); - } - Some(TypeDef(*)) => { - 
self.session.bug(~"external module def overwriting type def"); - } } } def_fn(*) | def_static_method(*) | def_const(*) | @@ -1476,8 +1643,7 @@ impl Resolver { // If this is a trait, add all the method names // to the trait info. - match get_method_names_if_trait(self.session.cstore, - def_id) { + match get_method_names_if_trait(self.session.cstore, def_id) { None => { // Nothing to do. } @@ -1547,8 +1713,8 @@ impl Resolver { let (child_name_bindings, new_parent) = self.add_child(ident, ModuleReducedGraphParent(current_module), - // May want a better span - ~[], dummy_sp()); + OverwriteDuplicates, + dummy_sp()); // Define or reuse the module node. match child_name_bindings.type_def { @@ -1572,7 +1738,8 @@ impl Resolver { let (child_name_bindings, new_parent) = self.add_child(final_ident, ModuleReducedGraphParent(current_module), - ~[], dummy_sp()); + OverwriteDuplicates, + dummy_sp()); match path_entry.def_like { dl_def(def) => { @@ -1582,12 +1749,12 @@ impl Resolver { final_ident, new_parent); } dl_impl(_) => { - // Because of the infelicitous way the metadata is - // written, we can't process this impl now. We'll get it - // later. - + // We only process static methods of impls here. debug!("(building reduced graph for external crate) \ - ignoring impl %s", final_ident_str); + processing impl %s", final_ident_str); + + // FIXME (#3786): Cross-crate static methods in anonymous + // traits. } dl_field => { debug!("(building reduced graph for external crate) \ @@ -2310,18 +2477,34 @@ impl Resolver { return Indeterminate; } Success(target) => { + // Check to see whether there are type bindings, and, if + // so, whether there is a module within. match target.bindings.type_def { - None | Some(TypeDef(*)) => { - // Not a module. + Some(copy type_def) => { + match type_def.module_def { + None => { + // Not a module. + self.session.span_err(span, + fmt!("not a \ + module: %s", + self.session. + str_of( + name))); + return Failed; + } + Some(copy module_def) => { + search_module = module_def; + } + } + } + None => { + // There are no type bindings at all. self.session.span_err(span, fmt!("not a module: %s", - self.session. - str_of(name))); + self.session.str_of( + name))); return Failed; } - Some(ModuleDef(_, copy module_)) => { - search_module = module_; - } } } } @@ -2469,14 +2652,24 @@ impl Resolver { match self.resolve_item_in_lexical_scope(module_, name, TypeNS) { Success(target) => { match target.bindings.type_def { - None | Some(TypeDef(*)) => { + Some(type_def) => { + match type_def.module_def { + None => { + error!("!!! (resolving module in lexical \ + scope) module wasn't actually a \ + module!"); + return Failed; + } + Some(module_def) => { + return Success(module_def); + } + } + } + None => { error!("!!! (resolving module in lexical scope) module wasn't actually a module!"); return Failed; } - Some(ModuleDef(_, module_)) => { - return Success(module_); - } } } Indeterminate => { @@ -3403,7 +3596,6 @@ impl Resolver { self_binding: SelfBinding, capture_clause: CaptureClause, visitor: ResolveVisitor) { - // Check each element of the capture clause. match capture_clause { NoCaptureClause => { @@ -3495,7 +3687,6 @@ impl Resolver { fn resolve_type_parameters(type_parameters: ~[ty_param], visitor: ResolveVisitor) { - for type_parameters.each |type_parameter| { for type_parameter.bounds.each |bound| { match *bound { @@ -3517,7 +3708,6 @@ impl Resolver { methods: ~[@method], optional_destructor: Option, visitor: ResolveVisitor) { - // If applicable, create a rib for the type parameters. 
let outer_type_parameter_count = (*type_parameters).len(); let borrowed_type_parameters: &~[ty_param] = &*type_parameters; @@ -3619,23 +3809,21 @@ impl Resolver { self_type: @Ty, methods: ~[@method], visitor: ResolveVisitor) { - // If applicable, create a rib for the type parameters. let outer_type_parameter_count = type_parameters.len(); let borrowed_type_parameters: &~[ty_param] = &type_parameters; do self.with_type_parameter_rib(HasTypeParameters (borrowed_type_parameters, id, 0u, NormalRibKind)) { - // Resolve the type parameters. self.resolve_type_parameters(type_parameters, visitor); // Resolve the trait reference, if necessary. let original_trait_refs = self.current_trait_refs; match opt_trait_reference { - Some(trait_reference) => { - let new_trait_refs = @DVec(); - match self.resolve_path( + Some(trait_reference) => { + let new_trait_refs = @DVec(); + match self.resolve_path( trait_reference.path, TypeNS, true, visitor) { None => { self.session.span_err(span, @@ -3649,10 +3837,10 @@ impl Resolver { (*new_trait_refs).push(def_id_of_def(def)); } } - // Record the current set of trait references. - self.current_trait_refs = Some(new_trait_refs); - } - None => () + // Record the current set of trait references. + self.current_trait_refs = Some(new_trait_refs); + } + None => () } // Resolve the self type. diff --git a/src/test/run-pass/anon-trait-static-method.rs b/src/test/run-pass/anon-trait-static-method.rs new file mode 100644 index 0000000000000..6c4e9abc5ff7d --- /dev/null +++ b/src/test/run-pass/anon-trait-static-method.rs @@ -0,0 +1,15 @@ +struct Foo { + x: int +} + +impl Foo { + static fn new() -> Foo { + Foo { x: 3 } + } +} + +fn main() { + let x = Foo::new(); + io::println(x.x.to_str()); +} + From a92c3db0b34905e7a828bb72931b791818a02b6b Mon Sep 17 00:00:00 2001 From: Niko Matsakis Date: Tue, 16 Oct 2012 20:19:34 -0700 Subject: [PATCH 19/40] add missing pub from multifile example in sec 12.3 --- doc/tutorial.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/tutorial.md b/doc/tutorial.md index 8746cf026f9ec..645e150b40a77 100644 --- a/doc/tutorial.md +++ b/doc/tutorial.md @@ -2360,7 +2360,7 @@ these two files: ~~~~ // world.rs #[link(name = "world", vers = "1.0")]; -fn explore() -> ~str { ~"world" } +pub fn explore() -> ~str { ~"world" } ~~~~ ~~~~ {.xfail-test} From 1679960889498d389af1869429b30a7a4360a929 Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Tue, 16 Oct 2012 22:13:41 -0700 Subject: [PATCH 20/40] Change a use of map::get to map::find --- src/rustc/middle/region.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/rustc/middle/region.rs b/src/rustc/middle/region.rs index 5c70cd3e2795e..95280032ae0ab 100644 --- a/src/rustc/middle/region.rs +++ b/src/rustc/middle/region.rs @@ -640,8 +640,8 @@ fn determine_rp_in_ty(ty: @ast::Ty, // that as a direct dependency. 
match ty.node { ast::ty_path(path, id) => { - match cx.def_map.get(id) { - ast::def_ty(did) | ast::def_class(did) => { + match cx.def_map.find(id) { + Some(ast::def_ty(did)) | Some(ast::def_class(did)) => { if did.crate == ast::local_crate { if cx.opt_region_is_relevant(path.rp) { cx.add_dep(did.node); From 081a0434fb222b3f182ee6be903b6536c85396d7 Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Tue, 16 Oct 2012 22:13:55 -0700 Subject: [PATCH 21/40] Remove integer suffixes --- src/rustc/middle/typeck/astconv.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/rustc/middle/typeck/astconv.rs b/src/rustc/middle/typeck/astconv.rs index b8ff637f7dd37..da7a71b3efbf7 100644 --- a/src/rustc/middle/typeck/astconv.rs +++ b/src/rustc/middle/typeck/astconv.rs @@ -148,8 +148,8 @@ fn ast_path_to_ty( return {substs: substs, ty: ty}; } -const NO_REGIONS: uint = 1u; -const NO_TPS: uint = 2u; +const NO_REGIONS: uint = 1; +const NO_TPS: uint = 2; // Parses the programmer's textual representation of a type into our // internal notion of a type. `getter` is a function that returns the type From 47c83f1844d033a2ee12aa05eed30112804f4585 Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Wed, 17 Oct 2012 10:39:01 -0700 Subject: [PATCH 22/40] Export std::net_url::encode_component --- src/libstd/net_url.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstd/net_url.rs b/src/libstd/net_url.rs index 109e71a3eaa70..8ea9513d15518 100644 --- a/src/libstd/net_url.rs +++ b/src/libstd/net_url.rs @@ -95,7 +95,7 @@ pub fn encode(s: &str) -> ~str { * This function is compliant with RFC 3986. */ -fn encode_component(s: &str) -> ~str { +pub fn encode_component(s: &str) -> ~str { encode_inner(s, false) } From e94e82cb8ef0491667bc8041d370199aed838f2d Mon Sep 17 00:00:00 2001 From: Ben Striegel Date: Fri, 12 Oct 2012 19:41:16 -0400 Subject: [PATCH 23/40] Extraneous sigil patrol: ~"string literals" --- doc/tutorial.md | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/doc/tutorial.md b/doc/tutorial.md index 645e150b40a77..b411e1232b720 100644 --- a/doc/tutorial.md +++ b/doc/tutorial.md @@ -1502,7 +1502,7 @@ and [`core::str`]. Here are some examples. # fn unwrap_crayon(c: Crayon) -> int { 0 } # fn eat_crayon_wax(i: int) { } # fn store_crayon_in_nasal_cavity(i: uint, c: Crayon) { } -# fn crayon_to_str(c: Crayon) -> ~str { ~"" } +# fn crayon_to_str(c: Crayon) -> &str { "" } let crayons = &[Almond, AntiqueBrass, Apricot]; @@ -1649,11 +1649,11 @@ callers may pass any kind of closure. ~~~~ fn call_twice(f: fn()) { f(); f(); } -call_twice(|| { ~"I am an inferred stack closure"; } ); -call_twice(fn&() { ~"I am also a stack closure"; } ); -call_twice(fn@() { ~"I am a managed closure"; }); -call_twice(fn~() { ~"I am an owned closure"; }); -fn bare_function() { ~"I am a plain function"; } +call_twice(|| { "I am an inferred stack closure"; } ); +call_twice(fn&() { "I am also a stack closure"; } ); +call_twice(fn@() { "I am a managed closure"; }); +call_twice(fn~() { "I am an owned closure"; }); +fn bare_function() { "I am a plain function"; } call_twice(bare_function); ~~~~ @@ -1767,7 +1767,7 @@ And using this function to iterate over a vector: # use println = io::println; each(&[2, 4, 8, 5, 16], |n| { if *n % 2 != 0 { - println(~"found odd number!"); + println("found odd number!"); false } else { true } }); @@ -1784,7 +1784,7 @@ to the next iteration, write `loop`. 
# use println = io::println; for each(&[2, 4, 8, 5, 16]) |n| { if *n % 2 != 0 { - println(~"found odd number!"); + println("found odd number!"); break; } } @@ -1967,12 +1967,12 @@ impl int: Printable { fn print() { io::println(fmt!("%d", self)) } } -impl ~str: Printable { +impl &str: Printable { fn print() { io::println(self) } } # 1.print(); -# (~"foo").print(); +# ("foo").print(); ~~~~ Methods defined in an implementation of a trait may be called just like @@ -2162,8 +2162,8 @@ additional modules. ~~~~ mod farm { - pub fn chicken() -> ~str { ~"cluck cluck" } - pub fn cow() -> ~str { ~"mooo" } + pub fn chicken() -> &str { "cluck cluck" } + pub fn cow() -> &str { "mooo" } } fn main() { @@ -2360,13 +2360,13 @@ these two files: ~~~~ // world.rs #[link(name = "world", vers = "1.0")]; -pub fn explore() -> ~str { ~"world" } +pub fn explore() -> &str { "world" } ~~~~ ~~~~ {.xfail-test} // main.rs extern mod world; -fn main() { io::println(~"hello " + world::explore()); } +fn main() { io::println("hello " + world::explore()); } ~~~~ Now compile and run like this (adjust to your platform if necessary): From 5e1d0bab8075df5ce06543537296d7294440bd45 Mon Sep 17 00:00:00 2001 From: Ben Striegel Date: Fri, 12 Oct 2012 20:48:45 -0400 Subject: [PATCH 24/40] Sigil patrol: change fn@ fn& fn~ to @fn &fn ~fn This also involves removing references to the old long-form closure syntax, which pcwalton alleges is deprecated and which was never updated for the new forms, e.g. `@fn() {}` is illegal. --- doc/tutorial.md | 36 +++++++++++------------------------- 1 file changed, 11 insertions(+), 25 deletions(-) diff --git a/doc/tutorial.md b/doc/tutorial.md index b411e1232b720..67c9bcd828116 100644 --- a/doc/tutorial.md +++ b/doc/tutorial.md @@ -1569,7 +1569,7 @@ let bloop = |well, oh: mygoodness| -> what_the { fail oh(well) }; ~~~~ There are several forms of closure, each with its own role. The most -common, called a _stack closure_, has type `fn&` and can directly +common, called a _stack closure_, has type `&fn` and can directly access local variables in the enclosing scope. ~~~~ @@ -1591,7 +1591,7 @@ pervasively in Rust code. When you need to store a closure in a data structure, a stack closure will not do, since the compiler will refuse to let you store it. For this purpose, Rust provides a type of closure that has an arbitrary -lifetime, written `fn@` (boxed closure, analogous to the `@` pointer +lifetime, written `@fn` (boxed closure, analogous to the `@` pointer type described earlier). This type of closure *is* first-class. A managed closure does not directly access its environment, but merely @@ -1604,8 +1604,9 @@ returns it from a function, and then calls it: ~~~~ # extern mod std; -fn mk_appender(suffix: ~str) -> fn@(~str) -> ~str { - return fn@(s: ~str) -> ~str { s + suffix }; +fn mk_appender(suffix: ~str) -> @fn(~str) -> ~str { + // The compiler knows that we intend this closure to be of type @fn + return |s| s + suffix; } fn main() { @@ -1614,22 +1615,9 @@ fn main() { } ~~~~ -This example uses the long closure syntax, `fn@(s: ~str) ...`. Using -this syntax makes it explicit that we are declaring a boxed -closure. In practice, boxed closures are usually defined with the -short closure syntax introduced earlier, in which case the compiler -infers the type of closure. 
Thus our managed closure example could -also be written: - -~~~~ -fn mk_appender(suffix: ~str) -> fn@(~str) -> ~str { - return |s| s + suffix; -} -~~~~ - ## Owned closures -Owned closures, written `fn~` in analogy to the `~` pointer type, +Owned closures, written `~fn` in analogy to the `~` pointer type, hold on to things that can safely be sent between processes. They copy the values they close over, much like managed closures, but they also own them: that is, no other code can access @@ -1649,12 +1637,10 @@ callers may pass any kind of closure. ~~~~ fn call_twice(f: fn()) { f(); f(); } -call_twice(|| { "I am an inferred stack closure"; } ); -call_twice(fn&() { "I am also a stack closure"; } ); -call_twice(fn@() { "I am a managed closure"; }); -call_twice(fn~() { "I am an owned closure"; }); -fn bare_function() { "I am a plain function"; } -call_twice(bare_function); +let closure = || { "I'm a closure, and it doesn't matter what type I am"; }; +fn function() { "I'm a normal function"; } +call_twice(closure); +call_twice(function); ~~~~ > ***Note:*** Both the syntax and the semantics will be changing @@ -1715,7 +1701,7 @@ parentheses, where it looks more like a typical block of code. `do` is a convenient way to create tasks with the `task::spawn` -function. `spawn` has the signature `spawn(fn: fn~())`. In other +function. `spawn` has the signature `spawn(fn: ~fn())`. In other words, it is a function that takes an owned closure that takes no arguments. From f7ce3dc55f1bcc8a741951a4b9f090bad61769ae Mon Sep 17 00:00:00 2001 From: Ben Striegel Date: Fri, 12 Oct 2012 21:47:46 -0400 Subject: [PATCH 25/40] Extraneous sigil patrol: turn &[] literals into [] --- doc/tutorial.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/doc/tutorial.md b/doc/tutorial.md index 67c9bcd828116..02927c4ddd1b5 100644 --- a/doc/tutorial.md +++ b/doc/tutorial.md @@ -1504,7 +1504,7 @@ and [`core::str`]. Here are some examples. # fn store_crayon_in_nasal_cavity(i: uint, c: Crayon) { } # fn crayon_to_str(c: Crayon) -> &str { "" } -let crayons = &[Almond, AntiqueBrass, Apricot]; +let crayons = [Almond, AntiqueBrass, Apricot]; // Check the length of the vector assert crayons.len() == 3; @@ -1679,7 +1679,7 @@ structure. ~~~~ # fn each(v: &[int], op: fn(v: &int)) { } # fn do_some_work(i: &int) { } -each(&[1, 2, 3], |n| { +each([1, 2, 3], |n| { do_some_work(n); }); ~~~~ @@ -1690,7 +1690,7 @@ call that can be written more like a built-in control structure: ~~~~ # fn each(v: &[int], op: fn(v: &int)) { } # fn do_some_work(i: &int) { } -do each(&[1, 2, 3]) |n| { +do each([1, 2, 3]) |n| { do_some_work(n); } ~~~~ @@ -1751,7 +1751,7 @@ And using this function to iterate over a vector: ~~~~ # use each = vec::each; # use println = io::println; -each(&[2, 4, 8, 5, 16], |n| { +each([2, 4, 8, 5, 16], |n| { if *n % 2 != 0 { println("found odd number!"); false @@ -1768,7 +1768,7 @@ to the next iteration, write `loop`. ~~~~ # use each = vec::each; # use println = io::println; -for each(&[2, 4, 8, 5, 16]) |n| { +for each([2, 4, 8, 5, 16]) |n| { if *n % 2 != 0 { println("found odd number!"); break; @@ -2106,7 +2106,7 @@ impl @Rectangle: Drawable { fn draw() { ... 
} } let c: @Circle = @new_circle(); let r: @Rectangle = @new_rectangle(); -draw_all(&[c as @Drawable, r as @Drawable]); +draw_all([c as @Drawable, r as @Drawable]); ~~~~ We omit the code for `new_circle` and `new_rectangle`; imagine that From ca5506a5dee75eb3ee84eff1f0fa5f5e40f54544 Mon Sep 17 00:00:00 2001 From: Jyun-Yan You Date: Mon, 8 Oct 2012 16:36:09 +0800 Subject: [PATCH 26/40] fix the indentation of foreign constant --- src/libsyntax/print/pprust.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 5e37f7e18ece9..60b3e005f10a9 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -433,6 +433,7 @@ fn print_foreign_item(s: ps, item: @ast::foreign_item) { print_type(s, t); word(s.s, ~";"); end(s); // end the head-ibox + end(s); // end the outer cbox } } } From 66151d02f54300bb6f4c326728d6a2d875cdc80a Mon Sep 17 00:00:00 2001 From: Jyun-Yan You Date: Mon, 15 Oct 2012 00:19:54 +0800 Subject: [PATCH 27/40] remove duplicate visibility and fix indentation --- src/libsyntax/print/pprust.rs | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 60b3e005f10a9..9668c161ce3d2 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -444,7 +444,6 @@ fn print_item(s: ps, &&item: @ast::item) { print_outer_attributes(s, item.attrs); let ann_node = node_item(s, item); s.ann.pre(ann_node); - print_visibility(s, item.vis); match item.node { ast::item_const(ty, expr) => { head(s, visibility_qualified(item.vis, ~"const")); @@ -480,10 +479,10 @@ fn print_item(s: ps, &&item: @ast::item) { ast::named => { word_nbsp(s, ~"mod"); print_ident(s, item.ident); + nbsp(s); } ast::anonymous => {} } - nbsp(s); bopen(s); print_foreign_mod(s, nmod, item.attrs); bclose(s, item.span); @@ -491,7 +490,7 @@ fn print_item(s: ps, &&item: @ast::item) { ast::item_ty(ty, params) => { ibox(s, indent_unit); ibox(s, 0u); - word_nbsp(s, ~"type"); + word_nbsp(s, visibility_qualified(item.vis, ~"type")); print_ident(s, item.ident); print_type_params(s, params); end(s); // end the inner ibox @@ -503,15 +502,15 @@ fn print_item(s: ps, &&item: @ast::item) { end(s); // end the outer ibox } ast::item_enum(enum_definition, params) => { - print_enum_def(s, enum_definition, params, item.ident, item.span); + print_enum_def(s, enum_definition, params, item.ident, item.span, item.vis); } ast::item_class(struct_def, tps) => { - head(s, ~"struct"); + head(s, visibility_qualified(item.vis, ~"struct")); print_struct(s, struct_def, tps, item.ident, item.span); } ast::item_impl(tps, opt_trait, ty, methods) => { - head(s, ~"impl"); + head(s, visibility_qualified(item.vis, ~"impl")); if tps.is_not_empty() { print_type_params(s, tps); space(s.s); @@ -534,7 +533,7 @@ fn print_item(s: ps, &&item: @ast::item) { bclose(s, item.span); } ast::item_trait(tps, traits, methods) => { - head(s, ~"trait"); + head(s, visibility_qualified(item.vis, ~"trait")); print_ident(s, item.ident); print_type_params(s, tps); if vec::len(traits) != 0u { @@ -550,6 +549,7 @@ fn print_item(s: ps, &&item: @ast::item) { bclose(s, item.span); } ast::item_mac({node: ast::mac_invoc_tt(pth, tts), _}) => { + print_visibility(s, item.vis); print_path(s, pth, false); word(s.s, ~"! 
"); print_ident(s, item.ident); @@ -570,7 +570,7 @@ fn print_item(s: ps, &&item: @ast::item) { fn print_enum_def(s: ps, enum_definition: ast::enum_def, params: ~[ast::ty_param], ident: ast::ident, - span: ast::span) { + span: ast::span, visibility: ast::visibility) { let mut newtype = vec::len(enum_definition.variants) == 1u && ident == enum_definition.variants[0].node.name; @@ -582,9 +582,9 @@ fn print_enum_def(s: ps, enum_definition: ast::enum_def, } if newtype { ibox(s, indent_unit); - word_space(s, ~"enum"); + word_space(s, visibility_qualified(visibility, ~"enum")); } else { - head(s, ~"enum"); + head(s, visibility_qualified(visibility, ~"enum")); } print_ident(s, ident); @@ -877,7 +877,7 @@ fn print_possibly_embedded_block_(s: ps, blk: ast::blk, embedded: embed_type, indented: uint, attrs: ~[ast::attribute], close_box: bool) { match blk.node.rules { - ast::unsafe_blk => word(s.s, ~"unsafe"), + ast::unsafe_blk => word(s.s, ~"unsafe "), ast::default_blk => () } maybe_print_comment(s, blk.span.lo); From 7236472e67a526d996570333669beb9d2dfa2128 Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Wed, 17 Oct 2012 11:21:45 -0700 Subject: [PATCH 28/40] word => word_space --- src/libsyntax/print/pprust.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 9668c161ce3d2..a57dd0bc5818e 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -877,7 +877,7 @@ fn print_possibly_embedded_block_(s: ps, blk: ast::blk, embedded: embed_type, indented: uint, attrs: ~[ast::attribute], close_box: bool) { match blk.node.rules { - ast::unsafe_blk => word(s.s, ~"unsafe "), + ast::unsafe_blk => word_space(s, ~"unsafe"), ast::default_blk => () } maybe_print_comment(s, blk.span.lo); From b532a8e5856d525bb7121f4bc00603b91bc2cc5d Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Wed, 17 Oct 2012 11:23:02 -0700 Subject: [PATCH 29/40] Line length --- src/libsyntax/print/pprust.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index a57dd0bc5818e..3fc2bcda90182 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -502,7 +502,8 @@ fn print_item(s: ps, &&item: @ast::item) { end(s); // end the outer ibox } ast::item_enum(enum_definition, params) => { - print_enum_def(s, enum_definition, params, item.ident, item.span, item.vis); + print_enum_def(s, enum_definition, params, item.ident, + item.span, item.vis); } ast::item_class(struct_def, tps) => { head(s, visibility_qualified(item.vis, ~"struct")); From 32baf1c54c4214f5a50da53979008ef9bcdad359 Mon Sep 17 00:00:00 2001 From: Kevin Cantu Date: Thu, 11 Oct 2012 16:54:31 -0700 Subject: [PATCH 30/40] Add a module to getopts to support verbose option definition This is built on top of the existing functionality, but adds a `groups` module which defines functions allowing the user to specify whole short/long/description groups at once and provides a usage message. 
--- src/libcore/str.rs | 62 ++++++ src/libstd/getopts.rs | 412 ++++++++++++++++++++++++++++++++++++- src/rustc/driver/driver.rs | 92 ++++++--- src/rustc/driver/rustc.rs | 48 +---- 4 files changed, 546 insertions(+), 68 deletions(-) diff --git a/src/libcore/str.rs b/src/libcore/str.rs index aff4c50cfd2d4..c32858fc1e325 100644 --- a/src/libcore/str.rs +++ b/src/libcore/str.rs @@ -203,6 +203,13 @@ pub pure fn connect(v: &[~str], sep: &str) -> ~str { move s } +/// Given a string, make a new string with repeated copies of it +pub fn repeat(ss: &str, nn: uint) -> ~str { + let mut acc = ~""; + for nn.times { acc += ss; } + return acc; +} + /* Section: Adding to and removing from a string */ @@ -573,6 +580,40 @@ pub pure fn words(s: &str) -> ~[~str] { split_nonempty(s, |c| char::is_whitespace(c)) } +/** Split a string into a vector of substrings, + * each of which is less than a limit + */ +pub fn split_within(ss: &str, lim: uint) -> ~[~str] { + let words = str::words(ss); + + // empty? + if words == ~[] { return ~[]; } + + let mut rows : ~[~str] = ~[]; + let mut row : ~str = ~""; + + for words.each |wptr| { + let word = *wptr; + + // if adding this word to the row would go over the limit, + // then start a new row + if str::len(row) + str::len(word) + 1 > lim { + rows += [row]; // save previous row + row = word; // start a new one + } else { + if str::len(row) > 0 { row += ~" " } // separate words + row += word; // append to this row + } + } + + // save the last row + if row != ~"" { rows += [row]; } + + return rows; +} + + + /// Convert a string to lowercase. ASCII only pub pure fn to_lower(s: &str) -> ~str { map(s, @@ -2465,6 +2506,18 @@ mod tests { assert ~[] == words(~""); } + #[test] + fn test_split_within() { + assert split_within(~"", 0) == ~[]; + assert split_within(~"", 15) == ~[]; + assert split_within(~"hello", 15) == ~[~"hello"]; + + let data = ~"\nMary had a little lamb\nLittle lamb\n"; + assert split_within(data, 15) == ~[~"Mary had a little", + ~"lamb Little", + ~"lamb"]; + } + #[test] fn test_find_str() { // byte positions @@ -2540,6 +2593,15 @@ mod tests { t(~[~"hi"], ~" ", ~"hi"); } + #[test] + fn test_repeat() { + assert repeat(~"x", 4) == ~"xxxx"; + assert repeat(~"hi", 4) == ~"hihihihi"; + assert repeat(~"ไท华", 3) == ~"ไท华ไท华ไท华"; + assert repeat(~"", 4) == ~""; + assert repeat(~"hi", 0) == ~""; + } + #[test] fn test_to_upper() { // libc::toupper, and hence str::to_upper diff --git a/src/libstd/getopts.rs b/src/libstd/getopts.rs index 6da51571e34a2..8d77b88aba230 100644 --- a/src/libstd/getopts.rs +++ b/src/libstd/getopts.rs @@ -82,7 +82,7 @@ pub type Opt = {name: Name, hasarg: HasArg, occur: Occur}; fn mkname(nm: &str) -> Name { let unm = str::from_slice(nm); - return if str::len(nm) == 1u { + return if nm.len() == 1u { Short(str::char_at(unm, 0u)) } else { Long(unm) }; } @@ -114,6 +114,22 @@ impl Occur : Eq { pure fn ne(other: &Occur) -> bool { !self.eq(other) } } +impl HasArg : Eq { + pure fn eq(other: &HasArg) -> bool { + (self as uint) == ((*other) as uint) + } + pure fn ne(other: &HasArg) -> bool { !self.eq(other) } +} + +impl Opt : Eq { + pure fn eq(other: &Opt) -> bool { + self.name == (*other).name && + self.hasarg == (*other).hasarg && + self.occur == (*other).occur + } + pure fn ne(other: &Opt) -> bool { !self.eq(other) } +} + /// Create an option that is required and takes an argument pub fn reqopt(name: &str) -> Opt { return {name: mkname(name), hasarg: Yes, occur: Req}; @@ -150,8 +166,29 @@ enum Optval { Val(~str), Given, } */ pub type Matches = {opts: ~[Opt], 
vals: ~[~[Optval]], free: ~[~str]}; +impl Optval : Eq { + pure fn eq(other: &Optval) -> bool { + match self { + Val(ref s) => match *other { Val (ref os) => s == os, + Given => false }, + Given => match *other { Val(_) => false, + Given => true } + } + } + pure fn ne(other: &Optval) -> bool { !self.eq(other) } +} + +impl Matches : Eq { + pure fn eq(other: &Matches) -> bool { + self.opts == (*other).opts && + self.vals == (*other).vals && + self.free == (*other).free + } + pure fn ne(other: &Matches) -> bool { !self.eq(other) } +} + fn is_arg(arg: &str) -> bool { - return str::len(arg) > 1u && arg[0] == '-' as u8; + return arg.len() > 1u && arg[0] == '-' as u8; } fn name_str(nm: &Name) -> ~str { @@ -177,6 +214,35 @@ pub enum Fail_ { UnexpectedArgument(~str), } +impl Fail_ : Eq { + // this whole thing should be easy to infer... + pure fn eq(other: &Fail_) -> bool { + match self { + ArgumentMissing(ref s) => { + match *other { ArgumentMissing(ref so) => s == so, + _ => false } + } + UnrecognizedOption(ref s) => { + match *other { UnrecognizedOption(ref so) => s == so, + _ => false } + } + OptionMissing(ref s) => { + match *other { OptionMissing(ref so) => s == so, + _ => false } + } + OptionDuplicated(ref s) => { + match *other { OptionDuplicated(ref so) => s == so, + _ => false } + } + UnexpectedArgument(ref s) => { + match *other { UnexpectedArgument(ref so) => s == so, + _ => false } + } + } + } + pure fn ne(other: &Fail_) -> bool { !self.eq(other) } +} + /// Convert a `fail_` enum into an error string pub fn fail_str(f: Fail_) -> ~str { return match f { @@ -220,7 +286,7 @@ pub fn getopts(args: &[~str], opts: &[Opt]) -> Result unsafe { let mut i = 0u; while i < l { let cur = args[i]; - let curlen = str::len(cur); + let curlen = cur.len(); if !is_arg(cur) { free.push(cur); } else if cur == ~"--" { @@ -444,6 +510,194 @@ impl FailType : Eq { pure fn ne(other: &FailType) -> bool { !self.eq(other) } } +/** A module which provides a way to specify descriptions and + * groups of short and long option names, together. 
+ */ +pub mod groups { + + /** one group of options, e.g., both -h and --help, along with + * their shared description and properties + */ + pub type OptGroup = { + short_name: ~str, + long_name: ~str, + hint: ~str, + desc: ~str, + hasarg: HasArg, + occur: Occur + }; + + impl OptGroup : Eq { + pure fn eq(other: &OptGroup) -> bool { + self.short_name == (*other).short_name && + self.long_name == (*other).long_name && + self.hint == (*other).hint && + self.desc == (*other).desc && + self.hasarg == (*other).hasarg && + self.occur == (*other).occur + } + pure fn ne(other: &OptGroup) -> bool { !self.eq(other) } + } + + /// Create a long option that is required and takes an argument + pub fn reqopt(short_name: &str, long_name: &str, + desc: &str, hint: &str) -> OptGroup { + let len = short_name.len(); + assert len == 1 || len == 0; + return {short_name: str::from_slice(short_name), + long_name: str::from_slice(long_name), + hint: str::from_slice(hint), + desc: str::from_slice(desc), + hasarg: Yes, + occur: Req}; + } + + /// Create a long option that is optional and takes an argument + pub fn optopt(short_name: &str, long_name: &str, + desc: &str, hint: &str) -> OptGroup { + let len = short_name.len(); + assert len == 1 || len == 0; + return {short_name: str::from_slice(short_name), + long_name: str::from_slice(long_name), + hint: str::from_slice(hint), + desc: str::from_slice(desc), + hasarg: Yes, + occur: Optional}; + } + + /// Create a long option that is optional and does not take an argument + pub fn optflag(short_name: &str, long_name: &str, + desc: &str) -> OptGroup { + let len = short_name.len(); + assert len == 1 || len == 0; + return {short_name: str::from_slice(short_name), + long_name: str::from_slice(long_name), + hint: ~"", + desc: str::from_slice(desc), + hasarg: No, + occur: Optional}; + } + + /// Create a long option that is optional and takes an optional argument + pub fn optflagopt(short_name: &str, long_name: &str, + desc: &str, hint: &str) -> OptGroup { + let len = short_name.len(); + assert len == 1 || len == 0; + return {short_name: str::from_slice(short_name), + long_name: str::from_slice(long_name), + hint: str::from_slice(hint), + desc: str::from_slice(desc), + hasarg: Maybe, + occur: Optional}; + } + + /** + * Create a long option that is optional, takes an argument, and may occur + * multiple times + */ + pub fn optmulti(short_name: &str, long_name: &str, + desc: &str, hint: &str) -> OptGroup { + let len = short_name.len(); + assert len == 1 || len == 0; + return {short_name: str::from_slice(short_name), + long_name: str::from_slice(long_name), + hint: str::from_slice(hint), + desc: str::from_slice(desc), + hasarg: Yes, + occur: Multi}; + } + + // translate OptGroup into Opt + // (both short and long names correspond to different Opts) + pub fn long_to_short(lopt: &OptGroup) -> ~[Opt] { + match ((*lopt).short_name.len(), + (*lopt).long_name.len()) { + + (0,0) => fail ~"this long-format option was given no name", + + (0,_) => ~[{name: Long(((*lopt).long_name)), + hasarg: (*lopt).hasarg, + occur: (*lopt).occur}], + + (1,0) => ~[{name: Short(str::char_at((*lopt).short_name, 0)), + hasarg: (*lopt).hasarg, + occur: (*lopt).occur}], + + (1,_) => ~[{name: Short(str::char_at((*lopt).short_name, 0)), + hasarg: (*lopt).hasarg, + occur: (*lopt).occur}, + {name: Long(((*lopt).long_name)), + hasarg: (*lopt).hasarg, + occur: (*lopt).occur}], + + (_,_) => fail ~"something is wrong with the long-form opt" + } + } + + /* + * Parse command line args with the provided long format options 
+ */ + pub fn getopts(args: &[~str], opts: &[OptGroup]) -> Result { + ::getopts::getopts(args, vec::flat_map(opts, long_to_short)) + } + + /** + * Derive a usage message from a set of long options + */ + pub fn usage(brief: &str, opts: &[OptGroup]) -> ~str { + + let desc_sep = ~"\n" + str::repeat(~" ", 24); + + let rows = vec::map(opts, |optref| { + let short_name = (*optref).short_name; + let long_name = (*optref).long_name; + let hint = (*optref).hint; + let desc = (*optref).desc; + let hasarg = (*optref).hasarg; + + let mut row = str::repeat(~" ", 4); + + // short option + row += match short_name.len() { + 0 => ~"", + 1 => ~"-" + short_name + " ", + _ => fail ~"the short name should only be 1 char long", + }; + + // long option + row += match long_name.len() { + 0 => ~"", + _ => ~"--" + long_name + " ", + }; + + // arg + row += match hasarg { + No => ~"", + Yes => hint, + Maybe => ~"[" + hint + ~"]", + }; + + // here we just need to indent the start of the description + let rowlen = row.len(); + row += if rowlen < 24 { + str::repeat(~" ", 24 - rowlen) + } else { + desc_sep + }; + + // wrapped description + row += str::connect(str::split_within(desc, 54), desc_sep); + + row + }); + + return str::from_slice(brief) + + ~"\n\nOptions:\n" + + str::connect(rows, ~"\n") + + ~"\n\n"; + } +} // end groups module + #[cfg(test)] mod tests { #[legacy_exports]; @@ -943,6 +1197,158 @@ mod tests { assert opts_present(matches, ~[~"L"]); assert opts_str(matches, ~[~"L"]) == ~"foo"; } + + #[test] + fn test_groups_reqopt() { + let opt = groups::reqopt(~"b", ~"banana", ~"some bananas", ~"VAL"); + assert opt == { short_name: ~"b", + long_name: ~"banana", + hint: ~"VAL", + desc: ~"some bananas", + hasarg: Yes, + occur: Req } + } + + #[test] + fn test_groups_optopt() { + let opt = groups::optopt(~"a", ~"apple", ~"some apples", ~"VAL"); + assert opt == { short_name: ~"a", + long_name: ~"apple", + hint: ~"VAL", + desc: ~"some apples", + hasarg: Yes, + occur: Optional } + } + + #[test] + fn test_groups_optflag() { + let opt = groups::optflag(~"k", ~"kiwi", ~"some kiwis"); + assert opt == { short_name: ~"k", + long_name: ~"kiwi", + hint: ~"", + desc: ~"some kiwis", + hasarg: No, + occur: Optional } + } + + #[test] + fn test_groups_optflagopt() { + let opt = groups::optflagopt(~"p", ~"pineapple", + ~"some pineapples", ~"VAL"); + assert opt == { short_name: ~"p", + long_name: ~"pineapple", + hint: ~"VAL", + desc: ~"some pineapples", + hasarg: Maybe, + occur: Optional } + } + + #[test] + fn test_groups_optmulti() { + let opt = groups::optmulti(~"l", ~"lime", + ~"some limes", ~"VAL"); + assert opt == { short_name: ~"l", + long_name: ~"lime", + hint: ~"VAL", + desc: ~"some limes", + hasarg: Yes, + occur: Multi } + } + + #[test] + fn test_groups_long_to_short() { + let short = ~[reqopt(~"b"), reqopt(~"banana")]; + let verbose = groups::reqopt(~"b", ~"banana", + ~"some bananas", ~"VAL"); + + assert groups::long_to_short(&verbose) == short; + } + + #[test] + fn test_groups_getopts() { + let short = ~[ + reqopt(~"b"), reqopt(~"banana"), + optopt(~"a"), optopt(~"apple"), + optflag(~"k"), optflagopt(~"kiwi"), + optflagopt(~"p"), + optmulti(~"l") + ]; + + let verbose = ~[ + groups::reqopt(~"b", ~"banana", ~"Desc", ~"VAL"), + groups::optopt(~"a", ~"apple", ~"Desc", ~"VAL"), + groups::optflag(~"k", ~"kiwi", ~"Desc"), + groups::optflagopt(~"p", ~"", ~"Desc", ~"VAL"), + groups::optmulti(~"l", ~"", ~"Desc", ~"VAL"), + ]; + + let sample_args = ~[~"-k", ~"15", ~"--apple", ~"1", ~"k", + ~"-p", ~"16", ~"l", ~"35"]; + + // NOTE: we 
should sort before comparing + assert getopts(sample_args, short) + == groups::getopts(sample_args, verbose); + } + + #[test] + fn test_groups_usage() { + let optgroups = ~[ + groups::reqopt(~"b", ~"banana", ~"Desc", ~"VAL"), + groups::optopt(~"a", ~"012345678901234567890123456789", + ~"Desc", ~"VAL"), + groups::optflag(~"k", ~"kiwi", ~"Desc"), + groups::optflagopt(~"p", ~"", ~"Desc", ~"VAL"), + groups::optmulti(~"l", ~"", ~"Desc", ~"VAL"), + ]; + + let expected = +~"Usage: fruits + +Options: + -b --banana VAL Desc + -a --012345678901234567890123456789 VAL + Desc + -k --kiwi Desc + -p [VAL] Desc + -l VAL Desc + +"; + + let generated_usage = groups::usage(~"Usage: fruits", optgroups); + + debug!("expected: <<%s>>", expected); + debug!("generated: <<%s>>", generated_usage); + assert generated_usage == expected; + } + + #[test] + fn test_groups_usage_description_wrapping() { + // indentation should be 24 spaces + // lines wrap after 78: or rather descriptions wrap after 54 + + let optgroups = ~[ + groups::optflag(~"k", ~"kiwi", + ~"This is a long description which won't be wrapped..+.."), // 54 + groups::optflag(~"a", ~"apple", + ~"This is a long description which _will_ be wrapped..+.."), // 55 + ]; + + let expected = +~"Usage: fruits + +Options: + -k --kiwi This is a long description which won't be wrapped..+.. + -a --apple This is a long description which _will_ be + wrapped..+.. + +"; + + let usage = groups::usage(~"Usage: fruits", optgroups); + + debug!("expected: <<%s>>", expected); + debug!("generated: <<%s>>", usage); + assert usage == expected + } } // Local Variables: diff --git a/src/rustc/driver/driver.rs b/src/rustc/driver/driver.rs index e389f3a4bdf7d..1c79f91cf24aa 100644 --- a/src/rustc/driver/driver.rs +++ b/src/rustc/driver/driver.rs @@ -10,8 +10,10 @@ use util::ppaux; use back::link; use result::{Ok, Err}; use std::getopts; +use std::getopts::{opt_present}; +use std::getopts::groups; +use std::getopts::groups::{optopt, optmulti, optflag, optflagopt, getopts}; use io::WriterUtil; -use getopts::{optopt, optmulti, optflag, optflagopt, opt_present}; use back::{x86, x86_64}; use std::map::HashMap; use lib::llvm::llvm; @@ -624,27 +626,69 @@ fn parse_pretty(sess: session, &&name: ~str) -> pp_mode { } } -fn opts() -> ~[getopts::Opt] { - return ~[optflag(~"h"), optflag(~"help"), - optflag(~"v"), optflag(~"version"), - optflag(~"emit-llvm"), optflagopt(~"pretty"), - optflag(~"ls"), optflag(~"parse-only"), optflag(~"no-trans"), - optflag(~"O"), optopt(~"opt-level"), optmulti(~"L"), optflag(~"S"), - optopt(~"o"), optopt(~"out-dir"), optflag(~"xg"), - optflag(~"c"), optflag(~"g"), optflag(~"save-temps"), - optopt(~"sysroot"), optopt(~"target"), - optflag(~"jit"), - - optmulti(~"W"), optmulti(~"warn"), - optmulti(~"A"), optmulti(~"allow"), - optmulti(~"D"), optmulti(~"deny"), - optmulti(~"F"), optmulti(~"forbid"), - - optmulti(~"Z"), - - optmulti(~"cfg"), optflag(~"test"), - optflag(~"lib"), optflag(~"bin"), - optflag(~"static"), optflag(~"gc")]; +// rustc command line options +fn optgroups() -> ~[getopts::groups::OptGroup] { + ~[ + optflag(~"", ~"bin", ~"Compile an executable crate (default)"), + optflag(~"c", ~"", ~"Compile and assemble, but do not link"), + optmulti(~"", ~"cfg", ~"Configure the compilation + environment", ~"SPEC"), + optflag(~"", ~"emit-llvm", + ~"Produce an LLVM bitcode file"), + optflag(~"g", ~"", ~"Produce debug info (experimental)"), + optflag(~"", ~"gc", ~"Garbage collect shared data (experimental)"), + optflag(~"h", ~"help",~"Display this message"), + 
optmulti(~"L", ~"", ~"Add a directory to the library search path", + ~"PATH"), + optflag(~"", ~"lib", ~"Compile a library crate"), + optflag(~"", ~"ls", ~"List the symbols defined by a library crate"), + optflag(~"", ~"jit", ~"Execute using JIT (experimental)"), + optflag(~"", ~"no-trans", + ~"Run all passes except translation; no output"), + optflag(~"O", ~"", ~"Equivalent to --opt-level=2"), + optopt(~"o", ~"", ~"Write output to ", ~"FILENAME"), + optopt(~"", ~"opt-level", + ~"Optimize with possible levels 0-3", ~"LEVEL"), + optopt( ~"", ~"out-dir", + ~"Write output to compiler-chosen filename + in ", ~"DIR"), + optflag(~"", ~"parse-only", + ~"Parse only; do not compile, assemble, or link"), + optflagopt(~"", ~"pretty", + ~"Pretty-print the input instead of compiling; + valid types are: normal (un-annotated source), + expanded (crates expanded), + typed (crates expanded, with type annotations), + or identified (fully parenthesized, + AST nodes and blocks with IDs)", ~"TYPE"), + optflag(~"S", ~"", ~"Compile only; do not assemble or link"), + optflag(~"", ~"xg", ~"Extra debugging info (experimental)"), + optflag(~"", ~"save-temps", + ~"Write intermediate files (.bc, .opt.bc, .o) + in addition to normal output"), + optflag(~"", ~"static", + ~"Use or produce static libraries or binaries + (experimental)"), + optopt(~"", ~"sysroot", + ~"Override the system root", ~"PATH"), + optflag(~"", ~"test", ~"Build a test harness"), + optopt(~"", ~"target", + ~"Target triple cpu-manufacturer-kernel[-os] + to compile for (see + http://sources.redhat.com/autobook/autobook/autobook_17.html + for detail)", ~"TRIPLE"), + optmulti(~"W", ~"warn", + ~"Set lint warnings", ~"OPT"), + optmulti(~"A", ~"allow", + ~"Set lint allowed", ~"OPT"), + optmulti(~"D", ~"deny", + ~"Set lint denied", ~"OPT"), + optmulti(~"F", ~"forbid", + ~"Set lint forbidden", ~"OPT"), + optmulti(~"Z", ~"", ~"Set internal debugging options", "FLAG"), + optflag( ~"v", ~"version", + ~"Print version info and exit"), + ] } type output_filenames = @{out_filename:Path, obj_filename:Path}; @@ -742,7 +786,7 @@ mod test { #[test] fn test_switch_implies_cfg_test() { let matches = - match getopts::getopts(~[~"--test"], opts()) { + match getopts(~[~"--test"], optgroups()) { Ok(m) => m, Err(f) => fail ~"test_switch_implies_cfg_test: " + getopts::fail_str(f) @@ -759,7 +803,7 @@ mod test { #[test] fn test_switch_implies_cfg_test_unless_cfg_test() { let matches = - match getopts::getopts(~[~"--test", ~"--cfg=test"], opts()) { + match getopts(~[~"--test", ~"--cfg=test"], optgroups()) { Ok(m) => m, Err(f) => { fail ~"test_switch_implies_cfg_test_unless_cfg_test: " + diff --git a/src/rustc/driver/rustc.rs b/src/rustc/driver/rustc.rs index 5833723ec101b..b7783307cc318 100644 --- a/src/rustc/driver/rustc.rs +++ b/src/rustc/driver/rustc.rs @@ -16,6 +16,7 @@ use io::ReaderUtil; use std::getopts; use std::map::HashMap; use getopts::{opt_present}; +use getopts::groups; use rustc::driver::driver::*; use syntax::codemap; use syntax::diagnostic; @@ -31,46 +32,11 @@ fn version(argv0: &str) { } fn usage(argv0: &str) { - io::println(fmt!("Usage: %s [options] \n", argv0) + - ~" -Options: - - --bin Compile an executable crate (default) - -c Compile and assemble, but do not link - --cfg Configure the compilation environment - --emit-llvm Produce an LLVM bitcode file - -g Produce debug info (experimental) - --gc Garbage collect shared data (experimental/temporary) - -h --help Display this message - -L Add a directory to the library search path - --lib Compile a library crate - 
--ls List the symbols defined by a compiled library crate - --jit Execute using JIT (experimental) - --no-trans Run all passes except translation; no output - -O Equivalent to --opt-level=2 - -o Write output to - --opt-level Optimize with possible levels 0-3 - --out-dir Write output to compiler-chosen filename in - --parse-only Parse only; do not compile, assemble, or link - --pretty [type] Pretty-print the input instead of compiling; - valid types are: normal (un-annotated source), - expanded (crates expanded), typed (crates expanded, - with type annotations), or identified (fully - parenthesized, AST nodes and blocks with IDs) - -S Compile only; do not assemble or link - --save-temps Write intermediate files (.bc, .opt.bc, .o) - in addition to normal output - --static Use or produce static libraries or binaries - (experimental) - --sysroot Override the system root - --test Build a test harness - --target Target cpu-manufacturer-kernel[-os] to compile for - (default: host triple) - (see http://sources.redhat.com/autobook/autobook/ - autobook_17.html for detail) - -W help Print 'lint' options and default settings - -Z help Print internal options for debugging rustc - -v --version Print version info and exit + let message = fmt!("Usage: %s [OPTIONS] INPUT", argv0); + io::println(groups::usage(message, optgroups()) + + ~"Additional help: + -W help Print 'lint' options and default settings + -Z help Print internal options for debugging rustc "); } @@ -127,7 +93,7 @@ fn run_compiler(args: &~[~str], demitter: diagnostic::emitter) { if args.is_empty() { usage(binary); return; } let matches = - match getopts::getopts(args, opts()) { + match getopts::groups::getopts(args, optgroups()) { Ok(m) => m, Err(f) => { early_error(demitter, getopts::fail_str(f)) From f2544d8d80f20e9c1e39c3c455b18ebe1922d1d5 Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Wed, 17 Oct 2012 12:16:57 -0700 Subject: [PATCH 31/40] Fix long lines --- src/cargo/cargo.rs | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/src/cargo/cargo.rs b/src/cargo/cargo.rs index 322d780042089..a06eb19910be9 100644 --- a/src/cargo/cargo.rs +++ b/src/cargo/cargo.rs @@ -1165,8 +1165,8 @@ fn sync_one_file(c: &Cargo, dir: &Path, src: @Source) -> bool { let r = pgp::verify(&c.root, &pkgfile, &sigfile); if !r { - error(fmt!("signature verification failed for source %s with key %s", - name, f)); + error(fmt!("signature verification failed for source %s with \ + key %s", name, f)); return false; } @@ -1174,8 +1174,8 @@ fn sync_one_file(c: &Cargo, dir: &Path, src: @Source) -> bool { let e = pgp::verify(&c.root, &srcfile, &srcsigfile); if !e { - error(fmt!("signature verification failed for source %s with key %s", - name, f)); + error(fmt!("signature verification failed for source %s \ + with key %s", name, f)); return false; } } @@ -1276,8 +1276,8 @@ fn sync_one_git(c: &Cargo, dir: &Path, src: @Source) -> bool { let r = pgp::verify(&c.root, &pkgfile, &sigfile); if !r { - error(fmt!("signature verification failed for source %s with key %s", - name, f)); + error(fmt!("signature verification failed for source %s with \ + key %s", name, f)); rollback(name, dir, false); return false; } @@ -1286,8 +1286,8 @@ fn sync_one_git(c: &Cargo, dir: &Path, src: @Source) -> bool { let e = pgp::verify(&c.root, &srcfile, &srcsigfile); if !e { - error(fmt!("signature verification failed for source %s with key %s", - name, f)); + error(fmt!("signature verification failed for source %s \ + with key %s", name, f)); rollback(name, dir, 
false); return false; } @@ -1373,8 +1373,8 @@ fn sync_one_curl(c: &Cargo, dir: &Path, src: @Source) -> bool { let r = pgp::verify(&c.root, &pkgfile, &sigfile); if !r { - error(fmt!("signature verification failed for source %s with key %s", - name, f)); + error(fmt!("signature verification failed for source %s with \ + key %s", name, f)); return false; } From fd6be2fa4eac4638174a484529ef782007d0fec1 Mon Sep 17 00:00:00 2001 From: Daniel Patterson Date: Thu, 4 Oct 2012 18:18:02 -0400 Subject: [PATCH 32/40] std::treemap - changing types to reflect constraints, adding equality check (space expensive) --- src/libstd/treemap.rs | 56 +++++++++++++++++++++++++++++++++++-------- 1 file changed, 46 insertions(+), 10 deletions(-) diff --git a/src/libstd/treemap.rs b/src/libstd/treemap.rs index 8ab0dc7f2e7da..f332c06d0bc3b 100644 --- a/src/libstd/treemap.rs +++ b/src/libstd/treemap.rs @@ -11,28 +11,28 @@ use core::cmp::{Eq, Ord}; use core::option::{Some, None}; use Option = core::Option; -pub type TreeMap = @mut TreeEdge; +pub type TreeMap = @mut TreeEdge; -type TreeEdge = Option<@TreeNode>; +type TreeEdge = Option<@TreeNode>; -enum TreeNode = { +struct TreeNode { key: K, mut value: V, mut left: TreeEdge, mut right: TreeEdge -}; +} /// Create a treemap -pub fn TreeMap() -> TreeMap { @mut None } +pub fn TreeMap() -> TreeMap { @mut None } /// Insert a value into the map pub fn insert(m: &mut TreeEdge, k: K, v: V) { match copy *m { None => { - *m = Some(@TreeNode({key: k, - mut value: v, - mut left: None, - mut right: None})); + *m = Some(@TreeNode {key: k, + mut value: v, + mut left: None, + mut right: None}); return; } Some(node) => { @@ -67,7 +67,8 @@ pub fn find(m: &const TreeEdge, k: K) } /// Visit all pairs in the map in order. -pub fn traverse(m: &const TreeEdge, f: fn((&K), (&V))) { +pub fn traverse(m: &const TreeEdge, + f: fn((&K), (&V))) { match copy *m { None => (), Some(node) => { @@ -79,6 +80,19 @@ pub fn traverse(m: &const TreeEdge, f: fn((&K), (&V))) { } } +/// Compare two treemaps and return true iff +/// they contain same keys and values +pub fn equals(t1: &const TreeEdge, + t2: &const TreeEdge) + -> bool { + let mut v1 = ~[]; + let mut v2 = ~[]; + traverse(t1, |k,v| { v1.push((copy *k, copy *v)) }); + traverse(t2, |k,v| { v2.push((copy *k, copy *v)) }); + return v1 == v2; +} + + #[cfg(test)] mod tests { #[legacy_exports]; @@ -127,6 +141,28 @@ mod tests { traverse(m, |x,y| t(n, *x, *y)); } + #[test] + fn equality() { + let m1 = TreeMap(); + insert(m1, 3, ()); + insert(m1, 0, ()); + insert(m1, 4, ()); + insert(m1, 2, ()); + insert(m1, 1, ()); + let m2 = TreeMap(); + insert(m2, 2, ()); + insert(m2, 1, ()); + insert(m2, 3, ()); + insert(m2, 0, ()); + insert(m2, 4, ()); + + assert equals(m1, m2); + + let m3 = TreeMap(); + assert !equals(m1,m3); + + } + #[test] fn u8_map() { let m = TreeMap(); From bbc90b6bf6082f32cccef1011ac6b862a02957c4 Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Wed, 17 Oct 2012 12:59:10 -0700 Subject: [PATCH 33/40] Fix whitespace --- src/libstd/treemap.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libstd/treemap.rs b/src/libstd/treemap.rs index f332c06d0bc3b..e4b6c9b5b9a95 100644 --- a/src/libstd/treemap.rs +++ b/src/libstd/treemap.rs @@ -67,7 +67,7 @@ pub fn find(m: &const TreeEdge, k: K) } /// Visit all pairs in the map in order. 
-pub fn traverse(m: &const TreeEdge, +pub fn traverse(m: &const TreeEdge, f: fn((&K), (&V))) { match copy *m { None => (), @@ -80,10 +80,10 @@ pub fn traverse(m: &const TreeEdge, } } -/// Compare two treemaps and return true iff +/// Compare two treemaps and return true iff /// they contain same keys and values pub fn equals(t1: &const TreeEdge, - t2: &const TreeEdge) + t2: &const TreeEdge) -> bool { let mut v1 = ~[]; let mut v2 = ~[]; From cf8bded7aae0673f9275de7948508da3c2b58650 Mon Sep 17 00:00:00 2001 From: Arkaitz Jimenez Date: Wed, 17 Oct 2012 00:12:07 +0200 Subject: [PATCH 34/40] Enable configure to detect 32 bit systems on 64 bit kernels These systems run 32 bit binaries so arch needs to be forced to 32 bits. --- configure | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/configure b/configure index e67ea3af5f382..40c6acead0efa 100755 --- a/configure +++ b/configure @@ -257,6 +257,16 @@ case $CFG_CPUTYPE in err "unknown CPU type: $CFG_CPUTYPE" esac +# Detect 64 bit linux systems with 32 bit userland and force 32 bit compilation +if [ $CFG_OSTYPE = unknown-linux-gnu -a $CFG_CPUTYPE = x86_64 ] +then + file -L "$SHELL" | grep -q "x86[_-]64" + if [ $? != 0 ]; then + CFG_CPUTYPE=i686 + fi +fi + + DEFAULT_HOST_TRIPLE="${CFG_CPUTYPE}-${CFG_OSTYPE}" CFG_SRC_DIR="$(cd $(dirname $0) && pwd)/" From d9f1426e69410f0eda9b4c1b2e87042a8bbda41d Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Wed, 17 Oct 2012 13:47:24 -0700 Subject: [PATCH 35/40] Fix copy warnings in str --- src/libcore/str.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/libcore/str.rs b/src/libcore/str.rs index da1defc38b18b..abd9621cf0f42 100644 --- a/src/libcore/str.rs +++ b/src/libcore/str.rs @@ -207,7 +207,7 @@ pub pure fn connect(v: &[~str], sep: &str) -> ~str { pub fn repeat(ss: &str, nn: uint) -> ~str { let mut acc = ~""; for nn.times { acc += ss; } - return acc; + move acc } /* @@ -593,23 +593,23 @@ pub fn split_within(ss: &str, lim: uint) -> ~[~str] { let mut row : ~str = ~""; for words.each |wptr| { - let word = *wptr; + let word = copy *wptr; // if adding this word to the row would go over the limit, // then start a new row - if str::len(row) + str::len(word) + 1 > lim { - rows += [row]; // save previous row - row = word; // start a new one + if row.len() + word.len() + 1 > lim { + rows.push(copy row); // save previous row + row = move word; // start a new one } else { - if str::len(row) > 0 { row += ~" " } // separate words + if row.len() > 0 { row += ~" " } // separate words row += word; // append to this row } } // save the last row - if row != ~"" { rows += [row]; } + if row != ~"" { rows.push(move row); } - return rows; + move rows } From 7dde840dc6b077974d48ae7c966ea16a0e36f3f6 Mon Sep 17 00:00:00 2001 From: Tim Chevalier Date: Wed, 17 Oct 2012 13:47:24 -0700 Subject: [PATCH 36/40] Fix copy warnings in str --- src/libcore/str.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/libcore/str.rs b/src/libcore/str.rs index abd9621cf0f42..26f29e388cb0a 100644 --- a/src/libcore/str.rs +++ b/src/libcore/str.rs @@ -2527,9 +2527,10 @@ mod tests { assert split_within(~"hello", 15) == ~[~"hello"]; let data = ~"\nMary had a little lamb\nLittle lamb\n"; - assert split_within(data, 15) == ~[~"Mary had a little", - ~"lamb Little", - ~"lamb"]; + error!("~~~~ %?", split_within(data, 15)); + assert split_within(data, 15) == ~[~"Mary had a", + ~"little lamb", + ~"Little lamb"]; } #[test] From e7e1bab27fa8341467b5160506bbd580b5f6bbb7 Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Philipp=20Br=C3=BCschweiler?= Date: Tue, 9 Oct 2012 11:59:03 +0200 Subject: [PATCH 37/40] libsyntax: refactor the parser to consider foreign items as items parse_item_or_view_item() would drop visibility if none of the conditions following it would hold. This was the case when parsing extern {} blocks, where the function was only used to parse view items, but discarded the visibility of the first not-view item. --- src/libcore/str.rs | 2 +- src/libsyntax/parse/parser.rs | 86 +++++++++++++------ src/test/compile-fail/duplicate-visibility.rs | 4 + 3 files changed, 65 insertions(+), 27 deletions(-) create mode 100644 src/test/compile-fail/duplicate-visibility.rs diff --git a/src/libcore/str.rs b/src/libcore/str.rs index 26f29e388cb0a..d59f36e068137 100644 --- a/src/libcore/str.rs +++ b/src/libcore/str.rs @@ -1952,7 +1952,7 @@ pub mod raw { } /// Converts a vector of bytes to a string. - pub pub unsafe fn from_bytes(v: &[const u8]) -> ~str { + pub unsafe fn from_bytes(v: &[const u8]) -> ~str { do vec::as_const_buf(v) |buf, len| { from_buf_len(buf, len) } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index e29620a7e79dd..cbad662800b5f 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -124,12 +124,13 @@ type item_info = (ident, item_, Option<~[attribute]>); enum item_or_view_item { iovi_none, iovi_item(@item), + iovi_foreign_item(@foreign_item), iovi_view_item(@view_item) } enum view_item_parse_mode { VIEW_ITEMS_AND_ITEMS_ALLOWED, - VIEW_ITEMS_ALLOWED, + VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED, IMPORTS_AND_ITEMS_ALLOWED } @@ -2184,7 +2185,7 @@ impl Parser { let item_attrs = vec::append(first_item_attrs, item_attrs); - match self.parse_item_or_view_item(item_attrs, true) { + match self.parse_item_or_view_item(item_attrs, true, false) { iovi_item(i) => { let mut hi = i.span.hi; let decl = @spanned(lo, hi, decl_item(i)); @@ -2194,6 +2195,9 @@ impl Parser { self.span_fatal(vi.span, ~"view items must be declared at \ the top of the block"); } + iovi_foreign_item(_) => { + self.fatal(~"foreign items are not allowed here"); + } iovi_none() => { /* fallthrough */ } } @@ -2259,7 +2263,7 @@ impl Parser { let mut stmts = ~[]; let mut expr = None; - let {attrs_remaining, view_items, items: items} = + let {attrs_remaining, view_items, items: items, _} = self.parse_items_and_view_items(first_item_attrs, IMPORTS_AND_ITEMS_ALLOWED); @@ -2844,7 +2848,7 @@ impl Parser { fn parse_mod_items(term: token::Token, +first_item_attrs: ~[attribute]) -> _mod { // Shouldn't be any view items since we've already parsed an item attr - let {attrs_remaining, view_items, items: starting_items} = + let {attrs_remaining, view_items, items: starting_items, _} = self.parse_items_and_view_items(first_item_attrs, VIEW_ITEMS_AND_ITEMS_ALLOWED); let mut items: ~[@item] = move starting_items; @@ -2858,7 +2862,7 @@ impl Parser { } debug!("parse_mod_items: parse_item_or_view_item(attrs=%?)", attrs); - match self.parse_item_or_view_item(attrs, true) { + match self.parse_item_or_view_item(attrs, true, false) { iovi_item(item) => items.push(item), iovi_view_item(view_item) => { self.span_fatal(view_item.span, ~"view items must be \ @@ -2958,11 +2962,11 @@ impl Parser { +first_item_attrs: ~[attribute]) -> foreign_mod { // Shouldn't be any view items since we've already parsed an item attr - let {attrs_remaining, view_items, items: _} = + let {attrs_remaining, view_items, items: _, foreign_items} = self.parse_items_and_view_items(first_item_attrs, - 
VIEW_ITEMS_ALLOWED); + VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED); - let mut items: ~[@foreign_item] = ~[]; + let mut items: ~[@foreign_item] = move foreign_items; let mut initial_attrs = attrs_remaining; while self.token != token::RBRACE { let attrs = vec::append(initial_attrs, @@ -2971,7 +2975,7 @@ impl Parser { items.push(self.parse_foreign_item(attrs)); } return {sort: sort, view_items: view_items, - items: items}; + items: items}; } fn parse_item_foreign_mod(lo: uint, @@ -3229,8 +3233,11 @@ impl Parser { } } - fn parse_item_or_view_item(+attrs: ~[attribute], items_allowed: bool) + fn parse_item_or_view_item(+attrs: ~[attribute], items_allowed: bool, + foreign_items_allowed: bool) -> item_or_view_item { + assert items_allowed != foreign_items_allowed; + maybe_whole!(iovi self,nt_item); let lo = self.span.lo; @@ -3248,6 +3255,9 @@ impl Parser { return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); + } else if foreign_items_allowed && self.is_keyword(~"const") { + let item = self.parse_item_foreign_const(visibility, attrs); + return iovi_foreign_item(item); } else if items_allowed && self.is_keyword(~"fn") && !self.fn_expr_lookahead(self.look_ahead(1u)) { @@ -3262,6 +3272,10 @@ impl Parser { return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); + } else if foreign_items_allowed && + (self.is_keyword(~"fn") || self.is_keyword(~"pure")) { + let item = self.parse_item_foreign_fn(visibility, attrs); + return iovi_foreign_item(item); } else if items_allowed && self.is_keyword(~"unsafe") && self.look_ahead(1u) != token::LBRACE { self.bump(); @@ -3348,16 +3362,24 @@ impl Parser { return iovi_item(self.mk_item(lo, self.last_span.hi, id, item_, visibility, attrs)); } else { + if visibility != inherited { + let mut s = ~"unmatched visibility `"; + s += if visibility == public { ~"pub" } else { ~"priv" }; + s += ~"`"; + self.span_fatal(copy self.last_span, s); + } return iovi_none; }; } fn parse_item(+attrs: ~[attribute]) -> Option<@ast::item> { - match self.parse_item_or_view_item(attrs, true) { + match self.parse_item_or_view_item(attrs, true, false) { iovi_none => None, iovi_view_item(_) => self.fatal(~"view items are not allowed here"), + iovi_foreign_item(_) => + self.fatal(~"foreign items are not allowed here"), iovi_item(item) => Some(item) } @@ -3492,28 +3514,35 @@ impl Parser { mode: view_item_parse_mode) -> {attrs_remaining: ~[attribute], view_items: ~[@view_item], - items: ~[@item]} { + items: ~[@item], + foreign_items: ~[@foreign_item]} { let mut attrs = vec::append(first_item_attrs, self.parse_outer_attributes()); - let items_allowed; - match mode { - VIEW_ITEMS_AND_ITEMS_ALLOWED | IMPORTS_AND_ITEMS_ALLOWED => - items_allowed = true, - VIEW_ITEMS_ALLOWED => - items_allowed = false - } + let items_allowed = match mode { + VIEW_ITEMS_AND_ITEMS_ALLOWED | IMPORTS_AND_ITEMS_ALLOWED => true, + VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED => false + }; - let (view_items, items) = (DVec(), DVec()); + let restricted_to_imports = match mode { + IMPORTS_AND_ITEMS_ALLOWED => true, + VIEW_ITEMS_AND_ITEMS_ALLOWED | + VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED => false + }; + + let foreign_items_allowed = match mode { + VIEW_ITEMS_AND_FOREIGN_ITEMS_ALLOWED => true, + VIEW_ITEMS_AND_ITEMS_ALLOWED | IMPORTS_AND_ITEMS_ALLOWED => false + }; + + let (view_items, items, foreign_items) = (DVec(), DVec(), DVec()); loop { - match self.parse_item_or_view_item(attrs, items_allowed) { + match 
self.parse_item_or_view_item(attrs, items_allowed, + foreign_items_allowed) { iovi_none => break, iovi_view_item(view_item) => { - match mode { - VIEW_ITEMS_AND_ITEMS_ALLOWED | - VIEW_ITEMS_ALLOWED => {} - IMPORTS_AND_ITEMS_ALLOWED => + if restricted_to_imports { match view_item.node { view_item_import(_) => {} view_item_export(_) | view_item_use(*) => @@ -3528,13 +3557,18 @@ impl Parser { assert items_allowed; items.push(item) } + iovi_foreign_item(foreign_item) => { + assert foreign_items_allowed; + foreign_items.push(foreign_item); + } } attrs = self.parse_outer_attributes(); } {attrs_remaining: attrs, view_items: dvec::unwrap(move view_items), - items: dvec::unwrap(move items)} + items: dvec::unwrap(move items), + foreign_items: dvec::unwrap(move foreign_items)} } // Parses a source module as a crate diff --git a/src/test/compile-fail/duplicate-visibility.rs b/src/test/compile-fail/duplicate-visibility.rs new file mode 100644 index 0000000000000..32997fcce31a6 --- /dev/null +++ b/src/test/compile-fail/duplicate-visibility.rs @@ -0,0 +1,4 @@ +// error-pattern:unmatched visibility `pub` +extern { + pub pub fn foo(); +} From fdd7b4d8f4df135f30a3f04ff2f777bce1277d2a Mon Sep 17 00:00:00 2001 From: Graydon Hoare Date: Wed, 17 Oct 2012 16:40:18 -0700 Subject: [PATCH 38/40] core: first working sketch of a condition system. --- src/libcore/condition.rs | 117 +++++++++++++++++++++++++++++++++++++++ src/libcore/core.rc | 1 + 2 files changed, 118 insertions(+) create mode 100644 src/libcore/condition.rs diff --git a/src/libcore/condition.rs b/src/libcore/condition.rs new file mode 100644 index 0000000000000..5f61f2cd53b17 --- /dev/null +++ b/src/libcore/condition.rs @@ -0,0 +1,117 @@ +// helper for transmutation, shown below. +type RustClosure = (int,int); + +struct Condition { + key: task::local_data::LocalDataKey +} + +struct Handler { + // Handler should link to previous handler and + // reinstall it when popped. + handle: RustClosure +} + + +struct ProtectBlock { + cond: &Condition, + inner: RustClosure +} + +struct PopHandler { + cond: &Condition, + drop { + unsafe { + task::local_data::local_data_pop(self.cond.key); + } + } +} + +struct HandleBlock { + pb: &ProtectBlock, + handler: @Handler, + drop { + unsafe { + task::local_data::local_data_set(self.pb.cond.key, + self.handler); + let _pop = PopHandler { cond: self.pb.cond }; + // transmutation to avoid copying non-copyable, should + // be fixable by tracking closure pointees in regionck. + let f : &fn() = ::cast::transmute(self.pb.inner); + f(); + } + } +} + +impl ProtectBlock { + fn handle(&self, h: &self/fn(&T) ->U) -> HandleBlock/&self { + unsafe { + let p : *RustClosure = ::cast::transmute(&h); + HandleBlock { pb: self, + handler: @Handler{handle: *p} } + } + } +} + + +impl Condition { + + fn protect(&self, inner: &self/fn()) -> ProtectBlock/&self { + unsafe { + // transmutation to avoid copying non-copyable, should + // be fixable by tracking closure pointees in regionck. + let p : *RustClosure = ::cast::transmute(&inner); + ProtectBlock { cond: self, + inner: *p } } + } + + fn raise(t:&T) -> U { + unsafe { + match task::local_data::local_data_get(self.key) { + None => fail, + Some(handler) => { + io::println("got handler"); + let f : &fn(&T) -> U = ::cast::transmute(handler.handle); + f(t) + } + } + } + } +} + + +#[test] +fn happiness_key(_x: @Handler) { } + +#[test] +fn sadness_key(_x: @Handler) { } + +#[test] +fn trouble(i: int) { + // Condition should work as a const, just limitations in consts. 
+    let sadness_condition : Condition = Condition { key: sadness_key };
+    io::println("raising");
+    let j = sadness_condition.raise(&i);
+    io::println(fmt!("handler recovered with %d", j));
+}
+
+#[test]
+fn test() {
+
+    let sadness_condition : Condition = Condition { key: sadness_key };
+
+    let mut i = 10;
+
+    let b = do sadness_condition.protect {
+        io::println("in protected block");
+        trouble(1);
+        trouble(2);
+        trouble(3);
+    };
+
+    do b.handle |j| {
+        i += *j;
+        i
+    };
+
+    assert i == 16;
+}
\ No newline at end of file
diff --git a/src/libcore/core.rc b/src/libcore/core.rc
index 484c58750328d..9ebdad3521e53 100644
--- a/src/libcore/core.rc
+++ b/src/libcore/core.rc
@@ -200,6 +200,7 @@ pub mod flate;
 pub mod repr;
 pub mod cleanup;
 pub mod reflect;
+pub mod condition;
 
 // Modules supporting compiler-generated code
 // Exported but not part of the public interface

From 0f2fc71d799eb3a804fc469a381a56cf6311e7a7 Mon Sep 17 00:00:00 2001
From: Paul Stansifer
Date: Wed, 17 Oct 2012 21:40:39 -0400
Subject: [PATCH 39/40] Add examples to the parsing limitations section of the
 macro tutorial. (Thanks to bstrie for pointing them out!)
---
 doc/tutorial-macros.md | 21 +++++++++++++--------
 1 file changed, 13 insertions(+), 8 deletions(-)

diff --git a/doc/tutorial-macros.md b/doc/tutorial-macros.md
index 995944b2fbd89..c7e1ada648e9b 100644
--- a/doc/tutorial-macros.md
+++ b/doc/tutorial-macros.md
@@ -155,17 +155,22 @@ If it is under more, it'll be repeated, as appropriate.
 
 ## Parsing limitations
 
-The macro parser will parse Rust syntax with two limitations:
-1. The parser will always parse as much as possible. For example, if the comma
-were omitted from the syntax of `early_return!` above, `input_1 [` would've
-been interpreted as the beginning of an array index. In fact, invoking the
-macro would have been impossible.
-2. The parser must have eliminated all ambiguity by the time it reaches a
+For technical reasons, there are two limitations to the treatment of syntax
+fragments by the macro parser:
+
+1. The parser will always parse as much as possible of a Rust syntactic
+fragment. For example, if the comma were omitted from the syntax of
+`early_return!` above, `input_1 [` would've been interpreted as the beginning
+of an array index. In fact, invoking the macro would have been impossible.
+2. The parser must have eliminated all ambiguity by the time it reaches a
 `$name:fragment_specifier` declaration. This limitation can result in parse
 errors when declarations occur at the beginning of, or immediately after,
-a `$(...)*`. Changing the invocation syntax to require a distinctive
-token in front can solve the problem.
+a `$(...)*`. For example, the grammar `$($t:ty)* $e:expr` will always fail to
+parse because the parser would be forced to choose between parsing `t` and
+parsing `e`. Changing the invocation syntax to require a distinctive token in
+front can solve the problem. In the above example, `$(T $t:ty)* E $e:expr`
+solves the problem.
 
 ## A final note

From 2c1ed186fc2a4ccfa49d8395fe1aae85b10e402b Mon Sep 17 00:00:00 2001
From: Tony Young
Date: Thu, 18 Oct 2012 22:56:22 +1300
Subject: [PATCH 40/40] Check pandoc's version correctly for >=1.10.
---
 configure | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/configure b/configure
index 40c6acead0efa..cd500b6ec43a1 100755
--- a/configure
+++ b/configure
@@ -362,10 +362,10 @@ fi
 
 if [ ! -z "$CFG_PANDOC" ]
 then
-    PV=$(pandoc --version | awk '/^pandoc/ {print $2}')
-    if [ "$PV" \< "1.8" ]
+    read PV_MAJOR PV_MINOR <<<$(pandoc --version | awk '/^pandoc/ {split($2, PV, "."); print PV[1] " " PV[2]}')
+    if [ "$PV_MAJOR" -lt "1" ] || [ "$PV_MINOR" -lt "8" ]
    then
-        step_msg "pandoc $PV is too old. disabling"
+        step_msg "pandoc $PV_MAJOR.$PV_MINOR is too old. disabling"
         BAD_PANDOC=1
     fi
 fi
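
The version test in the final patch works around a string-comparison pitfall: lexically, "1.10" sorts before "1.8", so the old `[ "$PV" \< "1.8" ]` check wrongly rejected pandoc 1.10; the patch fixes that by splitting the version into numeric major and minor fields. The standalone sketch below spells out the same numeric comparison in portable sh. It is an illustration only, not part of the configure script; it assumes only that `pandoc --version` prints a banner line of the form "pandoc X.Y[.Z...]", and the PV_MAJOR/PV_MINOR names simply mirror the patch.

    # Sketch: numeric "pandoc >= 1.8" check (illustrative, not the configure code above).
    # Assumes `pandoc --version` reports a banner line like "pandoc 1.9.4.2".
    PV=$(pandoc --version 2>/dev/null | awk '/^pandoc/ {print $2; exit}')
    PV_MAJOR=$(echo "$PV" | cut -d. -f1)
    PV_MINOR=$(echo "$PV" | cut -d. -f2)

    if [ -z "$PV" ]
    then
        echo "pandoc not found"
    elif [ "$PV_MAJOR" -gt 1 ] || { [ "$PV_MAJOR" -eq 1 ] && [ "$PV_MINOR" -ge 8 ]; }
    then
        echo "pandoc $PV is new enough"
    else
        echo "pandoc $PV is too old. disabling"
    fi

Comparing the split fields as integers is what makes 1.10 sort after 1.8 (and after 1.9), which the plain string comparison gets backwards; treating any major version above 1 as new enough also keeps the test meaningful for hypothetical later major releases.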