register snapshots #11565

Merged (2 commits) on Jan 16, 2014
src/libnative/lib.rs: 24 changes (0 additions, 24 deletions)

@@ -39,30 +39,6 @@ static OS_DEFAULT_STACK_ESTIMATE: uint = 1 << 20;
 #[cfg(unix, not(android))]
 static OS_DEFAULT_STACK_ESTIMATE: uint = 2 * (1 << 20);
 
-
-// XXX: this should not exist here
-#[cfg(stage0, nativestart)]
-#[lang = "start"]
-pub fn lang_start(main: *u8, argc: int, argv: **u8) -> int {
-    use std::cast;
-    use std::task;
-
-    do start(argc, argv) {
-        // Instead of invoking main directly on this thread, invoke it on
-        // another spawned thread that we are guaranteed to know the size of the
-        // stack of. Currently, we do not have a method of figuring out the size
-        // of the main thread's stack, so for stack overflow detection to work
-        // we must spawn the task in a subtask which we know the stack size of.
-        let main: extern "Rust" fn() = unsafe { cast::transmute(main) };
-        let mut task = task::task();
-        task.name("<main>");
-        match do task.try { main() } {
-            Ok(()) => { os::set_exit_status(0); }
-            Err(..) => { os::set_exit_status(rt::DEFAULT_ERROR_CODE); }
-        }
-    }
-}
-
 /// Executes the given procedure after initializing the runtime with the given
 /// argc/argv.
 ///
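
The comment in the deleted lang_start explains the one idea worth keeping: the runtime could not discover the size of the OS-provided main stack, so it ran main on a spawned task whose stack size it did know, which made stack-overflow detection reliable. A minimal sketch of the same idea in modern Rust, using only std; the thread name, the 8 MiB stack size, and the exit code 101 are illustrative assumptions, not values taken from this runtime:

use std::thread;

fn main() {
    // Run the real program body on a thread whose stack size we chose
    // ourselves, so overflow detection has a known stack extent to work with.
    let handle = thread::Builder::new()
        .name("<main>".to_string())
        .stack_size(8 * 1024 * 1024) // known size, unlike the OS main stack
        .spawn(real_main)
        .expect("failed to spawn <main> thread");

    // Propagate the child's outcome as the process exit status.
    let code = match handle.join() {
        Ok(()) => 0,
        Err(_) => 101, // illustrative error code for a panicked main
    };
    std::process::exit(code);
}

fn real_main() {
    println!("running with a known stack size");
}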
src/libstd/cleanup.rs: 44 changes (16 additions, 28 deletions)

@@ -17,6 +17,8 @@ use unstable::raw;
 
 type DropGlue<'a> = 'a |**TyDesc, *c_void|;
 
+static RC_IMMORTAL : uint = 0x77777777;
+
 /*
  * Box annihilation
  *
@@ -25,24 +27,21 @@ type DropGlue<'a> = 'a |**TyDesc, *c_void|;
 
 struct AnnihilateStats {
     n_total_boxes: uint,
-    n_unique_boxes: uint,
     n_bytes_freed: uint
 }
 
 unsafe fn each_live_alloc(read_next_before: bool,
-                          f: |alloc: *mut raw::Box<()>, uniq: bool| -> bool)
+                          f: |alloc: *mut raw::Box<()>| -> bool)
                           -> bool {
     //! Walks the internal list of allocations
 
-    use managed;
     use rt::local_heap;
 
    let mut alloc = local_heap::live_allocs();
    while alloc != ptr::mut_null() {
        let next_before = (*alloc).next;
-        let uniq = (*alloc).ref_count == managed::RC_MANAGED_UNIQUE;
 
-        if !f(alloc, uniq) {
+        if !f(alloc) {
            return false;
        }
 
@@ -70,25 +69,19 @@ fn debug_mem() -> bool {
 pub unsafe fn annihilate() {
     use rt::local_heap::local_free;
     use mem;
-    use managed;
 
     let mut stats = AnnihilateStats {
         n_total_boxes: 0,
-        n_unique_boxes: 0,
         n_bytes_freed: 0
     };
 
     // Pass 1: Make all boxes immortal.
     //
     // In this pass, nothing gets freed, so it does not matter whether
     // we read the next field before or after the callback.
-    each_live_alloc(true, |alloc, uniq| {
+    each_live_alloc(true, |alloc| {
         stats.n_total_boxes += 1;
-        if uniq {
-            stats.n_unique_boxes += 1;
-        } else {
-            (*alloc).ref_count = managed::RC_IMMORTAL;
-        }
+        (*alloc).ref_count = RC_IMMORTAL;
         true
     });
 
@@ -97,12 +90,10 @@ pub unsafe fn annihilate() {
     // In this pass, unique-managed boxes may get freed, but not
     // managed boxes, so we must read the `next` field *after* the
     // callback, as the original value may have been freed.
-    each_live_alloc(false, |alloc, uniq| {
-        if !uniq {
-            let tydesc = (*alloc).type_desc;
-            let data = &(*alloc).data as *();
-            ((*tydesc).drop_glue)(data as *i8);
-        }
+    each_live_alloc(false, |alloc| {
+        let tydesc = (*alloc).type_desc;
+        let data = &(*alloc).data as *();
+        ((*tydesc).drop_glue)(data as *i8);
         true
     });
 
@@ -112,22 +103,19 @@ pub unsafe fn annihilate() {
     // unique-managed boxes, though I think that none of those are
     // left), so we must read the `next` field before, since it will
     // not be valid after.
-    each_live_alloc(true, |alloc, uniq| {
-        if !uniq {
-            stats.n_bytes_freed +=
-                (*((*alloc).type_desc)).size
-                + mem::size_of::<raw::Box<()>>();
-            local_free(alloc as *i8);
-        }
+    each_live_alloc(true, |alloc| {
+        stats.n_bytes_freed +=
+            (*((*alloc).type_desc)).size
+            + mem::size_of::<raw::Box<()>>();
+        local_free(alloc as *i8);
         true
     });
 
     if debug_mem() {
         // We do logging here w/o allocation.
         debug!("annihilator stats:\n \
                 total boxes: {}\n \
-                unique boxes: {}\n \
                 bytes freed: {}",
-               stats.n_total_boxes, stats.n_unique_boxes, stats.n_bytes_freed);
+               stats.n_total_boxes, stats.n_bytes_freed);
     }
 }
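
The read_next_before flag threaded through each_live_alloc guards one invariant, spelled out in the pass comments above: if the callback may free the current box, the walker must read the next link before invoking it, because afterwards the link is gone. A small self-contained sketch of that pattern, hard-coding the read-before case; Node, each_node, and payload are hypothetical names, not the runtime's types:

struct Node {
    next: *mut Node,
    payload: u32,
}

unsafe fn each_node(head: *mut Node, mut f: impl FnMut(*mut Node) -> bool) -> bool {
    let mut cur = head;
    while !cur.is_null() {
        // Capture the link BEFORE invoking f: f may free `cur`, after which
        // reading (*cur).next would be a use-after-free.
        let next = (*cur).next;
        if !f(cur) {
            return false;
        }
        cur = next;
    }
    true
}

fn main() {
    // Build a two-node list from leaked boxes, then free it during the walk.
    let b = Box::into_raw(Box::new(Node { next: std::ptr::null_mut(), payload: 2 }));
    let a = Box::into_raw(Box::new(Node { next: b, payload: 1 }));
    unsafe {
        each_node(a, |n| {
            println!("freeing payload {}", (*n).payload);
            drop(Box::from_raw(n)); // frees the node; `next` was read already
            true
        });
    }
}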
src/libstd/managed.rs: 3 changes (0 additions, 3 deletions)

@@ -14,9 +14,6 @@ use ptr::to_unsafe_ptr;
 
 #[cfg(not(test))] use cmp::*;
 
-pub static RC_MANAGED_UNIQUE : uint = (-2) as uint;
-pub static RC_IMMORTAL : uint = 0x77777777;
-
 /// Returns the refcount of a shared box (as just before calling this)
 #[inline]
 pub fn refcount<T>(t: @T) -> uint {
src/libstd/reflect.rs: 16 changes (0 additions, 16 deletions)

@@ -227,14 +227,6 @@ impl<V:TyVisitor + MovePtr> TyVisitor for MovePtrAdaptor<V> {
         true
     }
 
-    #[cfg(stage0)]
-    fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
-        self.align_to::<~u8>();
-        if ! self.inner.visit_uniq_managed(mtbl, inner) { return false; }
-        self.bump_past::<~u8>();
-        true
-    }
-
     fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<*u8>();
         if ! self.inner.visit_ptr(mtbl, inner) { return false; }
@@ -276,14 +268,6 @@ impl<V:TyVisitor + MovePtr> TyVisitor for MovePtrAdaptor<V> {
         true
     }
 
-    #[cfg(stage0)]
-    fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
-        self.align_to::<~[@u8]>();
-        if ! self.inner.visit_evec_uniq_managed(mtbl, inner) { return false; }
-        self.bump_past::<~[@u8]>();
-        true
-    }
-
     fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<&'static [u8]>();
         if ! self.inner.visit_evec_slice(mtbl, inner) { return false; }
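
Every removed visitor here follows the same MovePtrAdaptor recipe: round the cursor up to the field type's alignment (align_to), let the inner visitor look at the value, then advance the cursor past it (bump_past). A standalone sketch of that arithmetic, assuming plain usize offsets rather than the adaptor's wrapped pointer:

use std::mem::{align_of, size_of};

// Round `off` up to the next multiple of `align` (align must be a power of two).
fn align_to(off: usize, align: usize) -> usize {
    (off + align - 1) & !(align - 1)
}

// Align the cursor for a field of type T, then step over the field.
fn bump_past<T>(off: usize) -> usize {
    align_to(off, align_of::<T>()) + size_of::<T>()
}

fn main() {
    // Walk the fields of a hypothetical struct { a: u8, b: u32 } by hand.
    let mut off = 0usize;
    off = bump_past::<u8>(off);  // a: cursor moves to 1
    off = bump_past::<u32>(off); // b: aligned up to 4, then +4, giving 8
    assert_eq!(off, 8);
    println!("cursor ended at offset {off}");
}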
src/libstd/repr.rs: 17 changes (0 additions, 17 deletions)

@@ -310,15 +310,6 @@ impl<'a> TyVisitor for ReprVisitor<'a> {
         })
     }
 
-    #[cfg(stage0)]
-    fn visit_uniq_managed(&mut self, _mtbl: uint, inner: *TyDesc) -> bool {
-        self.writer.write(['~' as u8]);
-        self.get::<&raw::Box<()>>(|this, b| {
-            let p = ptr::to_unsafe_ptr(&b.data) as *c_void;
-            this.visit_ptr_inner(p, inner);
-        })
-    }
-
     fn visit_ptr(&mut self, mtbl: uint, _inner: *TyDesc) -> bool {
         self.get::<*c_void>(|this, p| {
             write!(this.writer, "({} as *", *p);
@@ -359,14 +350,6 @@ impl<'a> TyVisitor for ReprVisitor<'a> {
         })
     }
 
-    #[cfg(stage0)]
-    fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
-        self.get::<&raw::Box<raw::Vec<()>>>(|this, b| {
-            this.writer.write(['~' as u8]);
-            this.write_unboxed_vec_repr(mtbl, &b.data, inner);
-        })
-    }
-
     fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.get::<raw::Slice<()>>(|this, s| {
             this.writer.write(['&' as u8]);
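
Both removed methods share one shape: print a one-byte sigil for the pointer kind ('~' for the pre-1.0 owned box), then recurse into the pointee through the same writer. A sketch of that shape against std's fmt machinery; write_repr_boxed is a hypothetical stand-in for the visitor, not this module's API:

use std::fmt::{self, Write};

// Emit the owned-box sigil, then the pointee's own representation.
fn write_repr_boxed(out: &mut String, inner: &dyn fmt::Debug) -> fmt::Result {
    out.write_char('~')?;       // sigil for the pointer kind
    write!(out, "{:?}", inner)  // then recurse into the pointee
}

fn main() {
    let mut s = String::new();
    write_repr_boxed(&mut s, &[1u8, 2, 3]).unwrap();
    assert_eq!(s, "~[1, 2, 3]");
    println!("{s}");
}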
src/libstd/unstable/intrinsics.rs: 39 changes (1 addition, 38 deletions)

@@ -47,41 +47,8 @@ pub use realstd::unstable::intrinsics::{TyDesc, Opaque, TyVisitor, TypeId};
 
 pub type GlueFn = extern "Rust" fn(*i8);
 
-// NOTE remove after next snapshot
-#[lang="ty_desc"]
-#[cfg(not(test), stage0)]
-pub struct TyDesc {
-    // sizeof(T)
-    size: uint,
-
-    // alignof(T)
-    align: uint,
-
-    // Called on a copy of a value of type `T` *after* memcpy
-    take_glue: GlueFn,
-
-    // Called when a value of type `T` is no longer needed
-    drop_glue: GlueFn,
-
-    // Called by drop glue when a value of type `T` can be freed
-    free_glue: GlueFn,
-
-    // Called by reflection visitor to visit a value of type `T`
-    visit_glue: GlueFn,
-
-    // If T represents a box pointer (`@U` or `~U`), then
-    // `borrow_offset` is the amount that the pointer must be adjusted
-    // to find the payload. This is always derivable from the type
-    // `U`, but in the case of `@Trait` or `~Trait` objects, the type
-    // `U` is unknown.
-    borrow_offset: uint,
-
-    // Name corresponding to the type
-    name: &'static str
-}
-
 #[lang="ty_desc"]
-#[cfg(not(test), not(stage0))]
+#[cfg(not(test))]
 pub struct TyDesc {
     // sizeof(T)
     size: uint,
@@ -139,17 +106,13 @@ pub trait TyVisitor {
 
     fn visit_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
-    #[cfg(stage0)]
-    fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_rptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
 
     fn visit_vec(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_unboxed_vec(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
-    #[cfg(stage0)]
-    fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_fixed(&mut self, n: uint, sz: uint, align: uint,
                         mtbl: uint, inner: *TyDesc) -> bool;
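
The surviving TyDesc is a per-type record of layout facts plus compiler-generated glue functions, as its field comments describe. A hedged sketch of what such a record carries, with a reduced field set and a plain fn pointer in place of the compiler's glue; MiniTyDesc and ty_desc_of are hypothetical names, and drop_in_place stands in for drop glue:

use std::mem::{align_of, size_of};

struct MiniTyDesc {
    size: usize,                   // sizeof(T)
    align: usize,                  // alignof(T)
    drop_glue: unsafe fn(*mut u8), // called when a value of T is no longer needed
    name: &'static str,            // name corresponding to the type
}

// A monomorphized stand-in for drop glue: run T's destructor in place.
unsafe fn drop_glue_impl<T>(p: *mut u8) {
    std::ptr::drop_in_place(p as *mut T);
}

fn ty_desc_of<T>() -> MiniTyDesc {
    MiniTyDesc {
        size: size_of::<T>(),
        align: align_of::<T>(),
        drop_glue: drop_glue_impl::<T>,
        name: std::any::type_name::<T>(),
    }
}

fn main() {
    let d = ty_desc_of::<Vec<u8>>();
    println!("{}: size={} align={}", d.name, d.size, d.align);

    // Exercise the glue on a real value, the way an annihilator pass would.
    let mut v = std::mem::ManuallyDrop::new(vec![1u8, 2, 3]);
    unsafe { (d.drop_glue)(&mut *v as *mut Vec<u8> as *mut u8) };
}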