finalization

This commit is contained in:
playX18 2025-02-13 22:32:44 +07:00
parent e20870c604
commit ea9c1887c7
10 changed files with 321 additions and 51 deletions

View file

@@ -26,7 +26,7 @@ sysinfo = "0.33.1"
default = ["uncooperative"]
uncooperative = ["cooperative", "mmtk/immix_non_moving"]
uncooperative = ["cooperative", "mmtk/immix_non_moving", "mmtk/immix_zero_on_release"]
# VMKit is built for use in cooperative runtime. Such runtime
# would be able to use write barriers and safepoints. Such environment

View file

@@ -1,4 +1,5 @@
use mmtk::util::Address;
use mmtk::vm::slot::UnimplementedMemorySlice;
use mmtk::{util::options::PlanSelector, vm::slot::SimpleSlot, AllocationSemantics, MMTKBuilder};
use std::cell::RefCell;
use std::mem::offset_of;
@@ -65,6 +66,7 @@ impl VirtualMachine for BenchVM {
type Metadata = &'static GCMetadata<Self>;
type Slot = SimpleSlot;
type ThreadContext = ThreadBenchContext;
type MemorySlice = UnimplementedMemorySlice;
fn get() -> &'static Self {
VM.get().unwrap()
}

View file

@@ -34,7 +34,10 @@ use crate::{
use easy_bitfield::*;
use mmtk::{
util::Address,
vm::{slot::SimpleSlot, ObjectTracer},
vm::{
slot::{SimpleSlot, UnimplementedMemorySlice},
ObjectTracer,
},
AllocationSemantics, MMTKBuilder,
};
use parking_lot::{Mutex, Once};
@@ -150,6 +153,7 @@ impl VirtualMachine for BDWGC {
type BlockAdapterList = (GCBlockAdapter, ());
type Metadata = BDWGCMetadata;
type Slot = SimpleSlot;
type MemorySlice = UnimplementedMemorySlice<Self::Slot>;
fn get() -> &'static Self {
BDWGC_VM.get().expect("GC is not initialized")
@@ -501,7 +505,16 @@ static INIT: Once = Once::new();
#[no_mangle]
pub static mut GC_VERBOSE: i32 = 0;
static BUILDER: LazyLock<Mutex<MMTKBuilder>> = LazyLock::new(|| Mutex::new(MMTKBuilder::new()));
static BUILDER: LazyLock<Mutex<MMTKBuilder>> = LazyLock::new(|| {
Mutex::new({
let mut builder = MMTKBuilder::new();
builder
.options
.plan
.set(mmtk::util::options::PlanSelector::Immix);
builder
})
});
#[no_mangle]
pub extern "C-unwind" fn GC_get_parallel() -> libc::c_int {
@@ -1135,61 +1148,97 @@ pub extern "C-unwind" fn GC_general_register_disappearing_link(
#[no_mangle]
pub extern "C-unwind" fn GC_register_finalizer(
obj: *mut libc::c_void,
finalizer: extern "C" fn(*mut libc::c_void),
finalizer: extern "C-unwind" fn(*mut libc::c_void, *mut libc::c_void),
cd: *mut libc::c_void,
ofn: *mut extern "C" fn(*mut libc::c_void),
ocd: *mut *mut libc::c_void,
) {
let _ = obj; // TBD
let _ = finalizer; // TBD
let _ = cd; // TBD
let _ = ofn; // TBD
let _ = ocd; // TBD
let object = GC_base(obj);
if object.is_null() {
return;
}
let _ = ofn;
let _ = ocd;
let cd = Address::from_mut_ptr(cd);
let object = VMKitObject::from_address(Address::from_mut_ptr(object));
MemoryManager::<BDWGC>::register_finalizer(
object,
Box::new(move |object| {
finalizer(object.as_address().to_mut_ptr(), cd.to_mut_ptr());
}),
);
}
#[no_mangle]
pub extern "C-unwind" fn GC_register_finalizer_ignore_self(
obj: *mut libc::c_void,
finalizer: extern "C" fn(*mut libc::c_void),
finalizer: extern "C" fn(*mut libc::c_void, *mut libc::c_void),
cd: *mut libc::c_void,
ofn: *mut extern "C" fn(*mut libc::c_void),
ocd: *mut *mut libc::c_void,
) {
let _ = obj; // TBD
let _ = finalizer; // TBD
let _ = cd; // TBD
let _ = ofn; // TBD
let _ = ocd; // TBD
let object = GC_base(obj);
if object.is_null() {
return;
}
let _ = ofn;
let _ = ocd;
let cd = Address::from_mut_ptr(cd);
let object = VMKitObject::from_address(Address::from_mut_ptr(object));
MemoryManager::<BDWGC>::register_finalizer(
object,
Box::new(move |object| {
finalizer(object.as_address().to_mut_ptr(), cd.to_mut_ptr());
}),
);
}
#[no_mangle]
pub extern "C-unwind" fn GC_register_finalizer_no_order(
obj: *mut libc::c_void,
finalizer: extern "C" fn(*mut libc::c_void),
finalizer: extern "C" fn(*mut libc::c_void, *mut libc::c_void),
cd: *mut libc::c_void,
ofn: *mut extern "C" fn(*mut libc::c_void),
ocd: *mut *mut libc::c_void,
) {
let _ = obj; // TBD
let _ = finalizer; // TBD
let _ = cd; // TBD
let _ = ofn; // TBD
let _ = ocd; // TBD
let object = GC_base(obj);
if object.is_null() {
return;
}
let _ = ofn;
let _ = ocd;
let cd = Address::from_mut_ptr(cd);
let object = VMKitObject::from_address(Address::from_mut_ptr(object));
MemoryManager::<BDWGC>::register_finalizer(
object,
Box::new(move |object| {
finalizer(object.as_address().to_mut_ptr(), cd.to_mut_ptr());
}),
);
}
#[no_mangle]
pub extern "C-unwind" fn GC_register_finalizer_unreachable(
obj: *mut libc::c_void,
finalizer: extern "C" fn(*mut libc::c_void),
finalizer: extern "C" fn(*mut libc::c_void, *mut libc::c_void),
cd: *mut libc::c_void,
ofn: *mut extern "C" fn(*mut libc::c_void),
ocd: *mut *mut libc::c_void,
) {
let _ = obj; // TBD
let _ = finalizer; // TBD
let _ = cd; // TBD
let _ = ofn; // TBD
let _ = ocd; // TBD
let object = GC_base(obj);
if object.is_null() {
return;
}
let _ = ofn;
let _ = ocd;
let cd = Address::from_mut_ptr(cd);
let object = VMKitObject::from_address(Address::from_mut_ptr(object));
MemoryManager::<BDWGC>::register_finalizer(
object,
Box::new(move |object| {
finalizer(object.as_address().to_mut_ptr(), cd.to_mut_ptr());
}),
);
}
#[no_mangle]
@@ -1441,3 +1490,8 @@ pub unsafe extern "C-unwind" fn GC_malloc_explicitly_typed_ignore_off_page(
let _ = descr;
GC_malloc_ignore_off_page(size)
}
#[no_mangle]
pub extern "C-unwind" fn GC_invoke_finalizers() -> usize {
MemoryManager::<BDWGC>::run_finalizers()
}

View file

@@ -4,8 +4,9 @@ use std::{
};
use mm::{aslr::aslr_vm_layout, traits::SlotExtra, MemoryManager};
use mmtk::{MMTKBuilder, MMTK};
use threading::{initialize_threading, ThreadManager};
use mmtk::{vm::slot::MemorySlice, MMTKBuilder, MMTK};
use object_model::object::VMKitObject;
use threading::{initialize_threading, Thread, ThreadManager};
pub mod machine_context;
pub mod mm;
@@ -24,7 +25,7 @@ pub trait VirtualMachine: Sized + 'static + Send + Sync {
type BlockAdapterList: threading::BlockAdapterList<Self>;
type Metadata: object_model::metadata::Metadata<Self>;
type Slot: SlotExtra;
type MemorySlice: MemorySlice<SlotType = Self::Slot>;
const ALIGNMENT_VALUE: u32 = 0xdead_beef;
const MAX_ALIGNMENT: usize = 32;
const MIN_ALIGNMENT: usize = 8;
@@ -113,6 +114,38 @@ pub trait VirtualMachine: Sized + 'static + Send + Sync {
eprintln!("Out of memory: {:?}", err_kind);
std::process::exit(1);
}
/// Weak and soft references always clear the referent before enqueueing.
fn clear_referent(new_reference: VMKitObject) {
let _ = new_reference;
unimplemented!()
}
/// Get the referent from a weak reference object.
fn get_referent(object: VMKitObject) -> VMKitObject {
let _ = object;
unimplemented!()
}
/// Set the referent in a weak reference object.
fn set_referent(reff: VMKitObject, referent: VMKitObject) {
let _ = reff;
let _ = referent;
unimplemented!()
}
/// For weak reference types, if the referent is cleared during GC, the reference
/// will be added to a queue, and MMTk will call this method to inform
/// the VM about the changes for those references. This method is used
/// to implement Java's ReferenceQueue.
/// Note that this method is called for each type of weak references during GC, and
/// the references slice will be cleared after this call is returned. That means
/// MMTk will no longer keep these references alive once this method is returned.
fn enqueue_references(references: impl Iterator<Item = VMKitObject>, tls: &Thread<Self>) {
let _ = references;
let _ = tls;
unimplemented!()
}
}
pub struct VMKit<VM: VirtualMachine> {

View file

@@ -21,7 +21,8 @@ use mmtk::{
},
AllocationSemantics, BarrierSelector, MutatorContext,
};
use std::marker::PhantomData;
use ref_glue::Finalizer;
use std::{marker::PhantomData, panic::AssertUnwindSafe};
#[derive(Clone, Copy)]
pub struct MemoryManager<VM: VirtualMachine>(PhantomData<VM>);
@@ -487,6 +488,40 @@ impl<VM: VirtualMachine> MemoryManager<VM> {
.load(atomic::Ordering::SeqCst)
== 0
}
pub fn register_finalizer(object: VMKitObject, callback: Box<dyn FnOnce(VMKitObject) + Send>) {
let finalizer = Finalizer {
object,
callback: Some(callback),
};
let vm = VM::get();
mmtk::memory_manager::add_finalizer(&vm.vmkit().mmtk, finalizer);
}
pub fn run_finalizers() -> usize {
let vm = VM::get();
let mut count = 0;
while let Some(mut finalizer) = mmtk::memory_manager::get_finalized_object(&vm.vmkit().mmtk) {
let _ = std::panic::catch_unwind(AssertUnwindSafe(|| finalizer.run()));
count += 1;
}
count
}
pub fn get_finalizers_for(object: VMKitObject) -> Vec<Finalizer> {
if object.is_null() {
return vec![];
}
let vm = VM::get();
mmtk::memory_manager::get_finalizers_for(&vm.vmkit().mmtk, unsafe {
object.as_object_unchecked()
})
}
pub fn get_finalized_object() -> Option<Finalizer> {
let vm = VM::get();
mmtk::memory_manager::get_finalized_object(&vm.vmkit().mmtk)
}
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]

View file

@@ -37,6 +37,7 @@ impl ConservativeRoots {
return;
};
println!("found {}", start);
self.roots.insert(start);
}

View file

@@ -4,46 +4,72 @@ use std::marker::PhantomData;
use mmtk::vm::{Finalizable, ReferenceGlue};
use crate::{object_model::object::VMKitObject, VirtualMachine};
use crate::{object_model::object::VMKitObject, threading::Thread, VirtualMachine};
use super::MemoryManager;
pub struct VMKitReferenceGlue<VM: VirtualMachine>(PhantomData<VM>);
impl<VM: VirtualMachine> ReferenceGlue<MemoryManager<VM>> for VMKitReferenceGlue<VM> {
type FinalizableType = VMKitObject;
type FinalizableType = Finalizer;
fn clear_referent(new_reference: mmtk::util::ObjectReference) {
VM::clear_referent(new_reference.into());
}
fn enqueue_references(references: &[mmtk::util::ObjectReference], tls: mmtk::util::VMWorkerThread) {
fn enqueue_references(
references: &[mmtk::util::ObjectReference],
tls: mmtk::util::VMWorkerThread,
) {
VM::enqueue_references(
references.iter().copied().map(VMKitObject::from),
Thread::<VM>::from_vm_worker_thread(tls),
);
}
fn get_referent(object: mmtk::util::ObjectReference) -> Option<mmtk::util::ObjectReference> {
todo!()
VM::get_referent(object.into()).try_into().ok()
}
fn set_referent(reff: mmtk::util::ObjectReference, referent: mmtk::util::ObjectReference) {
todo!()
VM::set_referent(reff.into(), referent.into());
}
}
impl Finalizable for VMKitObject {
impl Finalizable for Finalizer {
fn get_reference(&self) -> mmtk::util::ObjectReference {
todo!()
unsafe { self.object.as_object_unchecked() }
}
fn keep_alive<E: mmtk::scheduler::ProcessEdgesWork>(&mut self, trace: &mut E) {
unsafe {
let mmtk_obj = self.object.as_object_unchecked();
let new = trace.trace_object(mmtk_obj);
self.object = VMKitObject::from(new);
}
}
fn set_reference(&mut self, object: mmtk::util::ObjectReference) {
self.object = VMKitObject::from(object);
}
}
}
pub struct Finalizer {
pub object: VMKitObject,
pub callback: Option<Box<dyn FnOnce(VMKitObject) + Send>>,
}
impl Finalizer {
pub fn run(&mut self) {
match self.callback.take() {
Some(callback) => callback(self.object),
None => {}
}
}
}
impl std::fmt::Debug for Finalizer {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "#<finalizer {:x}>", self.object.as_address(),)
}
}

View file

@@ -100,6 +100,7 @@ pub trait SlotExtra: Slot {
/// as internally we use `VMKitObject` to represent all objects.
fn from_vmkit_object(object: &VMKitObject) -> Self;
fn from_address(address: Address) -> Self;
fn as_address(&self) -> Address;
/// Construct a slot from an `InternalPointer`. VMs are not required to implement
/// this as InternalPointer can also be traced.
@@ -135,6 +136,10 @@ impl SlotExtra for SimpleSlot {
let _ = pointer;
unimplemented!("SimpleSlot does not support internal pointers")
}
fn as_address(&self) -> Address {
SimpleSlot::as_address(self)
}
}
impl SlotExtra for Address {
@@ -150,6 +155,10 @@ impl SlotExtra for Address {
let _ = pointer;
unimplemented!("Address does not support internal pointers")
}
fn as_address(&self) -> Address {
*self
}
}
/// Trait to check if type can be enqueued as a slot of an object.

View file

@@ -1,11 +1,15 @@
use std::fmt;
use crate::mm::traits::SlotExtra;
use crate::threading::Thread;
use crate::{mm::MemoryManager, VirtualMachine};
use atomic::Atomic;
use core::ops::Range;
use std::hash::Hash;
use mmtk::util::{
constants::LOG_BYTES_IN_ADDRESS, conversions::raw_align_up, Address, ObjectReference,
};
use mmtk::vm::slot::{MemorySlice, SimpleSlot, Slot};
use std::fmt;
use std::marker::PhantomData;
use super::{
compression::CompressedOps,
@@ -152,7 +156,7 @@ impl VMKitObject {
align_of::<usize>(),
) + overhead
};
res
}
@@ -606,3 +610,108 @@ impl VMKitNarrow {
self.to_object().object_start::<VM>()
}
}
pub struct SimpleMemorySlice<SL: Slot = SimpleSlot> {
range: Range<SL>,
}
impl<SL: SlotExtra> SimpleMemorySlice<SL> {
pub fn from(value: Range<SL>) -> Self {
Self { range: value }
}
}
impl<SL: SlotExtra> fmt::Debug for SimpleMemorySlice<SL> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "SimpleMemorySlice({:?})", self.range)
}
}
impl<SL: SlotExtra> Hash for SimpleMemorySlice<SL> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.range.start.as_address().hash(state);
self.range.end.as_address().hash(state);
}
}
impl<SL: SlotExtra> PartialEq for SimpleMemorySlice<SL> {
fn eq(&self, other: &Self) -> bool {
self.range == other.range
}
}
impl<SL: SlotExtra> Eq for SimpleMemorySlice<SL> {}
impl<SL: SlotExtra> Clone for SimpleMemorySlice<SL> {
fn clone(&self) -> Self {
Self { range: self.range.clone() }
}
}
pub struct SimpleMemorySliceRangeIterator<SL: SlotExtra = SimpleSlot> {
cursor: Address,
end: Address,
marker: PhantomData<SL>,
}
impl<SL: SlotExtra> Iterator for SimpleMemorySliceRangeIterator<SL> {
type Item = SL;
fn next(&mut self) -> Option<Self::Item> {
if self.cursor < self.end {
let res = self.cursor;
self.cursor = self.cursor + size_of::<SL>();
Some(SL::from_address(res))
} else {
None
}
}
}
impl<SL: SlotExtra> From<SimpleMemorySlice<SL>> for SimpleMemorySliceRangeIterator<SL> {
fn from(value: SimpleMemorySlice<SL>) -> Self {
let start = value.range.start.as_address();
let end = value.range.end.as_address();
Self {
cursor: start,
end,
marker: PhantomData,
}
}
}
impl<SL: SlotExtra> MemorySlice for SimpleMemorySlice<SL> {
type SlotType = SL;
type SlotIterator = SimpleMemorySliceRangeIterator<SL>;
fn iter_slots(&self) -> Self::SlotIterator {
SimpleMemorySliceRangeIterator {
cursor: self.range.start.as_address(),
end: self.range.end.as_address(),
marker: PhantomData,
}
}
fn object(&self) -> Option<ObjectReference> {
None
}
fn start(&self) -> Address {
self.range.start.as_address()
}
fn bytes(&self) -> usize {
self.range.end.as_address() - self.range.start.as_address()
}
fn copy(src: &Self, tgt: &Self) {
unsafe {
let bytes = tgt.bytes();
let src = src.start().to_ptr::<u8>();
let dst = tgt.start().to_mut_ptr::<u8>();
std::ptr::copy(src, dst, bytes);
}
}
}

View file

@@ -1864,6 +1864,7 @@ extern "C-unwind" fn signal_handler_suspend_resume<VM: VirtualMachine>(
return;
}
thread
.stack_pointer
.store(approximate_stack_pointer.as_usize(), Ordering::Release);