use std::marker::PhantomData;

use crate::{mm::MemoryManager, VirtualMachine};
use easy_bitfield::BitFieldTrait;
use header::{HashState, HashStateField, HASHCODE_OFFSET, OBJECT_HEADER_OFFSET, OBJECT_REF_OFFSET};
use mmtk::{
    util::{alloc::fill_alignment_gap, constants::LOG_BYTES_IN_ADDRESS, ObjectReference},
    vm::*,
};
use object::{MoveTarget, VMKitObject};

pub mod header;
pub mod metadata;
pub mod object;
pub mod compression;

pub struct VMKitObjectModel<VM: VirtualMachine>(PhantomData<VM>);

pub const LOGGING_SIDE_METADATA_SPEC: VMGlobalLogBitSpec = VMGlobalLogBitSpec::side_first();
pub const FORWARDING_POINTER_METADATA_SPEC: VMLocalForwardingPointerSpec =
    VMLocalForwardingPointerSpec::in_header(0);
pub const FORWARDING_BITS_METADATA_SPEC: VMLocalForwardingBitsSpec =
    VMLocalForwardingBitsSpec::in_header(HashStateField::NEXT_BIT as _);
pub const MARKING_METADATA_SPEC: VMLocalMarkBitSpec = VMLocalMarkBitSpec::side_first();
pub const LOS_METADATA_SPEC: VMLocalLOSMarkNurserySpec =
    VMLocalLOSMarkNurserySpec::in_header(HashStateField::NEXT_BIT as _);

impl<VM: VirtualMachine> ObjectModel<MemoryManager<VM>> for VMKitObjectModel<VM> {
    const GLOBAL_LOG_BIT_SPEC: mmtk::vm::VMGlobalLogBitSpec = LOGGING_SIDE_METADATA_SPEC;
    const LOCAL_FORWARDING_POINTER_SPEC: mmtk::vm::VMLocalForwardingPointerSpec =
        FORWARDING_POINTER_METADATA_SPEC;
    const LOCAL_FORWARDING_BITS_SPEC: mmtk::vm::VMLocalForwardingBitsSpec =
        FORWARDING_BITS_METADATA_SPEC;
    const LOCAL_MARK_BIT_SPEC: mmtk::vm::VMLocalMarkBitSpec = MARKING_METADATA_SPEC;
    const LOCAL_LOS_MARK_NURSERY_SPEC: mmtk::vm::VMLocalLOSMarkNurserySpec = LOS_METADATA_SPEC;

    const OBJECT_REF_OFFSET_LOWER_BOUND: isize = OBJECT_REF_OFFSET;
    const UNIFIED_OBJECT_REFERENCE_ADDRESS: bool = false;
    const VM_WORST_CASE_COPY_EXPANSION: f64 = 1.3;

    #[cfg(feature = "cooperative")]
    const NEED_VO_BITS_DURING_TRACING: bool = VM::CONSERVATIVE_TRACING;

    fn copy(
        from: mmtk::util::ObjectReference,
        semantics: mmtk::util::copy::CopySemantics,
        copy_context: &mut mmtk::util::copy::GCWorkerCopyContext<MemoryManager<VM>>,
    ) -> mmtk::util::ObjectReference {
        let vmkit_from = VMKitObject::from(from);
        let bytes = vmkit_from.bytes_required_when_copied::<VM>();
        let align = vmkit_from.alignment::<VM>();
        let offset = vmkit_from.get_offset_for_alignment::<VM>();

        let addr = copy_context.alloc_copy(from, bytes, align, offset, semantics);
        let vmkit_to_obj = Self::move_object(vmkit_from, MoveTarget::ToAddress(addr), bytes);
        let to_obj = ObjectReference::from_raw_address(vmkit_to_obj.as_address()).unwrap();

        copy_context.post_copy(to_obj, bytes, semantics);
        to_obj
    }

    fn copy_to(
        from: mmtk::util::ObjectReference,
        to: mmtk::util::ObjectReference,
        region: mmtk::util::Address,
    ) -> mmtk::util::Address {
        let vmkit_from = VMKitObject::from(from);
        let copy = from != to;
        let bytes = if copy {
            let vmkit_to = VMKitObject::from(to);
            let bytes = vmkit_to.bytes_required_when_copied::<VM>();
            Self::move_object(vmkit_from, MoveTarget::ToObject(vmkit_to), bytes);
            bytes
        } else {
            vmkit_from.bytes_used::<VM>()
        };

        let start = Self::ref_to_object_start(to);
        fill_alignment_gap::<MemoryManager<VM>>(region, start);

        start + bytes
    }

    fn get_reference_when_copied_to(
        from: mmtk::util::ObjectReference,
        to: mmtk::util::Address,
    ) -> mmtk::util::ObjectReference {
        let vmkit_from = VMKitObject::from(from);
        let res_addr = to + OBJECT_REF_OFFSET + vmkit_from.hashcode_overhead::<VM, true>();
        debug_assert!(!res_addr.is_zero());
        // SAFETY: we just checked that the address is not zero
        unsafe { ObjectReference::from_raw_address_unchecked(res_addr) }
    }

    fn get_type_descriptor(_reference: mmtk::util::ObjectReference) -> &'static [i8] {
        unreachable!()
    }
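    // `get_type_descriptor` is not called by current MMTk plans, and vmkit does
    // not maintain MMTk-visible type descriptors, so reaching it would indicate
    // a bug in the binding; `unreachable!()` keeps that loud. The accessors
    // below simply delegate to `VMKitObject`, which derives sizes, alignment,
    // and the object start from the per-object header.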
    fn get_align_offset_when_copied(object: mmtk::util::ObjectReference) -> usize {
        VMKitObject::from_objref_nullable(Some(object)).get_offset_for_alignment::<VM>()
    }

    fn get_align_when_copied(object: mmtk::util::ObjectReference) -> usize {
        VMKitObject::from(object).alignment::<VM>()
    }

    fn get_current_size(object: mmtk::util::ObjectReference) -> usize {
        VMKitObject::from_objref_nullable(Some(object)).get_current_size::<VM>()
    }

    fn get_size_when_copied(object: mmtk::util::ObjectReference) -> usize {
        VMKitObject::from_objref_nullable(Some(object)).get_size_when_copied::<VM>()
    }

    fn ref_to_object_start(reference: mmtk::util::ObjectReference) -> mmtk::util::Address {
        VMKitObject::from_objref_nullable(Some(reference)).object_start::<VM>()
    }

    fn ref_to_header(reference: mmtk::util::ObjectReference) -> mmtk::util::Address {
        reference.to_raw_address().offset(OBJECT_HEADER_OFFSET)
    }

    fn dump_object(object: mmtk::util::ObjectReference) {
        let _ = object;
    }
}

impl<VM: VirtualMachine> VMKitObjectModel<VM> {
    fn move_object(from_obj: VMKitObject, mut to: MoveTarget, num_bytes: usize) -> VMKitObject {
        log::trace!(
            "move_object: from_obj: {}, to: {}, bytes={}",
            from_obj.as_address(),
            to,
            num_bytes
        );
        let mut copy_bytes = num_bytes;
        let mut obj_ref_offset = OBJECT_REF_OFFSET;
        let hash_state = from_obj.header::<VM>().hash_state();

        // Adjust copy bytes and object reference offset based on hash state
        match hash_state {
            HashState::Hashed => {
                copy_bytes -= size_of::<usize>(); // Exclude hash code from copy
                if let MoveTarget::ToAddress(ref mut addr) = to {
                    *addr += size_of::<usize>(); // Adjust address for hash code
                }
            }
            HashState::HashedAndMoved => {
                obj_ref_offset += size_of::<usize>() as isize; // Adjust for larger header
            }
            _ => {}
        }

        // Determine target address and object based on MoveTarget
        let (to_address, to_obj) = match to {
            MoveTarget::ToAddress(addr) => {
                let obj = VMKitObject::from_address(addr + obj_ref_offset);
                (addr, obj)
            }
            MoveTarget::ToObject(object) => {
                let addr = object.as_address() + (-obj_ref_offset);
                (addr, object)
            }
        };

        let from_address = from_obj.as_address() + (-obj_ref_offset);

        // Perform the memory copy
        unsafe {
            std::ptr::copy(
                from_address.to_ptr::<u8>(),
                to_address.to_mut_ptr::<u8>(),
                copy_bytes,
            );
        }

        // Update hash state if necessary
        if hash_state == HashState::Hashed {
            unsafe {
                // First move of a hashed object: its identity hash is its
                // pre-move address, which must now be stored behind the header.
                let hash_code = from_obj.as_address().as_usize() >> LOG_BYTES_IN_ADDRESS;
                to_obj
                    .as_address()
                    .offset(HASHCODE_OFFSET)
                    .store(hash_code as u64);
                to_obj
                    .header::<VM>()
                    .set_hash_state(HashState::HashedAndMoved);
            }
        } else {
            // Unhashed or already HashedAndMoved: carry the state over
            // unchanged. (Unconditionally writing `Hashed` here would discard
            // the stored hash code when a HashedAndMoved object moves again.)
            to_obj.header::<VM>().set_hash_state(hash_state);
        }

        to_obj
    }
}
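
// A minimal sketch of the address-based hashing arithmetic `move_object`
// relies on, kept separate from the binding itself. It assumes a 64-bit
// target (so `LOG_BYTES_IN_ADDRESS` is 3) and uses a made-up address; the
// point is only that the identity hash is the object's pre-move address with
// the uninformative alignment bits shifted away.
#[cfg(test)]
mod address_hash_sketch {
    #[test]
    fn hash_is_shifted_address() {
        // Hypothetical address of an 8-byte-aligned object before it moves.
        let from_address: usize = 0x7f00_0040;
        let log_bytes_in_address = 3; // 64-bit words are 8 bytes
        let hash = from_address >> log_bytes_in_address;
        // The low bits are always zero under alignment, so dropping them
        // loses no information while keeping hash values dense.
        assert_eq!(hash, 0x0fe0_0008);
    }
}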