From 57ae451644ffb2d97cda0caaee989fa7abbfb83c Mon Sep 17 00:00:00 2001 From: Nick Fitzgerald Date: Fri, 13 Sep 2024 11:38:35 -0700 Subject: [PATCH] Implement the `struct.*` GC instructions This commit implements the `struct.*` instructions for the GC proposal. These instructions allow allocating new structs and getting and setting their fields. The implemented instructions are: * `struct.new` * `struct.new_default` * `struct.get` * `struct.get_s` * `struct.get_u` * `struct.set` The `struct.new*` instructions are also allowed in constant expressions, but support for that is not yet implemented. Co-Authored-By: Trevor Elliott --- cranelift/wasm/src/code_translator.rs | 86 ++- cranelift/wasm/src/environ/mod.rs | 2 +- cranelift/wasm/src/environ/spec.rs | 59 ++ cranelift/wasm/src/lib.rs | 4 +- crates/cranelift/src/func_environ.rs | 102 +++- crates/cranelift/src/gc.rs | 106 +++- crates/cranelift/src/gc/disabled.rs | 91 ++- crates/cranelift/src/gc/enabled.rs | 554 +++++++++++++++++- crates/environ/src/builtin.rs | 11 + crates/environ/src/gc.rs | 45 +- crates/environ/src/gc/drc.rs | 17 +- crates/wasmtime/src/runtime/vm/gc.rs | 5 + crates/wasmtime/src/runtime/vm/gc/gc_ref.rs | 1 + crates/wasmtime/src/runtime/vm/instance.rs | 1 - crates/wasmtime/src/runtime/vm/libcalls.rs | 52 ++ crates/wast/src/core.rs | 16 + crates/wast/src/wast.rs | 2 + tests/disas/gc/struct-get.wat | 231 ++++++++ tests/disas/gc/struct-new-default.wat | 107 ++++ tests/disas/gc/struct-new.wat | 119 ++++ tests/disas/gc/struct-set.wat | 205 +++++++ .../gc/struct-instructions.wast | 131 +++++ 22 files changed, 1891 insertions(+), 56 deletions(-) create mode 100644 tests/disas/gc/struct-get.wat create mode 100644 tests/disas/gc/struct-new-default.wat create mode 100644 tests/disas/gc/struct-new.wat create mode 100644 tests/disas/gc/struct-set.wat create mode 100644 tests/misc_testsuite/gc/struct-instructions.wast diff --git a/cranelift/wasm/src/code_translator.rs b/cranelift/wasm/src/code_translator.rs index 333b5d6924bf..a4e81ef87b80 100644 --- a/cranelift/wasm/src/code_translator.rs +++ b/cranelift/wasm/src/code_translator.rs @@ -74,7 +74,7 @@ mod bounds_checks; use super::{hash_map, HashMap}; -use crate::environ::{FuncEnvironment, GlobalVariable}; +use crate::environ::{FuncEnvironment, GlobalVariable, StructFieldsVec}; use crate::state::{ControlStackFrame, ElseData, FuncTranslationState}; use crate::translation_utils::{ block_with_params, blocktype_params_results, f32_translation, f64_translation, @@ -2501,6 +2501,82 @@ pub fn translate_operator( state.push1(val); } + Operator::StructNew { struct_type_index } => { + let struct_type_index = TypeIndex::from_u32(*struct_type_index); + let arity = environ.struct_fields_len(struct_type_index)?; + let fields: StructFieldsVec = state.peekn(arity).iter().copied().collect(); + state.popn(arity); + let struct_ref = environ.translate_struct_new(builder, struct_type_index, fields)?; + state.push1(struct_ref); + } + + Operator::StructNewDefault { struct_type_index } => { + let struct_type_index = TypeIndex::from_u32(*struct_type_index); + let struct_ref = environ.translate_struct_new_default(builder, struct_type_index)?; + state.push1(struct_ref); + } + + Operator::StructSet { + struct_type_index, + field_index, + } => { + let struct_type_index = TypeIndex::from_u32(*struct_type_index); + let val = state.pop1(); + let struct_ref = state.pop1(); + environ.translate_struct_set( + builder, + struct_type_index, + *field_index, + struct_ref, + val, + )?; + } + + Operator::StructGetS { + 
struct_type_index, + field_index, + } => { + let struct_type_index = TypeIndex::from_u32(*struct_type_index); + let struct_ref = state.pop1(); + let val = environ.translate_struct_get_s( + builder, + struct_type_index, + *field_index, + struct_ref, + )?; + state.push1(val); + } + + Operator::StructGetU { + struct_type_index, + field_index, + } => { + let struct_type_index = TypeIndex::from_u32(*struct_type_index); + let struct_ref = state.pop1(); + let val = environ.translate_struct_get_u( + builder, + struct_type_index, + *field_index, + struct_ref, + )?; + state.push1(val); + } + + Operator::StructGet { + struct_type_index, + field_index, + } => { + let struct_type_index = TypeIndex::from_u32(*struct_type_index); + let struct_ref = state.pop1(); + let val = environ.translate_struct_get( + builder, + struct_type_index, + *field_index, + struct_ref, + )?; + state.push1(val); + } + Operator::TryTable { .. } | Operator::ThrowRef => { return Err(wasm_unsupported!( "exception operators are not yet implemented" @@ -2529,13 +2605,7 @@ pub fn translate_operator( | Operator::ArrayFill { .. } | Operator::ArrayCopy { .. } | Operator::ArrayInitData { .. } - | Operator::ArrayInitElem { .. } - | Operator::StructNew { .. } - | Operator::StructNewDefault { .. } - | Operator::StructGetS { .. } - | Operator::StructGetU { .. } - | Operator::StructSet { .. } - | Operator::StructGet { .. } => { + | Operator::ArrayInitElem { .. } => { return Err(wasm_unsupported!("GC operators are not yet implemented")); } diff --git a/cranelift/wasm/src/environ/mod.rs b/cranelift/wasm/src/environ/mod.rs index 665a17a9d5f9..b5e5aa04bb84 100644 --- a/cranelift/wasm/src/environ/mod.rs +++ b/cranelift/wasm/src/environ/mod.rs @@ -4,5 +4,5 @@ mod spec; pub use crate::environ::spec::{ - FuncEnvironment, GlobalVariable, ModuleEnvironment, TargetEnvironment, + FuncEnvironment, GlobalVariable, ModuleEnvironment, StructFieldsVec, TargetEnvironment, }; diff --git a/cranelift/wasm/src/environ/spec.rs b/cranelift/wasm/src/environ/spec.rs index 7a561953a6b0..2a46fb4d9210 100644 --- a/cranelift/wasm/src/environ/spec.rs +++ b/cranelift/wasm/src/environ/spec.rs @@ -18,6 +18,7 @@ use cranelift_codegen::ir::{self, InstBuilder, Type}; use cranelift_codegen::isa::{TargetFrontendConfig, TargetIsa}; use cranelift_entity::PrimaryMap; use cranelift_frontend::FunctionBuilder; +use smallvec::SmallVec; use std::boxed::Box; use std::string::ToString; use wasmparser::{FuncValidator, FunctionBody, Operator, ValidatorResources, WasmFeatures}; @@ -74,6 +75,9 @@ pub trait TargetEnvironment: TypeConvert { fn reference_type(&self, ty: WasmHeapType) -> (ir::Type, bool); } +/// A smallvec that holds the IR values for a struct's fields. +pub type StructFieldsVec = SmallVec<[ir::Value; 4]>; + /// Environment affecting the translation of a single WebAssembly function. /// /// A `FuncEnvironment` trait object is required to translate a WebAssembly function to Cranelift @@ -516,6 +520,61 @@ pub trait FuncEnvironment: TargetEnvironment { i31ref: ir::Value, ) -> WasmResult; + /// Get the number of fields in a struct type. + fn struct_fields_len(&mut self, struct_type_index: TypeIndex) -> WasmResult; + + /// Translate a `struct.new` instruction. + fn translate_struct_new( + &mut self, + builder: &mut FunctionBuilder, + struct_type_index: TypeIndex, + fields: StructFieldsVec, + ) -> WasmResult; + + /// Translate a `struct.new_default` instruction. 
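+    ///
+    /// Like `struct.new`, but instead of taking the initial field values from
+    /// the operand stack, every field is initialized to its default value
+    /// (zero for numeric fields, null for reference fields).
+    ///
+    /// For example, a hypothetical module like the following (illustration
+    /// only) exercises this hook:
+    ///
+    /// ```wat
+    /// (module
+    ///   (type $point (struct (field i32) (field i32)))
+    ///   (func (result (ref $point))
+    ///     (struct.new_default $point)))
+    /// ```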
+ fn translate_struct_new_default( + &mut self, + builder: &mut FunctionBuilder, + struct_type_index: TypeIndex, + ) -> WasmResult; + + /// Translate a `struct.set` instruction. + fn translate_struct_set( + &mut self, + builder: &mut FunctionBuilder, + struct_type_index: TypeIndex, + field_index: u32, + struct_ref: ir::Value, + value: ir::Value, + ) -> WasmResult<()>; + + /// Translate a `struct.get` instruction. + fn translate_struct_get( + &mut self, + builder: &mut FunctionBuilder, + struct_type_index: TypeIndex, + field_index: u32, + struct_ref: ir::Value, + ) -> WasmResult; + + /// Translate a `struct.get_s` instruction. + fn translate_struct_get_s( + &mut self, + builder: &mut FunctionBuilder, + struct_type_index: TypeIndex, + field_index: u32, + struct_ref: ir::Value, + ) -> WasmResult; + + /// Translate a `struct.get_u` instruction. + fn translate_struct_get_u( + &mut self, + builder: &mut FunctionBuilder, + struct_type_index: TypeIndex, + field_index: u32, + struct_ref: ir::Value, + ) -> WasmResult; + /// Emit code at the beginning of every wasm loop. /// /// This can be used to insert explicit interrupt or safepoint checking at diff --git a/cranelift/wasm/src/lib.rs b/cranelift/wasm/src/lib.rs index bd3092fa3418..92965e79a4a1 100644 --- a/cranelift/wasm/src/lib.rs +++ b/cranelift/wasm/src/lib.rs @@ -39,7 +39,9 @@ mod state; mod table; mod translation_utils; -pub use crate::environ::{FuncEnvironment, GlobalVariable, ModuleEnvironment, TargetEnvironment}; +pub use crate::environ::{ + FuncEnvironment, GlobalVariable, ModuleEnvironment, StructFieldsVec, TargetEnvironment, +}; pub use crate::func_translator::FuncTranslator; pub use crate::heap::{Heap, HeapData, HeapStyle}; pub use crate::module_translator::translate_module; diff --git a/crates/cranelift/src/func_environ.rs b/crates/cranelift/src/func_environ.rs index 4cc8449b8ff8..52943c26a3d2 100644 --- a/crates/cranelift/src/func_environ.rs +++ b/crates/cranelift/src/func_environ.rs @@ -13,9 +13,9 @@ use cranelift_frontend::FunctionBuilder; use cranelift_frontend::Variable; use cranelift_wasm::{ EngineOrModuleTypeIndex, FuncEnvironment as _, FuncIndex, FuncTranslationState, GlobalIndex, - GlobalVariable, Heap, HeapData, HeapStyle, IndexType, Memory, MemoryIndex, Table, TableData, - TableIndex, TableSize, TargetEnvironment, TypeIndex, WasmFuncType, WasmHeapTopType, - WasmHeapType, WasmResult, + GlobalVariable, Heap, HeapData, HeapStyle, IndexType, Memory, MemoryIndex, StructFieldsVec, + Table, TableData, TableIndex, TableSize, TargetEnvironment, TypeIndex, WasmCompositeType, + WasmFuncType, WasmHeapTopType, WasmHeapType, WasmResult, WasmValType, }; use smallvec::SmallVec; use std::mem; @@ -84,11 +84,17 @@ wasmtime_environ::foreach_builtin_function!(declare_function_signatures); /// The `FuncEnvironment` implementation for use by the `ModuleEnvironment`. 
pub struct FuncEnvironment<'module_environment> { isa: &'module_environment (dyn TargetIsa + 'module_environment), - module: &'module_environment Module, - types: &'module_environment ModuleTypesBuilder, + pub(crate) module: &'module_environment Module, + pub(crate) types: &'module_environment ModuleTypesBuilder, wasm_func_ty: &'module_environment WasmFuncType, sig_ref_to_ty: SecondaryMap>, + #[cfg(feature = "gc")] + pub(crate) ty_to_struct_layout: std::collections::HashMap< + wasmtime_environ::ModuleInternedTypeIndex, + wasmtime_environ::GcStructLayout, + >, + #[cfg(feature = "wmemcheck")] translation: &'module_environment ModuleTranslation<'module_environment>, @@ -175,6 +181,9 @@ impl<'module_environment> FuncEnvironment<'module_environment> { wasm_func_ty, sig_ref_to_ty: SecondaryMap::default(), + #[cfg(feature = "gc")] + ty_to_struct_layout: std::collections::HashMap::new(), + heaps: PrimaryMap::default(), tables: SecondaryMap::default(), vmctx: None, @@ -1839,6 +1848,9 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m builder: &mut FunctionBuilder, i31ref: ir::Value, ) -> WasmResult { + // TODO: If we knew we have a `(ref i31)` here, instead of maybe a `(ref + // null i31)`, we could omit the `trapz`. But plumbing that type info + // from `wasmparser` and through to here is a bit funky. self.trapz(builder, i31ref, ir::TrapCode::NullI31Ref); Ok(builder.ins().sshr_imm(i31ref, 1)) } @@ -1848,10 +1860,86 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m builder: &mut FunctionBuilder, i31ref: ir::Value, ) -> WasmResult { + // TODO: If we knew we have a `(ref i31)` here, instead of maybe a `(ref + // null i31)`, we could omit the `trapz`. But plumbing that type info + // from `wasmparser` and through to here is a bit funky. 
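+        //
+        // Note that an `i31ref`'s payload lives in the upper 31 bits (the low
+        // bit is the discriminant), so `get_u` recovers it with a logical
+        // right shift by one, while `get_s` above uses an arithmetic shift to
+        // preserve the sign.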
self.trapz(builder, i31ref, ir::TrapCode::NullI31Ref); Ok(builder.ins().ushr_imm(i31ref, 1)) } + fn struct_fields_len(&mut self, struct_type_index: TypeIndex) -> WasmResult { + let ty = self.module.types[struct_type_index]; + match &self.types[ty].composite_type { + WasmCompositeType::Struct(s) => Ok(s.fields.len()), + _ => unreachable!(), + } + } + + fn translate_struct_new( + &mut self, + builder: &mut FunctionBuilder, + struct_type_index: TypeIndex, + fields: StructFieldsVec, + ) -> WasmResult { + gc::translate_struct_new(self, builder, struct_type_index, &fields) + } + + fn translate_struct_new_default( + &mut self, + builder: &mut FunctionBuilder, + struct_type_index: TypeIndex, + ) -> WasmResult { + gc::translate_struct_new_default(self, builder, struct_type_index) + } + + fn translate_struct_get( + &mut self, + builder: &mut FunctionBuilder, + struct_type_index: TypeIndex, + field_index: u32, + struct_ref: ir::Value, + ) -> WasmResult { + gc::translate_struct_get(self, builder, struct_type_index, field_index, struct_ref) + } + + fn translate_struct_get_s( + &mut self, + builder: &mut FunctionBuilder, + struct_type_index: TypeIndex, + field_index: u32, + struct_ref: ir::Value, + ) -> WasmResult { + gc::translate_struct_get_s(self, builder, struct_type_index, field_index, struct_ref) + } + + fn translate_struct_get_u( + &mut self, + builder: &mut FunctionBuilder, + struct_type_index: TypeIndex, + field_index: u32, + struct_ref: ir::Value, + ) -> WasmResult { + gc::translate_struct_get_u(self, builder, struct_type_index, field_index, struct_ref) + } + + fn translate_struct_set( + &mut self, + builder: &mut FunctionBuilder, + struct_type_index: TypeIndex, + field_index: u32, + struct_ref: ir::Value, + value: ir::Value, + ) -> WasmResult<()> { + gc::translate_struct_set( + self, + builder, + struct_type_index, + field_index, + struct_ref, + value, + ) + } + fn translate_ref_null( &mut self, mut pos: cranelift_codegen::cursor::FuncCursor, @@ -1898,7 +1986,7 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m ty.is_vmgcref_type(), "We only use GlobalVariable::Custom for VMGcRef types" ); - let cranelift_wasm::WasmValType::Ref(ty) = ty else { + let WasmValType::Ref(ty) = ty else { unreachable!() }; @@ -1926,7 +2014,7 @@ impl<'module_environment> cranelift_wasm::FuncEnvironment for FuncEnvironment<'m ty.is_vmgcref_type(), "We only use GlobalVariable::Custom for VMGcRef types" ); - let cranelift_wasm::WasmValType::Ref(ty) = ty else { + let WasmValType::Ref(ty) = ty else { unreachable!() }; diff --git a/crates/cranelift/src/gc.rs b/crates/cranelift/src/gc.rs index 96aaedff77e4..29006ac836e2 100644 --- a/crates/cranelift/src/gc.rs +++ b/crates/cranelift/src/gc.rs @@ -8,7 +8,7 @@ use crate::func_environ::FuncEnvironment; use cranelift_codegen::ir; use cranelift_frontend::FunctionBuilder; -use cranelift_wasm::{WasmHeapType, WasmRefType, WasmResult}; +use cranelift_wasm::{TypeIndex, WasmHeapType, WasmRefType, WasmResult}; #[cfg(feature = "gc")] mod enabled; @@ -48,12 +48,114 @@ pub fn gc_ref_table_fill_builtin( imp::gc_ref_table_fill_builtin(ty, func_env, func) } +/// Translate a `struct.new` instruction. +pub fn translate_struct_new( + func_env: &mut FuncEnvironment<'_>, + builder: &mut FunctionBuilder<'_>, + struct_type_index: TypeIndex, + fields: &[ir::Value], +) -> WasmResult { + imp::translate_struct_new(func_env, builder, struct_type_index, fields) +} + +/// Translate a `struct.new_default` instruction. 
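+///
+/// Like the other wrappers in this module, this forwards to either the
+/// `enabled` or the `disabled` implementation, depending on whether the `gc`
+/// cargo feature was enabled at compile time.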
+pub fn translate_struct_new_default( + func_env: &mut FuncEnvironment<'_>, + builder: &mut FunctionBuilder<'_>, + struct_type_index: TypeIndex, +) -> WasmResult { + imp::translate_struct_new_default(func_env, builder, struct_type_index) +} + +/// Translate a `struct.get` instruction. +pub fn translate_struct_get( + func_env: &mut FuncEnvironment<'_>, + builder: &mut FunctionBuilder<'_>, + struct_type_index: TypeIndex, + field_index: u32, + struct_ref: ir::Value, +) -> WasmResult { + imp::translate_struct_get( + func_env, + builder, + struct_type_index, + field_index, + struct_ref, + ) +} + +/// Translate a `struct.get_s` instruction. +pub fn translate_struct_get_s( + func_env: &mut FuncEnvironment<'_>, + builder: &mut FunctionBuilder<'_>, + struct_type_index: TypeIndex, + field_index: u32, + struct_ref: ir::Value, +) -> WasmResult { + imp::translate_struct_get_s( + func_env, + builder, + struct_type_index, + field_index, + struct_ref, + ) +} + +/// Translate a `struct.get_u` instruction. +pub fn translate_struct_get_u( + func_env: &mut FuncEnvironment<'_>, + builder: &mut FunctionBuilder<'_>, + struct_type_index: TypeIndex, + field_index: u32, + struct_ref: ir::Value, +) -> WasmResult { + imp::translate_struct_get_u( + func_env, + builder, + struct_type_index, + field_index, + struct_ref, + ) +} + +/// Translate a `struct.set` instruction. +pub fn translate_struct_set( + func_env: &mut FuncEnvironment<'_>, + builder: &mut FunctionBuilder<'_>, + struct_type_index: TypeIndex, + field_index: u32, + struct_ref: ir::Value, + new_val: ir::Value, +) -> WasmResult<()> { + imp::translate_struct_set( + func_env, + builder, + struct_type_index, + field_index, + struct_ref, + new_val, + ) +} + /// A trait for different collectors to emit any GC barriers they might require. pub trait GcCompiler { /// Get the GC type layouts for this GC compiler. - #[allow(dead_code)] // Used in future PRs. + #[cfg_attr(not(feature = "gc"), allow(dead_code))] fn layouts(&self) -> &dyn GcTypeLayouts; + /// Emit code to allocate a new struct. + /// + /// The struct should be of the given type and its fields initialized to the + /// given values. + #[cfg_attr(not(feature = "gc"), allow(dead_code))] + fn alloc_struct( + &mut self, + func_env: &mut FuncEnvironment<'_>, + builder: &mut FunctionBuilder<'_>, + struct_type_index: TypeIndex, + fields: &[ir::Value], + ) -> WasmResult; + /// Emit a read barrier for when we are cloning a GC reference onto the Wasm /// stack. /// diff --git a/crates/cranelift/src/gc/disabled.rs b/crates/cranelift/src/gc/disabled.rs index cc625068120b..d489ef898ebb 100644 --- a/crates/cranelift/src/gc/disabled.rs +++ b/crates/cranelift/src/gc/disabled.rs @@ -5,7 +5,7 @@ use crate::func_environ::FuncEnvironment; use cranelift_codegen::ir; use cranelift_frontend::FunctionBuilder; use cranelift_wasm::{wasm_unsupported, WasmHeapType, WasmRefType, WasmResult}; -use wasmtime_environ::GcTypeLayouts; +use wasmtime_environ::{GcTypeLayouts, TypeIndex}; /// Get the default GC compiler. 
pub fn gc_compiler(_: &FuncEnvironment<'_>) -> Box { @@ -34,6 +34,82 @@ pub fn gc_ref_table_fill_builtin( )) } +pub fn translate_struct_new( + _func_env: &mut FuncEnvironment<'_>, + _builder: &mut FunctionBuilder<'_>, + _struct_type_index: TypeIndex, + _fields: &[ir::Value], +) -> WasmResult { + Err(wasm_unsupported!( + "support for GC references disabled at compile time because the `gc` cargo \ + feature was not enabled" + )) +} + +pub fn translate_struct_new_default( + _func_env: &mut FuncEnvironment<'_>, + _builder: &mut FunctionBuilder<'_>, + _struct_type_index: TypeIndex, +) -> WasmResult { + Err(wasm_unsupported!( + "support for GC references disabled at compile time because the `gc` cargo \ + feature was not enabled" + )) +} + +pub fn translate_struct_get( + _func_env: &mut FuncEnvironment<'_>, + _builder: &mut FunctionBuilder<'_>, + _struct_type_index: TypeIndex, + _field_index: u32, + _struct_ref: ir::Value, +) -> WasmResult { + Err(wasm_unsupported!( + "support for GC references disabled at compile time because the `gc` cargo \ + feature was not enabled" + )) +} + +pub fn translate_struct_get_s( + _func_env: &mut FuncEnvironment<'_>, + _builder: &mut FunctionBuilder<'_>, + _struct_type_index: TypeIndex, + _field_index: u32, + _struct_ref: ir::Value, +) -> WasmResult { + Err(wasm_unsupported!( + "support for GC references disabled at compile time because the `gc` cargo \ + feature was not enabled" + )) +} + +pub fn translate_struct_get_u( + _func_env: &mut FuncEnvironment<'_>, + _builder: &mut FunctionBuilder<'_>, + _struct_type_index: TypeIndex, + _field_index: u32, + _struct_ref: ir::Value, +) -> WasmResult { + Err(wasm_unsupported!( + "support for GC references disabled at compile time because the `gc` cargo \ + feature was not enabled" + )) +} + +pub fn translate_struct_set( + _func_env: &mut FuncEnvironment<'_>, + _builder: &mut FunctionBuilder<'_>, + _struct_type_index: TypeIndex, + _field_index: u32, + _struct_ref: ir::Value, + _new_val: ir::Value, +) -> WasmResult<()> { + Err(wasm_unsupported!( + "support for GC references disabled at compile time because the `gc` cargo \ + feature was not enabled" + )) +} + struct DisabledGcCompiler; impl GcCompiler for DisabledGcCompiler { @@ -44,6 +120,19 @@ impl GcCompiler for DisabledGcCompiler { ) } + fn alloc_struct( + &mut self, + _func_env: &mut FuncEnvironment<'_>, + _builder: &mut FunctionBuilder<'_>, + _struct_type_index: TypeIndex, + _fields: &[ir::Value], + ) -> WasmResult { + Err(wasm_unsupported!( + "support for GC types disabled at compile time because the `gc` cargo \ + feature was not enabled" + )) + } + fn translate_read_gc_reference( &mut self, _func_env: &mut FuncEnvironment<'_>, diff --git a/crates/cranelift/src/gc/enabled.rs b/crates/cranelift/src/gc/enabled.rs index 58de96489d73..f08f113a7c66 100644 --- a/crates/cranelift/src/gc/enabled.rs +++ b/crates/cranelift/src/gc/enabled.rs @@ -1,10 +1,18 @@ use super::GcCompiler; use crate::func_environ::FuncEnvironment; -use cranelift_codegen::ir::{self, condcodes::IntCC, InstBuilder}; +use cranelift_codegen::{ + cursor::FuncCursor, + ir::{self, condcodes::IntCC, InstBuilder}, +}; use cranelift_frontend::FunctionBuilder; -use cranelift_wasm::{TargetEnvironment, WasmHeapTopType, WasmHeapType, WasmRefType, WasmResult}; +use cranelift_wasm::{ + ModuleInternedTypeIndex, StructFieldsVec, TargetEnvironment, TypeIndex, WasmCompositeType, + WasmHeapTopType, WasmHeapType, WasmRefType, WasmResult, WasmStorageType, WasmValType, +}; +use smallvec::SmallVec; use wasmtime_environ::{ - 
drc::DrcTypeLayouts, GcTypeLayouts, PtrSize, I31_DISCRIMINANT, NON_NULL_NON_I31_MASK, + drc::DrcTypeLayouts, GcStructLayout, GcTypeLayouts, PtrSize, VMGcKind, I31_DISCRIMINANT, + NON_NULL_NON_I31_MASK, }; /// Get the default GC compiler. @@ -56,7 +64,309 @@ pub fn gc_ref_table_fill_builtin( Ok(func_env.builtin_functions.table_fill_gc_ref(func)) } +pub fn translate_struct_new( + func_env: &mut FuncEnvironment<'_>, + builder: &mut FunctionBuilder<'_>, + struct_type_index: TypeIndex, + fields: &[ir::Value], +) -> WasmResult { + gc_compiler(func_env).alloc_struct(func_env, builder, struct_type_index, &fields) +} + +fn default_value( + cursor: &mut FuncCursor, + func_env: &FuncEnvironment<'_>, + ty: &WasmStorageType, +) -> ir::Value { + match ty { + WasmStorageType::I8 | WasmStorageType::I16 => cursor.ins().iconst(ir::types::I32, 0), + WasmStorageType::Val(v) => match v { + WasmValType::I32 => cursor.ins().iconst(ir::types::I32, 0), + WasmValType::I64 => cursor.ins().iconst(ir::types::I64, 0), + WasmValType::F32 => cursor.ins().f32const(0.0), + WasmValType::F64 => cursor.ins().f64const(0.0), + WasmValType::V128 => cursor.ins().iconst(ir::types::I128, 0), + WasmValType::Ref(r) => { + assert!(r.nullable); + let (ty, needs_stack_map) = func_env.reference_type(r.heap_type); + + // NB: The collector doesn't need to know about null references. + let _ = needs_stack_map; + + cursor.ins().iconst(ty, 0) + } + }, + } +} + +pub fn translate_struct_new_default( + func_env: &mut FuncEnvironment<'_>, + builder: &mut FunctionBuilder<'_>, + struct_type_index: TypeIndex, +) -> WasmResult { + let interned_ty = func_env.module.types[struct_type_index]; + let struct_ty = match &func_env.types[interned_ty].composite_type { + WasmCompositeType::Struct(s) => s, + _ => unreachable!(), + }; + let fields = struct_ty + .fields + .iter() + .map(|f| default_value(&mut builder.cursor(), func_env, &f.element_type)) + .collect::(); + gc_compiler(func_env).alloc_struct(func_env, builder, struct_type_index, &fields) +} + +pub fn translate_struct_get( + func_env: &mut FuncEnvironment<'_>, + builder: &mut FunctionBuilder<'_>, + struct_type_index: TypeIndex, + field_index: u32, + struct_ref: ir::Value, +) -> WasmResult { + // TODO: If we know we have a `(ref $my_struct)` here, instead of maybe a + // `(ref null $my_struct)`, we could omit the `trapz`. But plumbing that + // type info from `wasmparser` and through to here is a bit funky. + builder.ins().trapz(struct_ref, ir::TrapCode::NullReference); + + let field_index = usize::try_from(field_index).unwrap(); + let interned_type_index = func_env.module.types[struct_type_index]; + + let struct_layout = func_env.struct_layout(interned_type_index); + let field_offset = struct_layout.fields[field_index]; + + let field_ty = match &func_env.types[interned_type_index].composite_type { + WasmCompositeType::Struct(s) => &s.fields[field_index], + _ => unreachable!(), + }; + + let field_size = wasmtime_environ::byte_size_of_wasm_ty_in_gc_heap(&field_ty.element_type); + + // TODO: We should claim we are accessing the whole object here so that + // repeated accesses to different fields can have their bounds checks + // deduped by GVN. This is a bit tricky to do right now because the last + // parameter of `prepare_gc_ref_access` is the size of the access, and is + // relative to `gc_ref[offset]`, rather than the size of the object itself, + // and relative to `gc_ref[0]`. 
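+    //
+    // For now, we only bounds check this single field's access: `field_size`
+    // bytes at offset `field_offset` within the object.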
+ let field_addr = func_env.prepare_gc_ref_access(builder, struct_ref, field_offset, field_size); + + let field_val = match field_ty.element_type { + WasmStorageType::Val(v) => match v { + WasmValType::I32 => { + builder + .ins() + .load(ir::types::I32, ir::MemFlags::trusted(), field_addr, 0) + } + WasmValType::I64 => { + builder + .ins() + .load(ir::types::I64, ir::MemFlags::trusted(), field_addr, 0) + } + WasmValType::F32 => { + builder + .ins() + .load(ir::types::F32, ir::MemFlags::trusted(), field_addr, 0) + } + WasmValType::F64 => { + builder + .ins() + .load(ir::types::F64, ir::MemFlags::trusted(), field_addr, 0) + } + WasmValType::V128 => { + builder + .ins() + .load(ir::types::I128, ir::MemFlags::trusted(), field_addr, 0) + } + WasmValType::Ref(r) => match r.heap_type.top() { + WasmHeapTopType::Any | WasmHeapTopType::Extern => gc_compiler(func_env) + .translate_read_gc_reference( + func_env, + builder, + r, + field_addr, + ir::MemFlags::trusted(), + )?, + WasmHeapTopType::Func => { + unimplemented!("funcrefs inside the GC heap") + } + }, + }, + WasmStorageType::I8 | WasmStorageType::I16 => { + unreachable!() + } + }; + + Ok(field_val) +} + +enum Extension { + Sign, + Zero, +} + +fn translate_struct_get_and_extend( + func_env: &mut FuncEnvironment<'_>, + builder: &mut FunctionBuilder<'_>, + struct_type_index: TypeIndex, + field_index: u32, + struct_ref: ir::Value, + extension: Extension, +) -> WasmResult { + // TODO: See comment in `translate_struct_get` about the `trapz`. + builder.ins().trapz(struct_ref, ir::TrapCode::NullReference); + + let field_index = usize::try_from(field_index).unwrap(); + let interned_type_index = func_env.module.types[struct_type_index]; + + let struct_layout = func_env.struct_layout(interned_type_index); + let field_offset = struct_layout.fields[field_index]; + + let field_ty = match &func_env.types[interned_type_index].composite_type { + WasmCompositeType::Struct(s) => &s.fields[field_index], + _ => unreachable!(), + }; + + let field_size = wasmtime_environ::byte_size_of_wasm_ty_in_gc_heap(&field_ty.element_type); + + // TODO: See comment in `translate_struct_get` about the `prepare_gc_ref_access`. 
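+    //
+    // This path only handles packed `i8`/`i16` fields (note the
+    // `unreachable!()` below); the loaded value is then widened to an `i32`
+    // according to `extension`.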
+ let field_addr = func_env.prepare_gc_ref_access(builder, struct_ref, field_offset, field_size); + + let field_val = match field_ty.element_type { + WasmStorageType::I8 => { + builder + .ins() + .load(ir::types::I8, ir::MemFlags::trusted(), field_addr, 0) + } + WasmStorageType::I16 => { + builder + .ins() + .load(ir::types::I16, ir::MemFlags::trusted(), field_addr, 0) + } + WasmStorageType::Val(_) => unreachable!(), + }; + + let extended = match extension { + Extension::Sign => builder.ins().sextend(ir::types::I32, field_val), + Extension::Zero => builder.ins().uextend(ir::types::I32, field_val), + }; + + Ok(extended) +} + +pub fn translate_struct_get_s( + func_env: &mut FuncEnvironment<'_>, + builder: &mut FunctionBuilder<'_>, + struct_type_index: TypeIndex, + field_index: u32, + struct_ref: ir::Value, +) -> WasmResult { + translate_struct_get_and_extend( + func_env, + builder, + struct_type_index, + field_index, + struct_ref, + Extension::Sign, + ) +} + +pub fn translate_struct_get_u( + func_env: &mut FuncEnvironment<'_>, + builder: &mut FunctionBuilder<'_>, + struct_type_index: TypeIndex, + field_index: u32, + struct_ref: ir::Value, +) -> WasmResult { + translate_struct_get_and_extend( + func_env, + builder, + struct_type_index, + field_index, + struct_ref, + Extension::Zero, + ) +} + +pub fn translate_struct_set( + func_env: &mut FuncEnvironment<'_>, + builder: &mut FunctionBuilder<'_>, + struct_type_index: TypeIndex, + field_index: u32, + struct_ref: ir::Value, + new_val: ir::Value, +) -> WasmResult<()> { + // TODO: See comment in `translate_struct_get` about the `trapz`. + builder.ins().trapz(struct_ref, ir::TrapCode::NullReference); + + let field_index = usize::try_from(field_index).unwrap(); + let interned_type_index = func_env.module.types[struct_type_index]; + + let struct_layout = func_env.struct_layout(interned_type_index); + let field_offset = struct_layout.fields[field_index]; + + let field_ty = match &func_env.types[interned_type_index].composite_type { + WasmCompositeType::Struct(s) => &s.fields[field_index], + _ => unreachable!(), + }; + + let field_size = wasmtime_environ::byte_size_of_wasm_ty_in_gc_heap(&field_ty.element_type); + + // TODO: See comment in `translate_struct_get` about the `prepare_gc_ref_access`. + let field_addr = func_env.prepare_gc_ref_access(builder, struct_ref, field_offset, field_size); + + match &field_ty.element_type { + WasmStorageType::I8 => { + builder + .ins() + .istore8(ir::MemFlags::trusted(), new_val, field_addr, 0); + } + WasmStorageType::I16 => { + builder + .ins() + .istore16(ir::MemFlags::trusted(), new_val, field_addr, 0); + } + WasmStorageType::Val(WasmValType::Ref(r)) if r.heap_type.top() == WasmHeapTopType::Func => { + unimplemented!("funcrefs inside the GC heap") + } + WasmStorageType::Val(WasmValType::Ref(r)) => { + gc_compiler(func_env).translate_write_gc_reference( + func_env, + builder, + *r, + field_addr, + new_val, + ir::MemFlags::trusted(), + )?; + } + WasmStorageType::Val(_) => { + assert_eq!(builder.func.dfg.value_type(new_val).bytes(), field_size); + builder + .ins() + .store(ir::MemFlags::trusted(), new_val, field_addr, 0); + } + } + + Ok(()) +} + impl FuncEnvironment<'_> { + /// Get the `GcStructLayout` for the struct type at the given `type_index`. + fn struct_layout(&mut self, type_index: ModuleInternedTypeIndex) -> &GcStructLayout { + // Lazily compute and cache the struct layout. Note that we can't use + // the entry API because of borrowck shenanigans. 
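+        //
+        // (An `entry(...)` would hold a mutable borrow of the map while we
+        // still need to borrow `self` again for `self.types` and
+        // `gc_compiler(self)`.)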
+ if !self.ty_to_struct_layout.contains_key(&type_index) { + let ty = &self.types[type_index]; + let WasmCompositeType::Struct(s) = &ty.composite_type else { + panic!("{type_index:?} is not a struct type: {ty:?}") + }; + let s = s.clone(); + let layout = gc_compiler(self).layouts().struct_layout(&s); + self.ty_to_struct_layout.insert(type_index, layout); + } + + self.ty_to_struct_layout.get(&type_index).unwrap() + } + /// Get the GC heap's base pointer and bound. fn get_gc_heap_base_bound(&mut self, builder: &mut FunctionBuilder) -> (ir::Value, ir::Value) { let ptr_ty = self.pointer_type(); @@ -79,6 +389,15 @@ impl FuncEnvironment<'_> { /// Get the raw pointer of `gc_ref[offset]` bounds checked for an access of /// `size` bytes. + /// + /// The given `gc_ref` must be a non-null, non-i31 GC reference. + /// + /// Returns the raw pointer to `gc_ref[offset]` -- not a raw pointer to the + /// GC object itself (unless `offset == 0`). This raw pointer may be used to + /// read or write up to `size` bytes. Do NOT attempt accesses larger than + /// `size` bytes; that may lead to unchecked out-of-bounds accesses. + /// + /// This method is collector-agnostic. fn prepare_gc_ref_access( &mut self, builder: &mut FunctionBuilder, @@ -132,8 +451,8 @@ impl FuncEnvironment<'_> { /// Takes advantage of static information based on `ty` as to whether the GC /// reference is nullable or can ever be an `i31`. /// - /// Returns an `ir::Value` that will be non-zero if the GC reference is null - /// or is an `i31ref`. + /// Returns an `ir::Value` that is an `i32` will be non-zero if the GC + /// reference is null or is an `i31ref`; otherwise, it will be zero. /// /// This method is collector-agnostic. fn gc_ref_is_null_or_i31( @@ -144,9 +463,33 @@ impl FuncEnvironment<'_> { ) -> ir::Value { assert!(ty.is_vmgcref_type_and_not_i31()); - let might_be_i31 = match ty.heap_type.top() { - WasmHeapTopType::Any => true, - WasmHeapTopType::Extern | WasmHeapTopType::Func => false, + let might_be_i31 = match ty.heap_type { + // If we are definitely dealing with an i31, we shouldn't be + // emitting dynamic checks for it, and the caller shouldn't call + // this function. Should have been caught by the assertion at the + // start of the function. + WasmHeapType::I31 => unreachable!(), + + // Could potentially be an i31. + WasmHeapType::Any | WasmHeapType::Eq => true, + + // If it is definitely a struct, array, or uninhabited type, then it + // is definitely not an i31. + WasmHeapType::Array + | WasmHeapType::ConcreteArray(_) + | WasmHeapType::Struct + | WasmHeapType::ConcreteStruct(_) + | WasmHeapType::None => false, + + // Wrong type hierarchy: cannot be an i31. + WasmHeapType::Extern | WasmHeapType::NoExtern => false, + + // Wrong type hierarchy, and also funcrefs are not GC-managed + // types. Should have been caught by the assertion at the start of + // the function. + WasmHeapType::Func | WasmHeapType::ConcreteFunc(_) | WasmHeapType::NoFunc => { + unreachable!() + } }; match (ty.nullable, might_be_i31) { @@ -272,6 +615,107 @@ impl DrcCompiler { ); (activations_table, next, end) } + + /// Write to an uninitialized GC reference field, initializing it. + /// + /// ```text + /// *dst = new_val + /// ``` + /// + /// Doesn't need to do a full write barrier: we don't have an old reference + /// that is being overwritten and needs its refcount decremented, just a new + /// reference whose count should be incremented. 
+ fn translate_init_gc_reference( + &mut self, + func_env: &mut FuncEnvironment<'_>, + builder: &mut FunctionBuilder, + ty: WasmRefType, + dst: ir::Value, + new_val: ir::Value, + flags: ir::MemFlags, + ) -> WasmResult<()> { + let (ref_ty, needs_stack_map) = func_env.reference_type(ty.heap_type); + debug_assert!(needs_stack_map); + + // Special case for references to uninhabited bottom types: see + // `translate_write_gc_reference` for details. + if let WasmHeapType::None = ty.heap_type { + if ty.nullable { + let null = builder.ins().iconst(ref_ty, 0); + builder.ins().store(flags, null, dst, 0); + } else { + let zero = builder.ins().iconst(ir::types::I32, 0); + builder + .ins() + .trapz(zero, ir::TrapCode::User(crate::DEBUG_ASSERT_TRAP_CODE)); + } + return Ok(()); + }; + + // Special case for `i31ref`s: no need for any barriers. + if let WasmHeapType::I31 = ty.heap_type { + return unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags); + } + + // Our initialization barrier for GC references being copied out of the + // stack and initializing a table/global/struct field/etc... is roughly + // equivalent to the following pseudo-CLIF: + // + // ``` + // current_block: + // ... + // let new_val_is_null_or_i31 = ... + // brif new_val_is_null_or_i31, continue_block, inc_ref_block + // + // inc_ref_block: + // let ref_count = load new_val.ref_count + // let new_ref_count = iadd_imm ref_count, 1 + // store new_val.ref_count, new_ref_count + // jump check_old_val_block + // + // continue_block: + // store dst, new_val + // ... + // ``` + // + // This write barrier is responsible for ensuring that the new value's + // ref count is incremented now that the table/global/struct/etc... is + // holding onto it. + + let current_block = builder.current_block().unwrap(); + let inc_ref_block = builder.create_block(); + let continue_block = builder.create_block(); + + builder.ensure_inserted_block(); + builder.insert_block_after(inc_ref_block, current_block); + builder.insert_block_after(continue_block, inc_ref_block); + + // Current block: check whether the new value is non-null and + // non-i31. If so, branch to the `inc_ref_block`. + let new_val_is_null_or_i31 = func_env.gc_ref_is_null_or_i31(builder, ty, new_val); + builder.ins().brif( + new_val_is_null_or_i31, + continue_block, + &[], + inc_ref_block, + &[], + ); + + // Block to increment the ref count of the new value when it is non-null + // and non-i31. + builder.switch_to_block(inc_ref_block); + builder.seal_block(inc_ref_block); + self.mutate_ref_count(func_env, builder, new_val, 1); + builder.ins().jump(continue_block, &[]); + + // Join point after we're done with the GC barrier: do the actual store + // to initialize the field. + builder.switch_to_block(continue_block); + builder.seal_block(continue_block); + unbarriered_store_gc_ref(builder, ty.heap_type, dst, new_val, flags)?; + + Ok(()) + } } impl GcCompiler for DrcCompiler { @@ -279,6 +723,100 @@ impl GcCompiler for DrcCompiler { &self.layouts } + fn alloc_struct( + &mut self, + func_env: &mut FuncEnvironment<'_>, + builder: &mut FunctionBuilder<'_>, + struct_type_index: TypeIndex, + field_vals: &[ir::Value], + ) -> WasmResult { + // First, call the `gc_alloc_raw` builtin libcall to allocate the + // struct. 
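+        //
+        // The libcall is passed the vmctx, the `VMGcKind`, the module-interned
+        // type index, and the struct's size and alignment, and it returns a
+        // reference to a new, uninitialized object; the stores below then
+        // initialize each field.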
+ + let gc_alloc_raw_builtin = func_env.builtin_functions.gc_alloc_raw(builder.func); + let vmctx = func_env.vmctx_val(&mut builder.cursor()); + let kind = builder + .ins() + .iconst(ir::types::I32, i64::from(VMGcKind::StructRef.as_u32())); + + let interned_type_index = func_env.module.types[struct_type_index]; + let interned_type_index_val = builder + .ins() + .iconst(ir::types::I32, i64::from(interned_type_index.as_u32())); + + let struct_layout = func_env.struct_layout(interned_type_index); + let struct_size = struct_layout.size; + let field_offsets: SmallVec<[_; 8]> = struct_layout.fields.iter().copied().collect(); + assert_eq!(field_vals.len(), field_offsets.len()); + + let size = builder + .ins() + .iconst(ir::types::I32, i64::from(struct_layout.size)); + let align = builder + .ins() + .iconst(ir::types::I32, i64::from(struct_layout.align)); + + let call_inst = builder.ins().call( + gc_alloc_raw_builtin, + &[vmctx, kind, interned_type_index_val, size, align], + ); + let struct_ref = builder.inst_results(call_inst)[0]; + + let struct_ty = match &func_env.types[interned_type_index].composite_type { + WasmCompositeType::Struct(s) => s, + _ => unreachable!(), + }; + let field_types: SmallVec<[_; 8]> = struct_ty.fields.iter().cloned().collect(); + assert_eq!(field_vals.len(), field_types.len()); + + // Second, initialize each of the newly-allocated struct's fields. + + for ((ty, val), offset) in field_types.into_iter().zip(field_vals).zip(field_offsets) { + let size_of_access = + wasmtime_environ::byte_size_of_wasm_ty_in_gc_heap(&ty.element_type); + assert!(offset + size_of_access <= struct_size); + + let field_addr = + func_env.prepare_gc_ref_access(builder, struct_ref, offset, size_of_access); + + match &ty.element_type { + WasmStorageType::Val(WasmValType::Ref(r)) + if r.heap_type.top() == WasmHeapTopType::Func => + { + unimplemented!("funcrefs inside the GC heap") + } + WasmStorageType::Val(WasmValType::Ref(r)) => { + self.translate_init_gc_reference( + func_env, + builder, + *r, + field_addr, + *val, + ir::MemFlags::trusted(), + )?; + } + WasmStorageType::I8 => { + builder + .ins() + .istore8(ir::MemFlags::trusted(), *val, field_addr, 0); + } + WasmStorageType::I16 => { + builder + .ins() + .istore16(ir::MemFlags::trusted(), *val, field_addr, 0); + } + WasmStorageType::Val(_) => { + assert_eq!(builder.func.dfg.value_type(*val).bytes(), size_of_access); + builder + .ins() + .store(ir::MemFlags::trusted(), *val, field_addr, 0); + } + } + } + + Ok(struct_ref) + } + fn translate_read_gc_reference( &mut self, func_env: &mut FuncEnvironment<'_>, diff --git a/crates/environ/src/builtin.rs b/crates/environ/src/builtin.rs index 03a9e776c5b3..35639471a665 100644 --- a/crates/environ/src/builtin.rs +++ b/crates/environ/src/builtin.rs @@ -79,6 +79,17 @@ macro_rules! foreach_builtin_function { #[cfg(feature = "gc")] gc(vmctx: vmctx, root: reference) -> reference; + // Allocate a new, uninitialized GC object and return a reference to + // it. + #[cfg(feature = "gc")] + gc_alloc_raw( + vmctx: vmctx, + kind: i32, + module_interned_type_index: i32, + size: i32, + align: i32 + ) -> reference; + // Returns an index for Wasm's `table.grow` instruction for GC references. 
#[cfg(feature = "gc")] table_grow_gc_ref(vmctx: vmctx, table: i32, delta: i64, init: reference) -> pointer; diff --git a/crates/environ/src/gc.rs b/crates/environ/src/gc.rs index 65454e7a956c..569e7a3884e0 100644 --- a/crates/environ/src/gc.rs +++ b/crates/environ/src/gc.rs @@ -14,7 +14,7 @@ pub mod drc; use crate::prelude::*; use core::alloc::Layout; -use wasmtime_types::{WasmArrayType, WasmStructType}; +use wasmtime_types::{WasmArrayType, WasmStorageType, WasmStructType, WasmValType}; /// Discriminant to check whether GC reference is an `i31ref` or not. pub const I31_DISCRIMINANT: u64 = 1; @@ -23,6 +23,20 @@ pub const I31_DISCRIMINANT: u64 = 1; /// with a single bitwise-and operation. pub const NON_NULL_NON_I31_MASK: u64 = !I31_DISCRIMINANT; +/// Get the byte size of the given Wasm type when it is stored inside the GC +/// heap. +pub fn byte_size_of_wasm_ty_in_gc_heap(ty: &WasmStorageType) -> u32 { + match ty { + WasmStorageType::I8 => 1, + WasmStorageType::I16 => 2, + WasmStorageType::Val(ty) => match ty { + WasmValType::I32 | WasmValType::F32 | WasmValType::Ref(_) => 4, + WasmValType::I64 | WasmValType::F64 => 8, + WasmValType::V128 => 16, + }, + } +} + /// A trait for getting the layout of a Wasm GC struct or array inside a /// particular collector. pub trait GcTypeLayouts { @@ -141,14 +155,14 @@ impl GcArrayLayout { /// header (for example) is included in the offset. #[derive(Clone, Debug)] pub struct GcStructLayout { - /// The size of this struct. + /// The size (in bytes) of this struct. pub size: u32, - /// The alignment of this struct. + /// The alignment (in bytes) of this struct. pub align: u32, /// The fields of this struct. The `i`th entry is the `i`th struct field's - /// offset in the struct. + /// offset (in bytes) in the struct. pub fields: Vec, } @@ -213,13 +227,13 @@ impl VMGcKind { pub fn from_high_bits_of_u32(val: u32) -> VMGcKind { let masked = val & Self::MASK; match masked { - x if x == Self::ExternRef as u32 => Self::ExternRef, - x if x == Self::ExternOfAnyRef as u32 => Self::ExternOfAnyRef, - x if x == Self::AnyRef as u32 => Self::AnyRef, - x if x == Self::AnyOfExternRef as u32 => Self::AnyOfExternRef, - x if x == Self::EqRef as u32 => Self::EqRef, - x if x == Self::ArrayRef as u32 => Self::ArrayRef, - x if x == Self::StructRef as u32 => Self::StructRef, + x if x == Self::ExternRef.as_u32() => Self::ExternRef, + x if x == Self::ExternOfAnyRef.as_u32() => Self::ExternOfAnyRef, + x if x == Self::AnyRef.as_u32() => Self::AnyRef, + x if x == Self::AnyOfExternRef.as_u32() => Self::AnyOfExternRef, + x if x == Self::EqRef.as_u32() => Self::EqRef, + x if x == Self::ArrayRef.as_u32() => Self::ArrayRef, + x if x == Self::StructRef.as_u32() => Self::StructRef, _ => panic!("invalid `VMGcKind`: {masked:#032b}"), } } @@ -227,8 +241,15 @@ impl VMGcKind { /// Does this kind match the other kind? /// /// That is, is this kind a subtype of the other kind? + #[inline] pub fn matches(self, other: Self) -> bool { - (self as u32) & (other as u32) == (other as u32) + (self.as_u32() & other.as_u32()) == other.as_u32() + } + + /// TODO FITZGEN + #[inline] + pub fn as_u32(self) -> u32 { + self as u32 } } diff --git a/crates/environ/src/gc/drc.rs b/crates/environ/src/gc/drc.rs index 80c683d1b11a..1bc88b76b524 100644 --- a/crates/environ/src/gc/drc.rs +++ b/crates/environ/src/gc/drc.rs @@ -1,7 +1,6 @@ //! Layout of Wasm GC objects in the deferred reference-counting collector. 
use super::*; -use wasmtime_types::{WasmStorageType, WasmValType}; /// The size of the `VMDrcHeader` header for GC objects. pub const HEADER_SIZE: u32 = 16; @@ -31,18 +30,6 @@ fn field(size: &mut u32, align: &mut u32, bytes: u32) -> u32 { offset } -fn size_of_wasm_ty(ty: &WasmStorageType) -> u32 { - match ty { - WasmStorageType::I8 => 1, - WasmStorageType::I16 => 2, - WasmStorageType::Val(ty) => match ty { - WasmValType::I32 | WasmValType::F32 | WasmValType::Ref(_) => 4, - WasmValType::I64 | WasmValType::F64 => 8, - WasmValType::V128 => 16, - }, - } -} - /// The layout of Wasm GC objects in the deferred reference-counting collector. #[derive(Default)] pub struct DrcTypeLayouts; @@ -55,7 +42,7 @@ impl GcTypeLayouts for DrcTypeLayouts { let length_field_offset = field(&mut size, &mut align, 4); debug_assert_eq!(length_field_offset, ARRAY_LENGTH_OFFSET); - let elem_size = size_of_wasm_ty(&ty.0.element_type); + let elem_size = byte_size_of_wasm_ty_in_gc_heap(&ty.0.element_type); let elems_offset = align_up(&mut size, &mut align, elem_size); GcArrayLayout { @@ -82,7 +69,7 @@ impl GcTypeLayouts for DrcTypeLayouts { .fields .iter() .map(|f| { - let field_size = size_of_wasm_ty(&f.element_type); + let field_size = byte_size_of_wasm_ty_in_gc_heap(&f.element_type); field(&mut size, &mut align, field_size) }) .collect(); diff --git a/crates/wasmtime/src/runtime/vm/gc.rs b/crates/wasmtime/src/runtime/vm/gc.rs index 5d026a943be7..3ef0f4e36422 100644 --- a/crates/wasmtime/src/runtime/vm/gc.rs +++ b/crates/wasmtime/src/runtime/vm/gc.rs @@ -180,6 +180,11 @@ impl GcStore { self.host_data_table.get_mut(host_data_id) } + /// Allocate a raw object with the given header and layout. + pub fn alloc_raw(&mut self, header: VMGcHeader, layout: Layout) -> Result> { + self.gc_heap.alloc_raw(header, layout) + } + /// Allocate an uninitialized struct with the given type index and layout. /// /// This does NOT check that the index is currently allocated in the types diff --git a/crates/wasmtime/src/runtime/vm/gc/gc_ref.rs b/crates/wasmtime/src/runtime/vm/gc/gc_ref.rs index a916e3888786..3876e475e7d0 100644 --- a/crates/wasmtime/src/runtime/vm/gc/gc_ref.rs +++ b/crates/wasmtime/src/runtime/vm/gc/gc_ref.rs @@ -29,6 +29,7 @@ use wasmtime_environ::{VMGcKind, VMSharedTypeIndex}; /// } /// ``` #[repr(align(8))] +#[derive(Debug, Clone, Copy)] pub struct VMGcHeader(u64); unsafe impl GcHeapObject for VMGcHeader { diff --git a/crates/wasmtime/src/runtime/vm/instance.rs b/crates/wasmtime/src/runtime/vm/instance.rs index 6d10edd9258d..607ed9b08a7f 100644 --- a/crates/wasmtime/src/runtime/vm/instance.rs +++ b/crates/wasmtime/src/runtime/vm/instance.rs @@ -263,7 +263,6 @@ impl Instance { self.runtime_info.env_module() } - #[allow(dead_code)] // TODO: used in forthcoming patches pub(crate) fn runtime_module(&self) -> Option<&crate::Module> { match &self.runtime_info { ModuleRuntimeInfo::Module(m) => Some(m), diff --git a/crates/wasmtime/src/runtime/vm/libcalls.rs b/crates/wasmtime/src/runtime/vm/libcalls.rs index ecc94f62fc66..6fed7cfd82a1 100644 --- a/crates/wasmtime/src/runtime/vm/libcalls.rs +++ b/crates/wasmtime/src/runtime/vm/libcalls.rs @@ -437,6 +437,58 @@ unsafe fn gc(instance: &mut Instance, gc_ref: u32) -> Result { } } +/// Allocate a raw, unininitialized GC object for Wasm code. +/// +/// The Wasm code is responsible for initializing the object. 
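+///
+/// If the initial allocation attempt fails, this triggers a GC and retries the
+/// allocation once; if that also fails, a `GcHeapOutOfMemory` error is
+/// returned.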
+#[cfg(feature = "gc")] +unsafe fn gc_alloc_raw( + instance: &mut Instance, + kind: u32, + module_interned_type_index: u32, + size: u32, + align: u32, +) -> Result { + use crate::{vm::VMGcHeader, GcHeapOutOfMemory}; + use core::alloc::Layout; + use wasmtime_environ::{ModuleInternedTypeIndex, VMGcKind}; + + debug_assert_eq!(VMGcKind::UNUSED_MASK & kind, 0); + let kind = VMGcKind::from_high_bits_of_u32(kind); + + let module = instance + .runtime_module() + .expect("should never allocate GC types defined in a dummy module"); + + let module_interned_type_index = ModuleInternedTypeIndex::from_u32(module_interned_type_index); + let shared_type_index = module + .signatures() + .shared_type(module_interned_type_index) + .expect("should have engine type index for module type index"); + + let header = VMGcHeader::from_kind_and_index(kind, shared_type_index); + + let size = usize::try_from(size).unwrap(); + let align = usize::try_from(align).unwrap(); + let layout = Layout::from_size_align(size, align).unwrap(); + + let gc_ref = match (*instance.store()).gc_store().alloc_raw(header, layout)? { + Some(r) => r, + None => { + // If the allocation failed, do a GC to hopefully clean up space. + (*instance.store()).gc(None)?; + + // And then try again. + (*instance.store()) + .gc_store() + .alloc_raw(header, layout)? + .ok_or_else(|| GcHeapOutOfMemory::new(())) + .err2anyhow()? + } + }; + + Ok(gc_ref.as_raw_u32()) +} + // Implementation of `memory.atomic.notify` for locally defined memories. #[cfg(feature = "threads")] fn memory_atomic_notify( diff --git a/crates/wast/src/core.rs b/crates/wast/src/core.rs index ea6fd75a4e7a..6916afd5f456 100644 --- a/crates/wast/src/core.rs +++ b/crates/wast/src/core.rs @@ -22,6 +22,14 @@ pub fn val(store: &mut Store, v: &WastArgCore<'_>) -> Result { ty: AbstractHeapType::Func, shared: false, }) => Val::FuncRef(None), + RefNull(HeapType::Abstract { + ty: AbstractHeapType::Any, + shared: false, + }) => Val::AnyRef(None), + RefNull(HeapType::Abstract { + shared: false, + ty: AbstractHeapType::None, + }) => Val::AnyRef(None), RefExtern(x) => Val::ExternRef(Some(ExternRef::new(store, *x)?)), other => bail!("couldn't convert {:?} to a runtime value", other), }) @@ -105,6 +113,7 @@ pub fn match_val(store: &Store, actual: &Val, expected: &WastRetCore) -> R } } + (Val::AnyRef(Some(_)), WastRetCore::RefAny) => Ok(()), (Val::AnyRef(Some(x)), WastRetCore::RefI31) => { if x.is_i31(store)? { Ok(()) @@ -112,6 +121,13 @@ pub fn match_val(store: &Store, actual: &Val, expected: &WastRetCore) -> R bail!("expected a `(ref i31)`, found {x:?}"); } } + (Val::AnyRef(Some(x)), WastRetCore::RefStruct) => { + if x.is_struct(store)? { + Ok(()) + } else { + bail!("expected a struct reference, found {x:?}") + } + } _ => bail!( "don't know how to compare {:?} and {:?} yet", diff --git a/crates/wast/src/wast.rs b/crates/wast/src/wast.rs index 1e6304de4ce7..bffe315f6ed8 100644 --- a/crates/wast/src/wast.rs +++ b/crates/wast/src/wast.rs @@ -388,6 +388,8 @@ where || (expected.contains("uninitialized element 2") && actual.contains("uninitialized element")) // function references call_ref || (expected.contains("null function") && (actual.contains("uninitialized element") || actual.contains("null reference"))) + // GC tests say "null $kind reference" but we just say "null reference". 
+ || (expected.contains("null") && expected.contains("reference") && actual.contains("null reference")) { return Ok(()); } diff --git a/tests/disas/gc/struct-get.wat b/tests/disas/gc/struct-get.wat new file mode 100644 index 000000000000..e446c7b3d45d --- /dev/null +++ b/tests/disas/gc/struct-get.wat @@ -0,0 +1,231 @@ +;;! target = "x86_64" +;;! flags = "-W function-references,gc" +;;! test = "optimize" + +(module + (type $ty (struct (field (mut f32)) + (field (mut i8)) + (field (mut anyref)))) + + (func (param (ref null $ty)) (result f32) + (struct.get $ty 0 (local.get 0)) + ) + + (func (param (ref null $ty)) (result i32) + (struct.get_s $ty 1 (local.get 0)) + ) + + (func (param (ref null $ty)) (result i32) + (struct.get_u $ty 1 (local.get 0)) + ) + + (func (param (ref null $ty)) (result anyref) + (struct.get $ty 2 (local.get 0)) + ) +) +;; function u0:0(i64 vmctx, i64, i32) -> f32 tail { +;; gv0 = vmctx +;; gv1 = load.i64 notrap aligned readonly gv0+8 +;; gv2 = load.i64 notrap aligned gv1 +;; gv3 = vmctx +;; stack_limit = gv2 +;; +;; block0(v0: i64, v1: i64, v2: i32): +;; @0033 brif v2, block3, block2 +;; +;; block2 cold: +;; @0033 trap null_reference +;; +;; block3: +;; @0033 v7 = uextend.i64 v2 +;; @0033 v8 = iconst.i64 16 +;; @0033 v9 = uadd_overflow_trap v7, v8, user65535 ; v8 = 16 +;; @0033 v10 = iconst.i64 4 +;; @0033 v11 = uadd_overflow_trap v9, v10, user65535 ; v10 = 4 +;; @0033 v6 = load.i64 notrap aligned readonly v0+48 +;; @0033 v12 = icmp ult v11, v6 +;; @0033 brif v12, block5, block4 +;; +;; block4 cold: +;; @0033 trap user65535 +;; +;; block5: +;; @0033 v5 = load.i64 notrap aligned readonly v0+40 +;; @0033 v13 = iadd v5, v9 +;; @0033 v14 = load.f32 notrap aligned v13 +;; @0037 jump block1 +;; +;; block1: +;; @0037 return v14 +;; } +;; +;; function u0:1(i64 vmctx, i64, i32) -> i32 tail { +;; gv0 = vmctx +;; gv1 = load.i64 notrap aligned readonly gv0+8 +;; gv2 = load.i64 notrap aligned gv1 +;; gv3 = vmctx +;; stack_limit = gv2 +;; +;; block0(v0: i64, v1: i64, v2: i32): +;; @003c brif v2, block3, block2 +;; +;; block2 cold: +;; @003c trap null_reference +;; +;; block3: +;; @003c v7 = uextend.i64 v2 +;; @003c v8 = iconst.i64 20 +;; @003c v9 = uadd_overflow_trap v7, v8, user65535 ; v8 = 20 +;; @003c v10 = iconst.i64 1 +;; @003c v11 = uadd_overflow_trap v9, v10, user65535 ; v10 = 1 +;; @003c v6 = load.i64 notrap aligned readonly v0+48 +;; @003c v12 = icmp ult v11, v6 +;; @003c brif v12, block5, block4 +;; +;; block4 cold: +;; @003c trap user65535 +;; +;; block5: +;; @003c v5 = load.i64 notrap aligned readonly v0+40 +;; @003c v13 = iadd v5, v9 +;; @003c v14 = load.i8 notrap aligned v13 +;; @0040 jump block1 +;; +;; block1: +;; @003c v15 = sextend.i32 v14 +;; @0040 return v15 +;; } +;; +;; function u0:2(i64 vmctx, i64, i32) -> i32 tail { +;; gv0 = vmctx +;; gv1 = load.i64 notrap aligned readonly gv0+8 +;; gv2 = load.i64 notrap aligned gv1 +;; gv3 = vmctx +;; stack_limit = gv2 +;; +;; block0(v0: i64, v1: i64, v2: i32): +;; @0045 brif v2, block3, block2 +;; +;; block2 cold: +;; @0045 trap null_reference +;; +;; block3: +;; @0045 v7 = uextend.i64 v2 +;; @0045 v8 = iconst.i64 20 +;; @0045 v9 = uadd_overflow_trap v7, v8, user65535 ; v8 = 20 +;; @0045 v10 = iconst.i64 1 +;; @0045 v11 = uadd_overflow_trap v9, v10, user65535 ; v10 = 1 +;; @0045 v6 = load.i64 notrap aligned readonly v0+48 +;; @0045 v12 = icmp ult v11, v6 +;; @0045 brif v12, block5, block4 +;; +;; block4 cold: +;; @0045 trap user65535 +;; +;; block5: +;; @0045 v5 = load.i64 notrap aligned readonly v0+40 +;; @0045 v13 = iadd 
v5, v9 +;; @0045 v14 = load.i8 notrap aligned v13 +;; @0049 jump block1 +;; +;; block1: +;; @0045 v15 = uextend.i32 v14 +;; @0049 return v15 +;; } +;; +;; function u0:3(i64 vmctx, i64, i32) -> i32 tail { +;; ss0 = explicit_slot 4, align = 4 +;; gv0 = vmctx +;; gv1 = load.i64 notrap aligned readonly gv0+8 +;; gv2 = load.i64 notrap aligned gv1 +;; gv3 = vmctx +;; sig0 = (i64 vmctx, i32) -> i32 system_v +;; fn0 = colocated u1:26 sig0 +;; stack_limit = gv2 +;; +;; block0(v0: i64, v1: i64, v2: i32): +;; @004e brif v2, block7, block6 +;; +;; block6 cold: +;; @004e trap null_reference +;; +;; block7: +;; @004e v7 = uextend.i64 v2 +;; @004e v8 = iconst.i64 24 +;; @004e v9 = uadd_overflow_trap v7, v8, user65535 ; v8 = 24 +;; @004e v10 = iconst.i64 4 +;; @004e v11 = uadd_overflow_trap v9, v10, user65535 ; v10 = 4 +;; @004e v6 = load.i64 notrap aligned readonly v0+48 +;; @004e v12 = icmp ult v11, v6 +;; @004e brif v12, block9, block8 +;; +;; block8 cold: +;; @004e trap user65535 +;; +;; block9: +;; @004e v5 = load.i64 notrap aligned readonly v0+40 +;; @004e v13 = iadd v5, v9 +;; @004e v14 = load.i32 notrap aligned v13 +;; v54 = stack_addr.i64 ss0 +;; store notrap v14, v54 +;; @004e v15 = iconst.i32 -2 +;; @004e v16 = band v14, v15 ; v15 = -2 +;; v56 = iconst.i32 0 +;; @004e v17 = icmp eq v16, v56 ; v56 = 0 +;; @004e brif v17, block5, block2 +;; +;; block2: +;; @004e v19 = load.i64 notrap aligned v0+56 +;; @004e v20 = load.i64 notrap aligned v19 +;; @004e v21 = load.i64 notrap aligned v19+8 +;; @004e v22 = icmp eq v20, v21 +;; @004e brif v22, block3, block4 +;; +;; block4: +;; @004e v26 = uextend.i64 v14 +;; @004e v27 = iconst.i64 8 +;; @004e v28 = uadd_overflow_trap v26, v27, user65535 ; v27 = 8 +;; @004e v30 = uadd_overflow_trap v28, v27, user65535 ; v27 = 8 +;; @004e v31 = icmp ult v30, v6 +;; @004e brif v31, block11, block10 +;; +;; block10 cold: +;; @004e trap user65535 +;; +;; block11: +;; @004e v32 = iadd.i64 v5, v28 +;; @004e v33 = load.i64 notrap aligned v32 +;; v51 = load.i32 notrap v54 +;; @004e v38 = uextend.i64 v51 +;; v64 = iconst.i64 8 +;; @004e v40 = uadd_overflow_trap v38, v64, user65535 ; v64 = 8 +;; @004e v42 = uadd_overflow_trap v40, v64, user65535 ; v64 = 8 +;; @004e v43 = icmp ult v42, v6 +;; @004e brif v43, block13, block12 +;; +;; block12 cold: +;; @004e trap user65535 +;; +;; block13: +;; v58 = iconst.i64 1 +;; @004e v34 = iadd.i64 v33, v58 ; v58 = 1 +;; @004e v44 = iadd.i64 v5, v40 +;; @004e store notrap aligned v34, v44 +;; v50 = load.i32 notrap v54 +;; @004e store notrap aligned v50, v20 +;; v65 = iconst.i64 4 +;; v66 = iadd.i64 v20, v65 ; v65 = 4 +;; @004e store notrap aligned v66, v19 +;; @004e jump block5 +;; +;; block3 cold: +;; @004e v47 = call fn0(v0, v14), stack_map=[i32 @ ss0+0] +;; @004e jump block5 +;; +;; block5: +;; v48 = load.i32 notrap v54 +;; @0052 jump block1 +;; +;; block1: +;; @0052 return v48 +;; } diff --git a/tests/disas/gc/struct-new-default.wat b/tests/disas/gc/struct-new-default.wat new file mode 100644 index 000000000000..65b19ca6fad1 --- /dev/null +++ b/tests/disas/gc/struct-new-default.wat @@ -0,0 +1,107 @@ +;;! target = "x86_64" +;;! flags = "-W function-references,gc" +;;! 
test = "optimize" + +(module + (type $ty (struct (field (mut f32)) + (field (mut i8)) + (field (mut anyref)))) + + (func (result (ref $ty)) + (struct.new_default $ty) + ) +) +;; function u0:0(i64 vmctx, i64) -> i32 tail { +;; gv0 = vmctx +;; gv1 = load.i64 notrap aligned readonly gv0+8 +;; gv2 = load.i64 notrap aligned gv1 +;; gv3 = vmctx +;; sig0 = (i64 vmctx, i32 uext, i32 uext, i32 uext, i32 uext) -> i32 system_v +;; fn0 = colocated u1:27 sig0 +;; stack_limit = gv2 +;; +;; block0(v0: i64, v1: i64): +;; @0021 v7 = iconst.i32 -1476395008 +;; @0021 v4 = iconst.i32 0 +;; @0021 v9 = iconst.i32 32 +;; @0021 v10 = iconst.i32 8 +;; @0021 v11 = call fn0(v0, v7, v4, v9, v10) ; v7 = -1476395008, v4 = 0, v9 = 32, v10 = 8 +;; @0021 v15 = uextend.i64 v11 +;; @0021 v16 = iconst.i64 16 +;; @0021 v17 = uadd_overflow_trap v15, v16, user65535 ; v16 = 16 +;; @0021 v18 = iconst.i64 4 +;; @0021 v19 = uadd_overflow_trap v17, v18, user65535 ; v18 = 4 +;; @0021 v14 = load.i64 notrap aligned readonly v0+48 +;; @0021 v20 = icmp ult v19, v14 +;; @0021 brif v20, block5, block4 +;; +;; block4 cold: +;; @0021 trap user65535 +;; +;; block5: +;; @0021 v3 = f32const 0.0 +;; @0021 v13 = load.i64 notrap aligned readonly v0+40 +;; @0021 v21 = iadd v13, v17 +;; @0021 store notrap aligned v3, v21 ; v3 = 0.0 +;; @0021 v26 = iconst.i64 20 +;; @0021 v27 = uadd_overflow_trap.i64 v15, v26, user65535 ; v26 = 20 +;; @0021 v28 = iconst.i64 1 +;; @0021 v29 = uadd_overflow_trap v27, v28, user65535 ; v28 = 1 +;; @0021 v30 = icmp ult v29, v14 +;; @0021 brif v30, block7, block6 +;; +;; block6 cold: +;; @0021 trap user65535 +;; +;; block7: +;; v83 = iconst.i32 0 +;; @0021 v31 = iadd.i64 v13, v27 +;; @0021 istore8 notrap aligned v83, v31 ; v83 = 0 +;; @0021 v36 = iconst.i64 24 +;; @0021 v37 = uadd_overflow_trap.i64 v15, v36, user65535 ; v36 = 24 +;; v84 = iconst.i64 4 +;; @0021 v39 = uadd_overflow_trap v37, v84, user65535 ; v84 = 4 +;; @0021 v40 = icmp ult v39, v14 +;; @0021 brif v40, block9, block8 +;; +;; block8 cold: +;; @0021 trap user65535 +;; +;; block9: +;; v75 = iconst.i8 1 +;; @0021 brif v75, block3, block2 ; v75 = 1 +;; +;; block2: +;; v82 = iconst.i64 0 +;; @0021 v49 = iconst.i64 8 +;; @0021 v50 = uadd_overflow_trap v82, v49, user65535 ; v82 = 0, v49 = 8 +;; @0021 v52 = uadd_overflow_trap v50, v49, user65535 ; v49 = 8 +;; @0021 v53 = icmp ult v52, v14 +;; @0021 brif v53, block11, block10 +;; +;; block10 cold: +;; @0021 trap user65535 +;; +;; block11: +;; @0021 v54 = iadd.i64 v13, v50 +;; @0021 v55 = load.i64 notrap aligned v54 +;; @0021 brif.i8 v53, block13, block12 +;; +;; block12 cold: +;; @0021 trap user65535 +;; +;; block13: +;; v85 = iconst.i64 1 +;; v86 = iadd.i64 v55, v85 ; v85 = 1 +;; @0021 store notrap aligned v86, v54 +;; @0021 jump block3 +;; +;; block3: +;; v87 = iconst.i32 0 +;; @0021 v41 = iadd.i64 v13, v37 +;; @0021 store notrap aligned v87, v41 ; v87 = 0 +;; @0024 jump block1 +;; +;; block1: +;; @0024 return v11 +;; } diff --git a/tests/disas/gc/struct-new.wat b/tests/disas/gc/struct-new.wat new file mode 100644 index 000000000000..f8848e7cab70 --- /dev/null +++ b/tests/disas/gc/struct-new.wat @@ -0,0 +1,119 @@ +;;! target = "x86_64" +;;! flags = "-W function-references,gc" +;;! 
test = "optimize" + +(module + (type $ty (struct (field (mut f32)) + (field (mut i8)) + (field (mut anyref)))) + + (func (param f32 i32 anyref) (result (ref $ty)) + (struct.new $ty (local.get 0) (local.get 1) (local.get 2)) + ) +) +;; function u0:0(i64 vmctx, i64, f32, i32, i32) -> i32 tail { +;; ss0 = explicit_slot 4, align = 4 +;; gv0 = vmctx +;; gv1 = load.i64 notrap aligned readonly gv0+8 +;; gv2 = load.i64 notrap aligned gv1 +;; gv3 = vmctx +;; sig0 = (i64 vmctx, i32 uext, i32 uext, i32 uext, i32 uext) -> i32 system_v +;; fn0 = colocated u1:27 sig0 +;; stack_limit = gv2 +;; +;; block0(v0: i64, v1: i64, v2: f32, v3: i32, v4: i32): +;; v71 = stack_addr.i64 ss0 +;; store notrap v4, v71 +;; @002a v7 = iconst.i32 -1476395008 +;; @002a v8 = iconst.i32 0 +;; @002a v9 = iconst.i32 32 +;; @002a v10 = iconst.i32 8 +;; @002a v11 = call fn0(v0, v7, v8, v9, v10), stack_map=[i32 @ ss0+0] ; v7 = -1476395008, v8 = 0, v9 = 32, v10 = 8 +;; @002a v15 = uextend.i64 v11 +;; @002a v16 = iconst.i64 16 +;; @002a v17 = uadd_overflow_trap v15, v16, user65535 ; v16 = 16 +;; @002a v18 = iconst.i64 4 +;; @002a v19 = uadd_overflow_trap v17, v18, user65535 ; v18 = 4 +;; @002a v14 = load.i64 notrap aligned readonly v0+48 +;; @002a v20 = icmp ult v19, v14 +;; @002a brif v20, block5, block4 +;; +;; block4 cold: +;; @002a trap user65535 +;; +;; block5: +;; @002a v13 = load.i64 notrap aligned readonly v0+40 +;; @002a v21 = iadd v13, v17 +;; @002a store.f32 notrap aligned v2, v21 +;; @002a v26 = iconst.i64 20 +;; @002a v27 = uadd_overflow_trap.i64 v15, v26, user65535 ; v26 = 20 +;; @002a v28 = iconst.i64 1 +;; @002a v29 = uadd_overflow_trap v27, v28, user65535 ; v28 = 1 +;; @002a v30 = icmp ult v29, v14 +;; @002a brif v30, block7, block6 +;; +;; block6 cold: +;; @002a trap user65535 +;; +;; block7: +;; @002a v31 = iadd.i64 v13, v27 +;; @002a istore8.i32 notrap aligned v3, v31 +;; @002a v36 = iconst.i64 24 +;; @002a v37 = uadd_overflow_trap.i64 v15, v36, user65535 ; v36 = 24 +;; v78 = iconst.i64 4 +;; @002a v39 = uadd_overflow_trap v37, v78, user65535 ; v78 = 4 +;; @002a v40 = icmp ult v39, v14 +;; @002a brif v40, block9, block8 +;; +;; block8 cold: +;; @002a trap user65535 +;; +;; block9: +;; v70 = load.i32 notrap v71 +;; @002a v42 = iconst.i32 -2 +;; @002a v43 = band v70, v42 ; v42 = -2 +;; v79 = iconst.i32 0 +;; v80 = icmp eq v43, v79 ; v79 = 0 +;; @002a brif v80, block3, block2 +;; +;; block2: +;; @002a v48 = uextend.i64 v70 +;; @002a v49 = iconst.i64 8 +;; @002a v50 = uadd_overflow_trap v48, v49, user65535 ; v49 = 8 +;; @002a v52 = uadd_overflow_trap v50, v49, user65535 ; v49 = 8 +;; @002a v53 = icmp ult v52, v14 +;; @002a brif v53, block11, block10 +;; +;; block10 cold: +;; @002a trap user65535 +;; +;; block11: +;; @002a v54 = iadd.i64 v13, v50 +;; @002a v55 = load.i64 notrap aligned v54 +;; v68 = load.i32 notrap v71 +;; @002a v60 = uextend.i64 v68 +;; v81 = iconst.i64 8 +;; @002a v62 = uadd_overflow_trap v60, v81, user65535 ; v81 = 8 +;; @002a v64 = uadd_overflow_trap v62, v81, user65535 ; v81 = 8 +;; @002a v65 = icmp ult v64, v14 +;; @002a brif v65, block13, block12 +;; +;; block12 cold: +;; @002a trap user65535 +;; +;; block13: +;; v82 = iconst.i64 1 +;; v83 = iadd.i64 v55, v82 ; v82 = 1 +;; @002a v66 = iadd.i64 v13, v62 +;; @002a store notrap aligned v83, v66 +;; @002a jump block3 +;; +;; block3: +;; v67 = load.i32 notrap v71 +;; @002a v41 = iadd.i64 v13, v37 +;; @002a store notrap aligned v67, v41 +;; @002d jump block1 +;; +;; block1: +;; @002d return v11 +;; } diff --git a/tests/disas/gc/struct-set.wat 
b/tests/disas/gc/struct-set.wat new file mode 100644 index 000000000000..1567f81f7b2d --- /dev/null +++ b/tests/disas/gc/struct-set.wat @@ -0,0 +1,205 @@ +;;! target = "x86_64" +;;! flags = "-W function-references,gc" +;;! test = "optimize" + +(module + (type $ty (struct (field (mut f32)) + (field (mut i8)) + (field (mut anyref)))) + + (func (param (ref null $ty) f32) + (struct.set $ty 0 (local.get 0) (local.get 1)) + ) + + (func (param (ref null $ty) i32) + (struct.set $ty 1 (local.get 0) (local.get 1)) + ) + + (func (param (ref null $ty) anyref) + (struct.set $ty 2 (local.get 0) (local.get 1)) + ) +) +;; function u0:0(i64 vmctx, i64, i32, f32) tail { +;; gv0 = vmctx +;; gv1 = load.i64 notrap aligned readonly gv0+8 +;; gv2 = load.i64 notrap aligned gv1 +;; gv3 = vmctx +;; stack_limit = gv2 +;; +;; block0(v0: i64, v1: i64, v2: i32, v3: f32): +;; @0034 brif v2, block3, block2 +;; +;; block2 cold: +;; @0034 trap null_reference +;; +;; block3: +;; @0034 v7 = uextend.i64 v2 +;; @0034 v8 = iconst.i64 16 +;; @0034 v9 = uadd_overflow_trap v7, v8, user65535 ; v8 = 16 +;; @0034 v10 = iconst.i64 4 +;; @0034 v11 = uadd_overflow_trap v9, v10, user65535 ; v10 = 4 +;; @0034 v6 = load.i64 notrap aligned readonly v0+48 +;; @0034 v12 = icmp ult v11, v6 +;; @0034 brif v12, block5, block4 +;; +;; block4 cold: +;; @0034 trap user65535 +;; +;; block5: +;; @0034 v5 = load.i64 notrap aligned readonly v0+40 +;; @0034 v13 = iadd v5, v9 +;; @0034 store.f32 notrap aligned v3, v13 +;; @0038 jump block1 +;; +;; block1: +;; @0038 return +;; } +;; +;; function u0:1(i64 vmctx, i64, i32, i32) tail { +;; gv0 = vmctx +;; gv1 = load.i64 notrap aligned readonly gv0+8 +;; gv2 = load.i64 notrap aligned gv1 +;; gv3 = vmctx +;; stack_limit = gv2 +;; +;; block0(v0: i64, v1: i64, v2: i32, v3: i32): +;; @003f brif v2, block3, block2 +;; +;; block2 cold: +;; @003f trap null_reference +;; +;; block3: +;; @003f v7 = uextend.i64 v2 +;; @003f v8 = iconst.i64 20 +;; @003f v9 = uadd_overflow_trap v7, v8, user65535 ; v8 = 20 +;; @003f v10 = iconst.i64 1 +;; @003f v11 = uadd_overflow_trap v9, v10, user65535 ; v10 = 1 +;; @003f v6 = load.i64 notrap aligned readonly v0+48 +;; @003f v12 = icmp ult v11, v6 +;; @003f brif v12, block5, block4 +;; +;; block4 cold: +;; @003f trap user65535 +;; +;; block5: +;; @003f v5 = load.i64 notrap aligned readonly v0+40 +;; @003f v13 = iadd v5, v9 +;; @003f istore8.i32 notrap aligned v3, v13 +;; @0043 jump block1 +;; +;; block1: +;; @0043 return +;; } +;; +;; function u0:2(i64 vmctx, i64, i32, i32) tail { +;; gv0 = vmctx +;; gv1 = load.i64 notrap aligned readonly gv0+8 +;; gv2 = load.i64 notrap aligned gv1 +;; gv3 = vmctx +;; sig0 = (i64 vmctx, i32 uext) system_v +;; fn0 = colocated u1:25 sig0 +;; stack_limit = gv2 +;; +;; block0(v0: i64, v1: i64, v2: i32, v3: i32): +;; @004a brif v2, block9, block8 +;; +;; block8 cold: +;; @004a trap null_reference +;; +;; block9: +;; @004a v7 = uextend.i64 v2 +;; @004a v8 = iconst.i64 24 +;; @004a v9 = uadd_overflow_trap v7, v8, user65535 ; v8 = 24 +;; @004a v10 = iconst.i64 4 +;; @004a v11 = uadd_overflow_trap v9, v10, user65535 ; v10 = 4 +;; @004a v6 = load.i64 notrap aligned readonly v0+48 +;; @004a v12 = icmp ult v11, v6 +;; @004a brif v12, block11, block10 +;; +;; block10 cold: +;; @004a trap user65535 +;; +;; block11: +;; @004a v5 = load.i64 notrap aligned readonly v0+40 +;; @004a v13 = iadd v5, v9 +;; @004a v14 = load.i32 notrap aligned v13 +;; @004a v15 = iconst.i32 -2 +;; @004a v16 = band.i32 v3, v15 ; v15 = -2 +;; v67 = iconst.i32 0 +;; @004a v17 = icmp eq v16, v67 
; v67 = 0 +;; @004a brif v17, block3, block2 +;; +;; block2: +;; @004a v21 = uextend.i64 v3 +;; @004a v47 = iconst.i64 8 +;; @004a v23 = uadd_overflow_trap v21, v47, user65535 ; v47 = 8 +;; @004a v25 = uadd_overflow_trap v23, v47, user65535 ; v47 = 8 +;; @004a v26 = icmp ult v25, v6 +;; @004a brif v26, block13, block12 +;; +;; block12 cold: +;; @004a trap user65535 +;; +;; block13: +;; @004a v27 = iadd.i64 v5, v23 +;; @004a v28 = load.i64 notrap aligned v27 +;; @004a brif.i8 v26, block15, block14 +;; +;; block14 cold: +;; @004a trap user65535 +;; +;; block15: +;; v68 = iconst.i64 1 +;; @004a v29 = iadd.i64 v28, v68 ; v68 = 1 +;; @004a store notrap aligned v29, v27 +;; @004a jump block3 +;; +;; block3: +;; @004a store.i32 notrap aligned v3, v13 +;; v72 = iconst.i32 -2 +;; v73 = band.i32 v14, v72 ; v72 = -2 +;; v74 = iconst.i32 0 +;; v75 = icmp eq v73, v74 ; v74 = 0 +;; @004a brif v75, block7, block4 +;; +;; block4: +;; @004a v46 = uextend.i64 v14 +;; v76 = iconst.i64 8 +;; @004a v48 = uadd_overflow_trap v46, v76, user65535 ; v76 = 8 +;; @004a v50 = uadd_overflow_trap v48, v76, user65535 ; v76 = 8 +;; @004a v51 = icmp ult v50, v6 +;; @004a brif v51, block17, block16 +;; +;; block16 cold: +;; @004a trap user65535 +;; +;; block17: +;; @004a v52 = iadd.i64 v5, v48 +;; @004a v53 = load.i64 notrap aligned v52 +;; v70 = iconst.i64 -1 +;; @004a v54 = iadd v53, v70 ; v70 = -1 +;; v71 = iconst.i64 0 +;; @004a v55 = icmp eq v54, v71 ; v71 = 0 +;; @004a brif v55, block5, block6 +;; +;; block5 cold: +;; @004a call fn0(v0, v14) +;; @004a jump block7 +;; +;; block6: +;; @004a brif.i8 v51, block19, block18 +;; +;; block18 cold: +;; @004a trap user65535 +;; +;; block19: +;; v77 = iadd.i64 v53, v70 ; v70 = -1 +;; @004a store notrap aligned v77, v52 +;; @004a jump block7 +;; +;; block7: +;; @004e jump block1 +;; +;; block1: +;; @004e return +;; } diff --git a/tests/misc_testsuite/gc/struct-instructions.wast b/tests/misc_testsuite/gc/struct-instructions.wast new file mode 100644 index 000000000000..9e20fce2e3ec --- /dev/null +++ b/tests/misc_testsuite/gc/struct-instructions.wast @@ -0,0 +1,131 @@ +(module + (type $ty (struct (field (mut f32)) + (field (mut i8)) + (field (mut anyref)))) + + (global $g (mut (ref null $ty)) (ref.null $ty)) + + ;; Constructors. + + (func $new (param f32 i32 anyref) (result (ref $ty)) + (struct.new $ty (local.get 0) (local.get 1) (local.get 2)) + ) + (func (export "new") (param f32 i32 anyref) + (global.set $g (call $new (local.get 0) (local.get 1) (local.get 2))) + ) + + (func $new-default (result (ref $ty)) + (struct.new_default $ty) + ) + (func (export "new-default") + (global.set $g (call $new-default)) + ) + + ;; Getters. + + (func $get-f32 (param (ref null $ty)) (result f32) + (struct.get $ty 0 (local.get 0)) + ) + (func (export "get-f32") (result f32) + (call $get-f32 (global.get $g)) + ) + + (func $get-s-i8 (param (ref null $ty)) (result i32) + (struct.get_s $ty 1 (local.get 0)) + ) + (func (export "get-s-i8") (result i32) + (call $get-s-i8 (global.get $g)) + ) + + (func $get-u-i8 (param (ref null $ty)) (result i32) + (struct.get_u $ty 1 (local.get 0)) + ) + (func (export "get-u-i8") (result i32) + (call $get-u-i8 (global.get $g)) + ) + + (func $get-anyref (param (ref null $ty)) (result anyref) + (struct.get $ty 2 (local.get 0)) + ) + (func (export "get-anyref") (result anyref) + (call $get-anyref (global.get $g)) + ) + + ;; Setters. 
+ + (func $set-f32 (param (ref null $ty) f32) + (struct.set $ty 0 (local.get 0) (local.get 1)) + ) + (func (export "set-f32") (param f32) + (call $set-f32 (global.get $g) (local.get 0)) + ) + + (func $set-i8 (param (ref null $ty) i32) + (struct.set $ty 1 (local.get 0) (local.get 1)) + ) + (func (export "set-i8") (param i32) + (call $set-i8 (global.get $g) (local.get 0)) + ) + + (func $set-anyref (param (ref null $ty) anyref) + (struct.set $ty 2 (local.get 0) (local.get 1)) + ) + (func (export "set-anyref") (param anyref) + (call $set-anyref (global.get $g) (local.get 0)) + ) + + (func (export "set-anyref-non-null") + (call $set-anyref (global.get $g) (struct.new_default $ty)) + ) +) + +(assert_return (invoke "new" (f32.const 1) (i32.const -1) (ref.null any))) +(assert_return (invoke "get-f32") (f32.const 1)) +(assert_return (invoke "get-s-i8") (i32.const -1)) +(assert_return (invoke "get-u-i8") (i32.const 255)) +(assert_return (invoke "get-anyref") (ref.null any)) + +(assert_return (invoke "new-default")) +(assert_return (invoke "get-f32") (f32.const 0)) +(assert_return (invoke "get-s-i8") (i32.const 0)) +(assert_return (invoke "get-u-i8") (i32.const 0)) +(assert_return (invoke "get-anyref") (ref.null any)) + +(assert_return (invoke "set-f32" (f32.const 2))) +(assert_return (invoke "get-f32") (f32.const 2)) + +(assert_return (invoke "set-i8" (i32.const -1))) +(assert_return (invoke "get-s-i8") (i32.const -1)) +(assert_return (invoke "get-u-i8") (i32.const 255)) + +(assert_return (invoke "set-anyref-non-null")) +(assert_return (invoke "get-anyref") (ref.struct)) +(assert_return (invoke "set-anyref" (ref.null any))) +(assert_return (invoke "get-anyref") (ref.null any)) + +;; Null dereference + +(module + (type $t (struct (field (mut i32) (mut i16)))) + + (func (export "struct.get-null") (param (ref null $t)) + (drop (struct.get $t 0 (local.get 0))) + ) + + (func (export "struct.get_s-null") (param (ref null $t)) + (drop (struct.get_s $t 1 (local.get 0))) + ) + + (func (export "struct.get_u-null") (param (ref null $t)) + (drop (struct.get_u $t 1 (local.get 0))) + ) + + (func (export "struct.set-null") (param (ref null $t)) + (struct.set $t 0 (local.get 0) (i32.const 0)) + ) +) + +(assert_trap (invoke "struct.get-null" (ref.null none)) "null structure reference") +(assert_trap (invoke "struct.get_s-null" (ref.null none)) "null structure reference") +(assert_trap (invoke "struct.get_u-null" (ref.null none)) "null structure reference") +(assert_trap (invoke "struct.set-null" (ref.null none)) "null structure reference")