//! This module declares `VMContext` and the related structs whose fields are
//! accessed directly by compiled wasm code, together with unit tests that
//! check their layouts against the `VMOffsets` used by the compiler.

use crate::externref::VMExternRef;
use crate::instance::Instance;
use std::any::Any;
use std::ptr::NonNull;
use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
use std::u32;
use wasmtime_environ::BuiltinFunctionIndex;
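/// An imported function: the fields compiled code needs in order to call a
/// function provided by another instance or by the host.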
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMFunctionImport {
    /// A pointer to the imported function body.
    pub body: NonNull<VMFunctionBody>,
    /// A pointer to the `VMContext` that owns the imported function.
    pub vmctx: *mut VMContext,
}
#[cfg(test)]
mod test_vmfunction_import {
use super::VMFunctionImport;
use memoffset::offset_of;
use std::mem::size_of;
use wasmtime_environ::{Module, VMOffsets};
#[test]
fn check_vmfunction_import_offsets() {
let module = Module::new();
let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
assert_eq!(
size_of::<VMFunctionImport>(),
usize::from(offsets.size_of_vmfunction_import())
);
assert_eq!(
offset_of!(VMFunctionImport, body),
usize::from(offsets.vmfunction_import_body())
);
assert_eq!(
offset_of!(VMFunctionImport, vmctx),
usize::from(offsets.vmfunction_import_vmctx())
);
}
}
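/// A placeholder byte-sized type which is just used to provide some amount of
/// type safety when dealing with pointers to JIT-compiled function bodies.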
#[repr(C)]
pub struct VMFunctionBody(u8);
#[cfg(test)]
mod test_vmfunction_body {
use super::VMFunctionBody;
use std::mem::size_of;
#[test]
fn check_vmfunction_body_offsets() {
assert_eq!(size_of::<VMFunctionBody>(), 1);
}
}
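/// The fields compiled code needs to access to utilize a WebAssembly table
/// imported from another instance.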
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMTableImport {
    /// A pointer to the imported table description.
    pub from: *mut VMTableDefinition,
    /// A pointer to the `VMContext` that owns the table description.
    pub vmctx: *mut VMContext,
}
#[cfg(test)]
mod test_vmtable_import {
use super::VMTableImport;
use memoffset::offset_of;
use std::mem::size_of;
use wasmtime_environ::{Module, VMOffsets};
#[test]
fn check_vmtable_import_offsets() {
let module = Module::new();
let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
assert_eq!(
size_of::<VMTableImport>(),
usize::from(offsets.size_of_vmtable_import())
);
assert_eq!(
offset_of!(VMTableImport, from),
usize::from(offsets.vmtable_import_from())
);
assert_eq!(
offset_of!(VMTableImport, vmctx),
usize::from(offsets.vmtable_import_vmctx())
);
}
}
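/// The fields compiled code needs to access to utilize a WebAssembly linear
/// memory imported from another instance.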
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMMemoryImport {
    /// A pointer to the imported memory description.
    pub from: *mut VMMemoryDefinition,
    /// A pointer to the `VMContext` that owns the memory description.
    pub vmctx: *mut VMContext,
}
#[cfg(test)]
mod test_vmmemory_import {
use super::VMMemoryImport;
use memoffset::offset_of;
use std::mem::size_of;
use wasmtime_environ::{Module, VMOffsets};
#[test]
fn check_vmmemory_import_offsets() {
let module = Module::new();
let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
assert_eq!(
size_of::<VMMemoryImport>(),
usize::from(offsets.size_of_vmmemory_import())
);
assert_eq!(
offset_of!(VMMemoryImport, from),
usize::from(offsets.vmmemory_import_from())
);
assert_eq!(
offset_of!(VMMemoryImport, vmctx),
usize::from(offsets.vmmemory_import_vmctx())
);
}
}
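/// The fields compiled code needs to access to utilize a WebAssembly global
/// variable imported from another instance.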
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMGlobalImport {
    /// A pointer to the imported global variable description.
    pub from: *mut VMGlobalDefinition,
}
#[cfg(test)]
mod test_vmglobal_import {
use super::VMGlobalImport;
use memoffset::offset_of;
use std::mem::size_of;
use wasmtime_environ::{Module, VMOffsets};
#[test]
fn check_vmglobal_import_offsets() {
let module = Module::new();
let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
assert_eq!(
size_of::<VMGlobalImport>(),
usize::from(offsets.size_of_vmglobal_import())
);
assert_eq!(
offset_of!(VMGlobalImport, from),
usize::from(offsets.vmglobal_import_from())
);
}
}
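/// The fields compiled code needs to access to utilize a WebAssembly linear
/// memory defined within the instance, namely the start address and the
/// current size in bytes.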
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMMemoryDefinition {
    /// The start address of the linear memory.
    pub base: *mut u8,
    /// The current logical size of this linear memory in bytes.
    pub current_length: usize,
}
#[cfg(test)]
mod test_vmmemory_definition {
use super::VMMemoryDefinition;
use memoffset::offset_of;
use std::mem::size_of;
use wasmtime_environ::{Module, VMOffsets};
#[test]
fn check_vmmemory_definition_offsets() {
let module = Module::new();
let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
assert_eq!(
size_of::<VMMemoryDefinition>(),
usize::from(offsets.size_of_vmmemory_definition())
);
assert_eq!(
offset_of!(VMMemoryDefinition, base),
usize::from(offsets.vmmemory_definition_base())
);
assert_eq!(
offset_of!(VMMemoryDefinition, current_length),
usize::from(offsets.vmmemory_definition_current_length())
);
}
}
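/// The fields compiled code needs to access to utilize a WebAssembly table
/// defined within the instance.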
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMTableDefinition {
    /// Pointer to the table element data.
    pub base: *mut u8,
    /// The current number of elements in the table.
    pub current_elements: u32,
}
#[cfg(test)]
mod test_vmtable_definition {
use super::VMTableDefinition;
use memoffset::offset_of;
use std::mem::size_of;
use wasmtime_environ::{Module, VMOffsets};
#[test]
fn check_vmtable_definition_offsets() {
let module = Module::new();
let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
assert_eq!(
size_of::<VMTableDefinition>(),
usize::from(offsets.size_of_vmtable_definition())
);
assert_eq!(
offset_of!(VMTableDefinition, base),
usize::from(offsets.vmtable_definition_base())
);
assert_eq!(
offset_of!(VMTableDefinition, current_elements),
usize::from(offsets.vmtable_definition_current_elements())
);
}
}
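/// The storage for a WebAssembly global defined within the instance.
///
/// The storage is 16 bytes and 16-byte aligned so that it is large enough and
/// sufficiently aligned to hold any of the value types accessed below,
/// including 128-bit values and `VMExternRef`s.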
#[derive(Debug, Copy, Clone)]
#[repr(C, align(16))]
pub struct VMGlobalDefinition {
storage: [u8; 16],
}
#[cfg(test)]
mod test_vmglobal_definition {
use super::VMGlobalDefinition;
use crate::externref::VMExternRef;
use more_asserts::assert_ge;
use std::mem::{align_of, size_of};
use wasmtime_environ::{Module, VMOffsets};
#[test]
fn check_vmglobal_definition_alignment() {
assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<i32>());
assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<i64>());
assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<f32>());
assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<f64>());
assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<[u8; 16]>());
}
#[test]
fn check_vmglobal_definition_offsets() {
let module = Module::new();
let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
assert_eq!(
size_of::<VMGlobalDefinition>(),
usize::from(offsets.size_of_vmglobal_definition())
);
}
#[test]
fn check_vmglobal_begins_aligned() {
let module = Module::new();
let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
assert_eq!(offsets.vmctx_globals_begin() % 16, 0);
}
#[test]
fn check_vmglobal_can_contain_externref() {
assert!(size_of::<VMExternRef>() <= size_of::<VMGlobalDefinition>());
}
}
impl VMGlobalDefinition {
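    /// Construct a `VMGlobalDefinition` with zeroed storage.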
pub fn new() -> Self {
Self { storage: [0; 16] }
}
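    // The accessors below reinterpret the raw 16-byte storage as a particular
    // value type. They are `unsafe` because the caller must know which type
    // the global actually holds.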
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_i32(&self) -> &i32 {
&*(self.storage.as_ref().as_ptr() as *const i32)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_i32_mut(&mut self) -> &mut i32 {
&mut *(self.storage.as_mut().as_mut_ptr() as *mut i32)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_u32(&self) -> &u32 {
&*(self.storage.as_ref().as_ptr() as *const u32)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_u32_mut(&mut self) -> &mut u32 {
&mut *(self.storage.as_mut().as_mut_ptr() as *mut u32)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_i64(&self) -> &i64 {
&*(self.storage.as_ref().as_ptr() as *const i64)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_i64_mut(&mut self) -> &mut i64 {
&mut *(self.storage.as_mut().as_mut_ptr() as *mut i64)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_u64(&self) -> &u64 {
&*(self.storage.as_ref().as_ptr() as *const u64)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_u64_mut(&mut self) -> &mut u64 {
&mut *(self.storage.as_mut().as_mut_ptr() as *mut u64)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_f32(&self) -> &f32 {
&*(self.storage.as_ref().as_ptr() as *const f32)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_f32_mut(&mut self) -> &mut f32 {
&mut *(self.storage.as_mut().as_mut_ptr() as *mut f32)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_f32_bits(&self) -> &u32 {
&*(self.storage.as_ref().as_ptr() as *const u32)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_f32_bits_mut(&mut self) -> &mut u32 {
&mut *(self.storage.as_mut().as_mut_ptr() as *mut u32)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_f64(&self) -> &f64 {
&*(self.storage.as_ref().as_ptr() as *const f64)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_f64_mut(&mut self) -> &mut f64 {
&mut *(self.storage.as_mut().as_mut_ptr() as *mut f64)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_f64_bits(&self) -> &u64 {
&*(self.storage.as_ref().as_ptr() as *const u64)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_f64_bits_mut(&mut self) -> &mut u64 {
&mut *(self.storage.as_mut().as_mut_ptr() as *mut u64)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_u128(&self) -> &u128 {
&*(self.storage.as_ref().as_ptr() as *const u128)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_u128_mut(&mut self) -> &mut u128 {
&mut *(self.storage.as_mut().as_mut_ptr() as *mut u128)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_u128_bits(&self) -> &[u8; 16] {
&*(self.storage.as_ref().as_ptr() as *const [u8; 16])
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_u128_bits_mut(&mut self) -> &mut [u8; 16] {
&mut *(self.storage.as_mut().as_mut_ptr() as *mut [u8; 16])
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_externref(&self) -> &Option<VMExternRef> {
&*(self.storage.as_ref().as_ptr() as *const Option<VMExternRef>)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_externref_mut(&mut self) -> &mut Option<VMExternRef> {
&mut *(self.storage.as_mut().as_mut_ptr() as *mut Option<VMExternRef>)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_anyfunc(&self) -> *const VMCallerCheckedAnyfunc {
*(self.storage.as_ref().as_ptr() as *const *const VMCallerCheckedAnyfunc)
}
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn as_anyfunc_mut(&mut self) -> &mut *const VMCallerCheckedAnyfunc {
&mut *(self.storage.as_mut().as_mut_ptr() as *mut *const VMCallerCheckedAnyfunc)
}
}
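/// An index into the shared signature registry, usable for checking signatures
/// at indirect calls.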
#[repr(C)]
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
pub struct VMSharedSignatureIndex(u32);
#[cfg(test)]
mod test_vmshared_signature_index {
use super::VMSharedSignatureIndex;
use std::mem::size_of;
use wasmtime_environ::{Module, TargetSharedSignatureIndex, VMOffsets};
#[test]
fn check_vmshared_signature_index() {
let module = Module::new();
let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
assert_eq!(
size_of::<VMSharedSignatureIndex>(),
usize::from(offsets.size_of_vmshared_signature_index())
);
}
#[test]
fn check_target_shared_signature_index() {
assert_eq!(
size_of::<VMSharedSignatureIndex>(),
size_of::<TargetSharedSignatureIndex>()
);
}
}
impl VMSharedSignatureIndex {
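    /// Create a new `VMSharedSignatureIndex` from the given raw index value.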
pub fn new(value: u32) -> Self {
Self(value)
}
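    /// Return the underlying `u32` value of this index.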
pub fn bits(&self) -> u32 {
self.0
}
}
impl Default for VMSharedSignatureIndex {
fn default() -> Self {
Self::new(u32::MAX)
}
}
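/// The VM caller-checked "anyfunc" record, for caller-side signature checking.
/// It consists of the actual function pointer, a signature index to be checked
/// by the caller, and the `VMContext` the callee expects.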
#[derive(Debug, Clone)]
#[repr(C)]
pub struct VMCallerCheckedAnyfunc {
    /// Pointer to the function body.
    pub func_ptr: NonNull<VMFunctionBody>,
    /// The function's signature index, checked by the caller at indirect call sites.
    pub type_index: VMSharedSignatureIndex,
    /// The `VMContext` to pass when calling this function.
    pub vmctx: *mut VMContext,
}
#[cfg(test)]
mod test_vmcaller_checked_anyfunc {
use super::VMCallerCheckedAnyfunc;
use memoffset::offset_of;
use std::mem::size_of;
use wasmtime_environ::{Module, VMOffsets};
#[test]
fn check_vmcaller_checked_anyfunc_offsets() {
let module = Module::new();
let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
assert_eq!(
size_of::<VMCallerCheckedAnyfunc>(),
usize::from(offsets.size_of_vmcaller_checked_anyfunc())
);
assert_eq!(
offset_of!(VMCallerCheckedAnyfunc, func_ptr),
usize::from(offsets.vmcaller_checked_anyfunc_func_ptr())
);
assert_eq!(
offset_of!(VMCallerCheckedAnyfunc, type_index),
usize::from(offsets.vmcaller_checked_anyfunc_type_index())
);
assert_eq!(
offset_of!(VMCallerCheckedAnyfunc, vmctx),
usize::from(offsets.vmcaller_checked_anyfunc_vmctx())
);
}
}
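/// An array that stores addresses of builtin functions, so that compiled code
/// can reach them through indirect calls rather than relocated direct calls.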
#[repr(C)]
pub struct VMBuiltinFunctionsArray {
ptrs: [usize; Self::len()],
}
impl VMBuiltinFunctionsArray {
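    /// The number of builtin function entries in the array.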
pub const fn len() -> usize {
BuiltinFunctionIndex::builtin_functions_total_number() as usize
}
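    /// Return an array with every builtin function pointer filled in.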
pub fn initialized() -> Self {
use crate::libcalls::*;
let mut ptrs = [0; Self::len()];
ptrs[BuiltinFunctionIndex::memory32_grow().index() as usize] =
wasmtime_memory32_grow as usize;
ptrs[BuiltinFunctionIndex::imported_memory32_grow().index() as usize] =
wasmtime_imported_memory32_grow as usize;
ptrs[BuiltinFunctionIndex::memory32_size().index() as usize] =
wasmtime_memory32_size as usize;
ptrs[BuiltinFunctionIndex::imported_memory32_size().index() as usize] =
wasmtime_imported_memory32_size as usize;
ptrs[BuiltinFunctionIndex::table_copy().index() as usize] = wasmtime_table_copy as usize;
ptrs[BuiltinFunctionIndex::table_grow_funcref().index() as usize] =
wasmtime_table_grow as usize;
ptrs[BuiltinFunctionIndex::table_grow_externref().index() as usize] =
wasmtime_table_grow as usize;
ptrs[BuiltinFunctionIndex::table_init().index() as usize] = wasmtime_table_init as usize;
ptrs[BuiltinFunctionIndex::elem_drop().index() as usize] = wasmtime_elem_drop as usize;
ptrs[BuiltinFunctionIndex::memory_copy().index() as usize] = wasmtime_memory_copy as usize;
ptrs[BuiltinFunctionIndex::memory_fill().index() as usize] = wasmtime_memory_fill as usize;
ptrs[BuiltinFunctionIndex::imported_memory_fill().index() as usize] =
wasmtime_imported_memory_fill as usize;
ptrs[BuiltinFunctionIndex::memory_init().index() as usize] = wasmtime_memory_init as usize;
ptrs[BuiltinFunctionIndex::data_drop().index() as usize] = wasmtime_data_drop as usize;
ptrs[BuiltinFunctionIndex::drop_externref().index() as usize] =
wasmtime_drop_externref as usize;
ptrs[BuiltinFunctionIndex::activations_table_insert_with_gc().index() as usize] =
wasmtime_activations_table_insert_with_gc as usize;
ptrs[BuiltinFunctionIndex::externref_global_get().index() as usize] =
wasmtime_externref_global_get as usize;
ptrs[BuiltinFunctionIndex::externref_global_set().index() as usize] =
wasmtime_externref_global_set as usize;
ptrs[BuiltinFunctionIndex::table_fill_externref().index() as usize] =
wasmtime_table_fill as usize;
ptrs[BuiltinFunctionIndex::table_fill_funcref().index() as usize] =
wasmtime_table_fill as usize;
        if cfg!(debug_assertions) {
            for (i, ptr) in ptrs.iter().enumerate() {
                debug_assert!(*ptr != 0, "index {} is not initialized", i);
            }
        }
Self { ptrs }
}
}
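/// The storage for a WebAssembly invocation argument.
///
/// Sized and aligned like `VMGlobalDefinition` so it can hold any wasm value,
/// including 128-bit values.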
#[derive(Debug, Copy, Clone)]
#[repr(C, align(16))]
pub struct VMInvokeArgument([u8; 16]);
#[cfg(test)]
mod test_vm_invoke_argument {
use super::VMInvokeArgument;
use std::mem::{align_of, size_of};
use wasmtime_environ::{Module, VMOffsets};
#[test]
fn check_vm_invoke_argument_alignment() {
assert_eq!(align_of::<VMInvokeArgument>(), 16);
}
#[test]
    fn check_vm_invoke_argument_size() {
let module = Module::new();
let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
assert_eq!(
size_of::<VMInvokeArgument>(),
usize::from(offsets.size_of_vmglobal_definition())
);
}
}
impl VMInvokeArgument {
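    /// Construct a zero-initialized argument slot.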
pub fn new() -> Self {
Self([0; 16])
}
}
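/// Structure used to control interrupting wasm code.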
#[derive(Debug)]
#[repr(C)]
pub struct VMInterrupts {
    /// The current stack limit for wasm code. Storing the `INTERRUPTED`
    /// sentinel here (see `interrupt`) causes running wasm code to trap at
    /// its next stack-limit check.
    pub stack_limit: AtomicUsize,
}
impl VMInterrupts {
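    /// Flag that wasm code should be interrupted by overwriting the stack
    /// limit with the `INTERRUPTED` sentinel value.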
pub fn interrupt(&self) {
self.stack_limit
.store(wasmtime_environ::INTERRUPTED, SeqCst);
}
}
impl Default for VMInterrupts {
fn default() -> VMInterrupts {
VMInterrupts {
            stack_limit: AtomicUsize::new(usize::MAX),
}
}
}
#[cfg(test)]
mod test_vminterrupts {
use super::VMInterrupts;
use memoffset::offset_of;
use std::mem::size_of;
use wasmtime_environ::{Module, VMOffsets};
#[test]
fn check_vminterrupts_interrupted_offset() {
let module = Module::new();
let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
assert_eq!(
offset_of!(VMInterrupts, stack_limit),
usize::from(offsets.vminterrupts_stack_limit())
);
}
}
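/// The VM "context", which is pointed to by the `vmctx` argument in compiled
/// wasm code. This has information about globals, memories, tables, and other
/// runtime state associated with the current instance.
///
/// The struct here is empty because the sizes of these fields are dynamic and
/// can't be described in Rust's type system. Sufficient memory is allocated at
/// runtime as part of the owning `Instance`, and the `VMOffsets`-checked
/// layouts above describe where each field lives.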
#[derive(Debug)]
#[repr(C, align(16))]
pub struct VMContext {}
impl VMContext {
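    /// Return a reference to the `Instance` this `VMContext` is embedded in,
    /// by offsetting backwards from the instance's `vmctx` field.
    ///
    /// This is unsafe because it doesn't work on just any `VMContext`; it must
    /// be a `VMContext` allocated as part of an `Instance`.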
#[allow(clippy::cast_ptr_alignment)]
#[inline]
pub(crate) unsafe fn instance(&self) -> &Instance {
&*((self as *const Self as *mut u8).offset(-Instance::vmctx_offset()) as *const Instance)
}
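    /// Return a reference to the host state associated with this instance.
    ///
    /// This is unsafe for the same reason as `instance()`.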
#[inline]
pub unsafe fn host_state(&self) -> &dyn Any {
self.instance().host_state()
}
}
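/// Trampoline function pointer type, used by the host to enter compiled wasm
/// code through a shared trampoline.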
pub type VMTrampoline = unsafe extern "C" fn(
*mut VMContext,
*mut VMContext,
*const VMFunctionBody,
*mut u128,
);