Draft
24 commits
0200afd  typed key implementation (eugineerd, May 5, 2025)
6c4bf20  Implement fragmenting value components (eugineerd, May 9, 2025)
d5be3b5  Merge remote-tracking branch 'upstream/main' into value-components (eugineerd, May 9, 2025)
c84f7a1  update msrv to 1.86.0 (eugineerd, May 9, 2025)
c8d064c  remove `filter_by_component_value` in Query implementation (eugineerd, May 14, 2025)
3032426  add support for `DynamicFragmentingValue` in dyn `FragmentingValue::eq` (eugineerd, May 14, 2025)
7fd9bf7  Remove `storage!=Table` restriction for fragmenting value components. (eugineerd, May 14, 2025)
7cfc733  fixed fragmenting value map being reset (eugineerd, May 15, 2025)
d41d9fc  add migration guide (eugineerd, May 15, 2025)
5f460e6  Merge branch 'main' of https://github.com/bevyengine/bevy into value-… (eugineerd, May 19, 2025)
a777ce6  a bit more documentation (eugineerd, May 20, 2025)
149634e  add some benches (eugineerd, May 20, 2025)
2ada359  Merge branch 'main' into value-components (eugineerd, May 28, 2025)
bf9c262  Merge branch 'main' into value-components (eugineerd, Jun 1, 2025)
441f858  add `Shared` storage (eugineerd, May 27, 2025)
8c57aec  add `StorageType::Shared` (eugineerd, May 31, 2025)
1cb6635  doc pass (eugineerd, Jun 1, 2025)
7689c1f  Enforce `Shared` storage invariants automatically (eugineerd, Jun 1, 2025)
49c0111  add shared components to component index (eugineerd, Jun 1, 2025)
2bf058b  use Shared in fragmenting value benches (eugineerd, Jun 1, 2025)
9ced4fa  add simple iteration benchmark (eugineerd, Jun 1, 2025)
41cfc3b  fix broken doc links (eugineerd, Jun 1, 2025)
a71bdcd  update migration guide (eugineerd, Jun 1, 2025)
64ebc24  add release note (eugineerd, Jun 1, 2025)
123 changes: 123 additions & 0 deletions benches/benches/bevy_ecs/components/fragmenting_values.rs
@@ -0,0 +1,123 @@
use bevy_ecs::prelude::*;
use criterion::Criterion;
use glam::*;

#[derive(Component, PartialEq, Eq, Hash, Clone)]
#[component(storage = "Shared")]
struct Fragmenting<const N: usize>(u32);

#[derive(Component)]
struct NonFragmenting<const N: usize>(Vec3);

pub fn insert_fragmenting_value(c: &mut Criterion) {
    let mut group = c.benchmark_group("insert_fragmenting_value");
    group.warm_up_time(core::time::Duration::from_millis(500));
    group.measurement_time(core::time::Duration::from_secs(5));
    group.bench_function("base", |b| {
        b.iter(move || {
            let mut world = World::new();
            world.spawn_batch((0..10_000).map(|_| {
                (
                    Fragmenting::<1>(1),
                    NonFragmenting::<1>(Vec3::ONE),
                    NonFragmenting::<2>(Vec3::ONE),
                    NonFragmenting::<3>(Vec3::ONE),
                )
            }));
        });
    });
    group.bench_function("unbatched", |b| {
        b.iter(move || {
            let mut world = World::new();
            for _ in 0..10_000 {
                world.spawn((
                    Fragmenting::<1>(1),
                    NonFragmenting::<1>(Vec3::ONE),
                    NonFragmenting::<2>(Vec3::ONE),
                    NonFragmenting::<3>(Vec3::ONE),
                ));
            }
        });
    });
    group.bench_function("high_fragmentation_base", |b| {
        b.iter(move || {
            let mut world = World::new();
            world.spawn_batch((0..10_000).map(|i| {
                (
                    Fragmenting::<1>(i % 100),
                    NonFragmenting::<1>(Vec3::ONE),
                    NonFragmenting::<2>(Vec3::ONE),
                    NonFragmenting::<3>(Vec3::ONE),
                )
            }));
        });
    });
    group.bench_function("high_fragmentation_unbatched", |b| {
        b.iter(move || {
            let mut world = World::new();
            for i in 0..10_000 {
                world.spawn((
                    Fragmenting::<1>(i % 100),
                    NonFragmenting::<1>(Vec3::ONE),
                    NonFragmenting::<2>(Vec3::ONE),
                    NonFragmenting::<3>(Vec3::ONE),
                ));
            }
        });
    });
    group.finish();
}

pub fn add_remove_fragmenting_value(c: &mut Criterion) {
    let mut group = c.benchmark_group("add_remove_fragmenting_value");
    group.warm_up_time(core::time::Duration::from_millis(500));
    group.measurement_time(core::time::Duration::from_secs(5));

    group.bench_function("non_fragmenting", |b| {
        let mut world = World::new();
        let entities: Vec<_> = world
            .spawn_batch((0..10_000).map(|_| {
                (
                    Fragmenting::<1>(1),
                    NonFragmenting::<1>(Vec3::ONE),
                    NonFragmenting::<2>(Vec3::ONE),
                    NonFragmenting::<3>(Vec3::ONE),
                )
            }))
            .collect();
        b.iter(move || {
            for entity in &entities {
                world
                    .entity_mut(*entity)
                    .insert(NonFragmenting::<4>(Vec3::ZERO));
            }

            for entity in &entities {
                world.entity_mut(*entity).remove::<NonFragmenting<4>>();
            }
        });
    });

    group.bench_function("fragmenting", |b| {
        let mut world = World::new();
        let entities: Vec<_> = world
            .spawn_batch((0..10_000).map(|_| {
                (
                    Fragmenting::<1>(1),
                    NonFragmenting::<1>(Vec3::ONE),
                    NonFragmenting::<2>(Vec3::ONE),
                    NonFragmenting::<3>(Vec3::ONE),
                )
            }))
            .collect();
        b.iter(move || {
            for entity in &entities {
                world.entity_mut(*entity).insert(Fragmenting::<1>(2));
            }

            for entity in &entities {
                world.entity_mut(*entity).remove::<NonFragmenting<1>>();
            }
        });
    });
}
4 changes: 4 additions & 0 deletions benches/benches/bevy_ecs/components/mod.rs
@@ -5,11 +5,13 @@ mod add_remove_sparse_set;
mod add_remove_table;
mod add_remove_very_big_table;
mod archetype_updates;
mod fragmenting_values;
mod insert_simple;
mod insert_simple_unbatched;

use archetype_updates::*;
use criterion::{criterion_group, Criterion};
use fragmenting_values::*;

criterion_group!(
    benches,
@@ -19,6 +21,8 @@ criterion_group!(
    insert_simple,
    no_archetypes,
    added_archetypes,
    insert_fragmenting_value,
    add_remove_fragmenting_value
);

fn add_remove(c: &mut Criterion) {
51 changes: 51 additions & 0 deletions benches/benches/bevy_ecs/iteration/iter_simple_shared.rs
@@ -0,0 +1,51 @@
use bevy_ecs::prelude::*;
use glam::*;

#[derive(Component, Copy, Clone)]
struct Transform(Mat4);

#[derive(Component, Copy, Clone)]
#[component(storage = "SparseSet")]
struct Position(Vec3);

#[derive(Component, Copy, Clone)]
struct Rotation(Vec3);

#[derive(Component, Copy, Clone)]
#[component(storage = "SparseSet")]
struct Velocity(Vec3);

#[derive(Component, Copy, Clone, Eq, PartialEq, Hash)]
#[component(storage = "Shared")]
struct VelocityModifier(u32);

pub struct Benchmark<'w>(
    World,
    QueryState<(&'w Velocity, &'w mut Position, &'w VelocityModifier)>,
);

impl<'w> Benchmark<'w> {
    pub fn new() -> Self {
        let mut world = World::new();

        world.spawn_batch((0..10_000).map(|i| {
            (
                Transform(Mat4::from_scale(Vec3::ONE)),
                Position(Vec3::X),
                Rotation(Vec3::X),
                Velocity(Vec3::X),
                VelocityModifier(i / 100),
            )
        }));

        let query = world.query::<(&Velocity, &mut Position, &VelocityModifier)>();
        Self(world, query)
    }

    #[inline(never)]
    pub fn run(&mut self) {
        for (velocity, mut position, modifier) in self.1.iter_mut(&mut self.0) {
            position.0 += velocity.0 * (modifier.0 as f32);
        }
    }
}
5 changes: 5 additions & 0 deletions benches/benches/bevy_ecs/iteration/mod.rs
@@ -13,6 +13,7 @@ mod iter_simple_foreach_hybrid;
mod iter_simple_foreach_sparse_set;
mod iter_simple_foreach_wide;
mod iter_simple_foreach_wide_sparse_set;
mod iter_simple_shared;
mod iter_simple_sparse_set;
mod iter_simple_system;
mod iter_simple_wide;
@@ -48,6 +49,10 @@ fn iter_simple(c: &mut Criterion) {
        let mut bench = iter_simple_system::Benchmark::new();
        b.iter(move || bench.run());
    });
    group.bench_function("shared", |b| {
        let mut bench = iter_simple_shared::Benchmark::new();
        b.iter(move || bench.run());
    });
    group.bench_function("sparse_set", |b| {
        let mut bench = iter_simple_sparse_set::Benchmark::new();
        b.iter(move || bench.run());
1 change: 1 addition & 0 deletions crates/bevy_core_pipeline/src/oit/mod.rs
@@ -64,6 +64,7 @@ impl Default for OrderIndependentTransparencySettings {
impl Component for OrderIndependentTransparencySettings {
    const STORAGE_TYPE: StorageType = StorageType::SparseSet;
    type Mutability = Mutable;
    type Key = NoKey<Self>;

    fn on_add() -> Option<ComponentHook> {
        Some(|world, context| {
28 changes: 24 additions & 4 deletions crates/bevy_ecs/macros/src/component.rs
@@ -245,9 +245,10 @@ pub fn derive_component(input: TokenStream) -> TokenStream {
        }
    });

    let mutable_type = (attrs.immutable || relationship.is_some())
        .then_some(quote! { #bevy_ecs_path::component::Immutable })
        .unwrap_or(quote! { #bevy_ecs_path::component::Mutable });
    let mutable_type =
        (attrs.immutable || relationship.is_some() || matches!(attrs.storage, StorageTy::Shared))
            .then_some(quote! { #bevy_ecs_path::component::Immutable })
            .unwrap_or(quote! { #bevy_ecs_path::component::Mutable });

    let clone_behavior = if relationship_target.is_some() {
        quote!(#bevy_ecs_path::component::ComponentCloneBehavior::Custom(#bevy_ecs_path::relationship::clone_relationship_target::<Self>))
@@ -260,13 +261,22 @@ pub fn derive_component(input: TokenStream) -> TokenStream {
        )
    };

    let key = if let Some(key) = attrs.key {
        quote! {#bevy_ecs_path::component::OtherComponentKey<Self, #key>}
    } else if let StorageTy::Shared = attrs.storage {
        quote! {#bevy_ecs_path::component::SelfKey<Self>}
    } else {
        quote! {#bevy_ecs_path::component::NoKey<Self>}
    };

    // This puts `register_required` before `register_recursive_requires` to ensure that the constructors of _all_ top
    // level components are initialized first, giving them precedence over recursively defined constructors for the same component type
    TokenStream::from(quote! {
        #required_component_docs
        impl #impl_generics #bevy_ecs_path::component::Component for #struct_name #type_generics #where_clause {
            const STORAGE_TYPE: #bevy_ecs_path::component::StorageType = #storage;
            type Mutability = #mutable_type;
            type Key = #key;
            fn register_required_components(
                requiree: #bevy_ecs_path::component::ComponentId,
                components: &mut #bevy_ecs_path::component::ComponentsRegistrator,
@@ -399,6 +409,7 @@ pub const ON_DESPAWN: &str = "on_despawn";

pub const IMMUTABLE: &str = "immutable";
pub const CLONE_BEHAVIOR: &str = "clone_behavior";
pub const KEY: &str = "key";

/// All allowed attribute value expression kinds for component hooks
#[derive(Debug)]
@@ -462,12 +473,14 @@ struct Attrs {
    relationship_target: Option<RelationshipTarget>,
    immutable: bool,
    clone_behavior: Option<Expr>,
    key: Option<Type>,
}

#[derive(Clone, Copy)]
enum StorageTy {
    Table,
    SparseSet,
    Shared,
}

struct Require {
@@ -487,6 +500,7 @@ struct RelationshipTarget {
// values for `storage` attribute
const TABLE: &str = "Table";
const SPARSE_SET: &str = "SparseSet";
const SHARED: &str = "Shared";

fn parse_component_attr(ast: &DeriveInput) -> Result<Attrs> {
    let mut attrs = Attrs {
@@ -501,6 +515,7 @@ fn parse_component_attr(ast: &DeriveInput) -> Result<Attrs> {
        relationship_target: None,
        immutable: false,
        clone_behavior: None,
        key: None,
    };

    let mut require_paths = HashSet::new();
@@ -511,9 +526,10 @@ fn parse_component_attr(ast: &DeriveInput) -> Result<Attrs> {
            attrs.storage = match nested.value()?.parse::<LitStr>()?.value() {
                s if s == TABLE => StorageTy::Table,
                s if s == SPARSE_SET => StorageTy::SparseSet,
                s if s == SHARED => StorageTy::Shared,
                s => {
                    return Err(nested.error(format!(
                        "Invalid storage type `{s}`, expected '{TABLE}' or '{SPARSE_SET}'.",
                        "Invalid storage type `{s}`, expected '{TABLE}', '{SPARSE_SET}' or '{SHARED}'.",
                    )));
                }
            };
@@ -539,6 +555,9 @@ fn parse_component_attr(ast: &DeriveInput) -> Result<Attrs> {
        } else if nested.path.is_ident(CLONE_BEHAVIOR) {
            attrs.clone_behavior = Some(nested.value()?.parse()?);
            Ok(())
        } else if nested.path.is_ident(KEY) {
            attrs.key = Some(nested.value()?.parse()?);
            Ok(())
        } else {
            Err(nested.error("Unsupported attribute"))
        }
@@ -646,6 +665,7 @@ fn storage_path(bevy_ecs_path: &Path, ty: StorageTy) -> TokenStream2 {
    let storage_type = match ty {
        StorageTy::Table => Ident::new("Table", Span::call_site()),
        StorageTy::SparseSet => Ident::new("SparseSet", Span::call_site()),
        StorageTy::Shared => Ident::new("Shared", Span::call_site()),
    };

    quote! { #bevy_ecs_path::component::StorageType::#storage_type }
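For reference, a sketch of the `Component` impls this derive change produces for each key selection path above; the type names are placeholders, the bodies are abbreviated, and only the associated items shown here are taken from the macro:

```rust
// Default path: no `key` attribute and non-`Shared` storage.
// (Matches the manual impl updated in bevy_core_pipeline's OIT module.)
impl Component for Ordinary {
    const STORAGE_TYPE: StorageType = StorageType::Table;
    type Mutability = Mutable;
    type Key = NoKey<Self>;
    // ...remaining generated items unchanged...
}

// `#[component(storage = "Shared")]`: the component is keyed by its own value
// and the derive forces it to be immutable.
impl Component for SharedValue {
    const STORAGE_TYPE: StorageType = StorageType::Shared;
    type Mutability = Immutable;
    type Key = SelfKey<Self>;
    // ...
}

// `#[component(key = KeyType)]`: another type supplies the key.
impl Component for Keyed {
    const STORAGE_TYPE: StorageType = StorageType::Table;
    type Mutability = Mutable;
    type Key = OtherComponentKey<Self, KeyType>;
    // ...
}
```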
26 changes: 26 additions & 0 deletions crates/bevy_ecs/macros/src/lib.rs
@@ -77,6 +77,8 @@ pub fn derive_bundle(input: TokenStream) -> TokenStream {
    let mut field_component_ids = Vec::new();
    let mut field_get_component_ids = Vec::new();
    let mut field_get_components = Vec::new();
    let mut field_get_fragmenting_values = Vec::new();
    let mut field_has_fragmenting_values = Vec::new();
    let mut field_from_components = Vec::new();
    let mut field_required_components = Vec::new();
    for (((i, field_type), field_kind), field) in field_type
@@ -96,6 +98,9 @@ pub fn derive_bundle(input: TokenStream) -> TokenStream {
        field_get_component_ids.push(quote! {
            <#field_type as #ecs_path::bundle::Bundle>::get_component_ids(components, &mut *ids);
        });
        field_has_fragmenting_values.push(quote! {
            <#field_type as #ecs_path::bundle::Bundle>::has_fragmenting_values()
        });
        match field {
            Some(field) => {
                field_get_components.push(quote! {
@@ -104,6 +109,9 @@ pub fn derive_bundle(input: TokenStream) -> TokenStream {
                field_from_components.push(quote! {
                    #field: <#field_type as #ecs_path::bundle::BundleFromComponents>::from_components(ctx, &mut *func),
                });
                field_get_fragmenting_values.push(quote! {
                    self.#field.get_fragmenting_values(components, &mut *values);
                });
            }
            None => {
                let index = Index::from(i);
@@ -113,6 +121,9 @@ pub fn derive_bundle(input: TokenStream) -> TokenStream {
                field_from_components.push(quote! {
                    #index: <#field_type as #ecs_path::bundle::BundleFromComponents>::from_components(ctx, &mut *func),
                });
                field_get_fragmenting_values.push(quote! {
                    self.#index.get_fragmenting_values(components, &mut *values);
                });
            }
        }
    }
@@ -155,6 +166,21 @@ pub fn derive_bundle(input: TokenStream) -> TokenStream {
            ){
                #(#field_required_components)*
            }

            #[allow(unused_variables)]
            #[inline]
            fn get_fragmenting_values<'a>(
                &'a self,
                components: &mut #ecs_path::component::ComponentsRegistrator,
                values: &mut impl FnMut(#ecs_path::component::ComponentId, &'a dyn #ecs_path::fragmenting_value::FragmentingValue)
            ) {
                #(#field_get_fragmenting_values)*
            }

            #[inline(always)]
            fn has_fragmenting_values() -> bool {
                false #(|| #field_has_fragmenting_values)*
            }
        }

        // SAFETY:
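A rough sketch of what the two new generated methods expand to for a hypothetical derived bundle `struct MyBundle { a: A, b: B }` (paths shortened, the enclosing impl abbreviated and its other generated items omitted):

```rust
// SAFETY: abbreviated; mirrors the existing derive output for Bundle.
unsafe impl Bundle for MyBundle {
    // ...existing generated items...

    fn get_fragmenting_values<'a>(
        &'a self,
        components: &mut ComponentsRegistrator,
        values: &mut impl FnMut(ComponentId, &'a dyn FragmentingValue),
    ) {
        // Each field forwards to its own `Bundle` impl.
        self.a.get_fragmenting_values(components, &mut *values);
        self.b.get_fragmenting_values(components, &mut *values);
    }

    fn has_fragmenting_values() -> bool {
        // The `false` seed keeps the `||` chain valid for zero-field bundles.
        false
            || <A as Bundle>::has_fragmenting_values()
            || <B as Bundle>::has_fragmenting_values()
    }
}
```

The callback style leaves it to the caller to decide how collected values are used, and the `has_fragmenting_values` fold presumably lets insertion paths skip the extra bookkeeping when a bundle contains no fragmenting components.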