vendored bevy_egui and removed legacy code :/

Signed-off-by: Sienna Meridian Satterwhite <sienna@r3t.io>
This commit is contained in:
2025-12-14 20:25:55 +00:00
parent b0f62dae38
commit 5493faa1f1
32 changed files with 4844 additions and 865 deletions

113
Cargo.lock generated
View File

@@ -222,10 +222,10 @@ version = "0.1.0"
dependencies = [
"anyhow",
"bevy",
"bevy_egui",
"bincode",
"bytes",
"crossbeam-channel",
"egui",
"futures-lite",
"glam 0.29.3",
"iroh",
@@ -721,7 +721,7 @@ dependencies = [
"bevy_reflect",
"bytemuck",
"derive_more 2.0.1",
"encase",
"encase 0.11.2",
"serde",
"thiserror 2.0.17",
"wgpu-types",
@@ -825,53 +825,6 @@ dependencies = [
"syn",
]
[[package]]
name = "bevy_egui"
version = "0.38.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c20416343c6d24eedad9db93c4c42c6571b15d14bac4f6f41b993ec413243f9"
dependencies = [
"arboard",
"bevy_app",
"bevy_asset",
"bevy_camera",
"bevy_color",
"bevy_core_pipeline",
"bevy_derive",
"bevy_ecs",
"bevy_image",
"bevy_input",
"bevy_log",
"bevy_math",
"bevy_mesh",
"bevy_picking",
"bevy_platform",
"bevy_reflect",
"bevy_render",
"bevy_shader",
"bevy_time",
"bevy_transform",
"bevy_ui_render",
"bevy_utils",
"bevy_window",
"bevy_winit",
"bytemuck",
"crossbeam-channel",
"egui",
"encase",
"getrandom 0.3.4",
"image",
"itertools 0.14.0",
"js-sys",
"thread_local",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
"webbrowser",
"wgpu-types",
"winit",
]
[[package]]
name = "bevy_encase_derive"
version = "0.17.2"
@@ -879,7 +832,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7449e5903594a00f007732ba232af0c527ad4e6e3d29bc3e195ec78dbd20c8b2"
dependencies = [
"bevy_macro_utils",
"encase_derive_impl",
"encase_derive_impl 0.11.2",
]
[[package]]
@@ -1381,7 +1334,7 @@ dependencies = [
"bytemuck",
"derive_more 2.0.1",
"downcast-rs 2.0.2",
"encase",
"encase 0.11.2",
"fixedbitset",
"image",
"indexmap",
@@ -2811,6 +2764,18 @@ version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edd0f118536f44f5ccd48bcb8b111bdc3de888b58c74639dfb034a357d0f206d"
[[package]]
name = "encase"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0a05902cf601ed11d564128448097b98ebe3c6574bd7b6a653a3d56d54aa020"
dependencies = [
"const_panic",
"encase_derive 0.10.0",
"glam 0.29.3",
"thiserror 1.0.69",
]
[[package]]
name = "encase"
version = "0.11.2"
@@ -2818,18 +2783,38 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02ba239319a4f60905966390f5e52799d868103a533bb7e27822792332504ddd"
dependencies = [
"const_panic",
"encase_derive",
"encase_derive 0.11.2",
"glam 0.30.9",
"thiserror 2.0.17",
]
[[package]]
name = "encase_derive"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "181d475b694e2dd56ae919ce7699d344d1fd259292d590c723a50d1189a2ea85"
dependencies = [
"encase_derive_impl 0.10.0",
]
[[package]]
name = "encase_derive"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5223d6c647f09870553224f6e37261fe5567bc5a4f4cf13ed337476e79990f2f"
dependencies = [
"encase_derive_impl",
"encase_derive_impl 0.11.2",
]
[[package]]
name = "encase_derive_impl"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f97b51c5cc57ef7c5f7a0c57c250251c49ee4c28f819f87ac32f4aceabc36792"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
@@ -4503,19 +4488,24 @@ name = "libmarathon"
version = "0.1.0"
dependencies = [
"anyhow",
"arboard",
"bevy",
"bincode",
"blake3",
"blocking",
"bytemuck",
"bytes",
"chrono",
"crdts",
"criterion",
"crossbeam-channel",
"egui",
"encase 0.10.0",
"futures-lite",
"glam 0.29.3",
"iroh",
"iroh-gossip",
"itertools 0.14.0",
"proptest",
"rand 0.8.5",
"raw-window-handle",
@@ -4530,6 +4520,7 @@ dependencies = [
"toml",
"tracing",
"uuid",
"wgpu-types",
"winit",
]
@@ -7998,22 +7989,6 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "webbrowser"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00f1243ef785213e3a32fa0396093424a3a6ea566f9948497e5a2309261a4c97"
dependencies = [
"core-foundation 0.10.1",
"jni",
"log",
"ndk-context",
"objc2 0.6.3",
"objc2-foundation 0.3.2",
"url",
"web-sys",
]
[[package]]
name = "webpki-root-certs"
version = "0.26.11"

View File

@@ -19,7 +19,7 @@ bevy = { version = "0.17", default-features = false, features = [
"bevy_text",
"png",
] }
bevy_egui = "0.38"
egui = { version = "0.33", default-features = false, features = ["bytemuck", "default_fonts"] }
glam = "0.29"
winit = "0.30"
raw-window-handle = "0.6"

View File

@@ -3,6 +3,8 @@
//! This module handles the 3D camera setup for the cube demo.
use bevy::prelude::*;
use bevy::camera::RenderTarget;
use bevy::window::WindowRef;
pub struct CameraPlugin;
@@ -17,11 +19,19 @@ impl Plugin for CameraPlugin {
/// Camera is positioned at (4, 3, 6) looking at the cube's initial position (0,
/// 0.5, 0). This provides a good viewing angle to see the cube, ground plane,
/// and any movements.
///
/// libmarathon's debug_ui will automatically attach the primary egui context
/// to this camera via the setup_primary_egui_context_system.
fn setup_camera(mut commands: Commands) {
    info!("Setting up camera");

    // Render explicitly to the primary window.
    let render_camera = Camera {
        target: RenderTarget::Window(WindowRef::Primary),
        ..default()
    };

    // Elevated offset looking down at the cube's spawn point.
    let view = Transform::from_xyz(4.0, 3.0, 6.0).looking_at(Vec3::new(0.0, 0.5, 0.0), Vec3::Y);

    // PrimaryEguiContext will be auto-added by libmarathon.
    commands.spawn((Camera3d::default(), render_camera, view));
}

View File

@@ -1,11 +1,8 @@
//! Debug UI overlay using egui
use bevy::prelude::*;
use bevy_egui::{
egui,
EguiContexts,
EguiPrimaryContextPass,
};
use bevy::ecs::message::MessageWriter;
use libmarathon::debug_ui::{EguiContexts, EguiPrimaryContextPass};
use libmarathon::networking::{
EntityLockRegistry,
GossipBridge,
@@ -25,17 +22,15 @@ impl Plugin for DebugUiPlugin {
/// Render the debug UI panel
fn render_debug_ui(
mut contexts: EguiContexts,
mut egui_ctx: EguiContexts,
node_clock: Option<Res<NodeVectorClock>>,
gossip_bridge: Option<Res<GossipBridge>>,
lock_registry: Option<Res<EntityLockRegistry>>,
cube_query: Query<(&Transform, &NetworkedEntity), With<CubeMarker>>,
mut spawn_events: MessageWriter<SpawnCubeEvent>,
mut delete_events: MessageWriter<DeleteCubeEvent>,
) {
let Ok(ctx) = contexts.ctx_mut() else {
return;
};
) -> Result {
let ctx: &egui::Context = egui_ctx.ctx_mut()?;
egui::Window::new("Debug Info")
.default_pos([10.0, 10.0])
@@ -186,4 +181,6 @@ fn render_debug_ui(
ui.label("Scroll: Move cube (Z)");
ui.label("ESC: Deselect");
});
Ok(())
}

View File

@@ -21,8 +21,8 @@ use libmarathon::engine::InputEvent;
use libmarathon::platform::desktop;
use std::sync::Arc;
use winit::application::ApplicationHandler;
use winit::event::WindowEvent as WinitWindowEvent;
use winit::event_loop::{ActiveEventLoop, ControlFlow, EventLoop};
use winit::event::{Event as WinitEvent, WindowEvent as WinitWindowEvent};
use winit::event_loop::{ActiveEventLoop, ControlFlow, EventLoop, EventLoopProxy};
use winit::window::{Window as WinitWindow, WindowId, WindowAttributes};
// Re-export InputEventBuffer from the input module
@@ -124,6 +124,9 @@ impl AppHandler {
let physical_size = winit_window.inner_size();
let scale_factor = winit_window.scale_factor();
// Set the scale factor in the input bridge so mouse coords are converted correctly
desktop::set_scale_factor(scale_factor);
// Create window entity with all required components (use logical size)
let mut window = bevy::window::Window {
title: "Marathon".to_string(),
@@ -134,10 +137,13 @@ impl AppHandler {
mode: WindowMode::Windowed,
position: WindowPosition::Automatic,
focused: true,
// Let Window use default theme - will auto-detect system theme, egui will follow
..Default::default()
};
// Set scale factor explicitly
window.resolution.set_scale_factor(scale_factor as f32);
// Set scale factor using the proper API that applies to physical size
window
.resolution
.set_scale_factor_and_apply_to_physical_size(scale_factor as f32);
// Create WindowWrapper and RawHandleWrapper for renderer
let window_wrapper = WindowWrapper::new(winit_window.clone());
@@ -151,10 +157,9 @@ impl AppHandler {
)).id();
info!("Created window entity {}", window_entity);
// Send initialization events
// Send initialization event (only WindowCreated, like Bevy does)
// WindowResized and WindowScaleFactorChanged should only fire in response to actual winit events
send_window_created(&mut bevy_app, window_entity);
send_window_resized(&mut bevy_app, window_entity, physical_size, scale_factor);
send_scale_factor_changed(&mut bevy_app, window_entity, scale_factor);
// Now finish the app - the renderer will initialize with the window
bevy_app.finish();
@@ -187,9 +192,9 @@ impl AppHandler {
// Run one final update to process close event
bevy_app.update();
// Cleanup
bevy_app.finish();
bevy_app.cleanup();
// Don't call finish/cleanup - let Bevy's AppExit handle it
// bevy_app.finish();
// bevy_app.cleanup();
}
event_loop.exit();
@@ -240,7 +245,14 @@ impl ApplicationHandler for AppHandler {
}
WinitWindowEvent::Resized(physical_size) => {
// Notify Bevy of window resize
// Update the Bevy Window component's physical resolution
if let Some(mut window_component) = bevy_app.world_mut().get_mut::<Window>(*bevy_window_entity) {
window_component
.resolution
.set_physical_resolution(physical_size.width, physical_size.height);
}
// Notify Bevy systems of window resize
let scale_factor = window.scale_factor();
send_window_resized(bevy_app, *bevy_window_entity, physical_size, scale_factor);
}
@@ -269,6 +281,26 @@ impl ApplicationHandler for AppHandler {
window.request_redraw();
}
WinitWindowEvent::ScaleFactorChanged { scale_factor, .. } => {
// Update the Bevy Window component's scale factor
if let Some(mut window_component) = bevy_app.world_mut().get_mut::<Window>(*bevy_window_entity) {
let prior_factor = window_component.resolution.scale_factor();
// Use the proper API that applies to physical size
window_component
.resolution
.set_scale_factor_and_apply_to_physical_size(scale_factor as f32);
// Send scale factor changed event so camera system can update
send_scale_factor_changed(bevy_app, *bevy_window_entity, scale_factor);
info!(
"Scale factor changed from {} to {} for window {:?}",
prior_factor, scale_factor, bevy_window_entity
);
}
}
_ => {}
}
}
@@ -322,7 +354,8 @@ impl ApplicationHandler for AppHandler {
///
/// executor::run(app).expect("Failed to run executor");
/// ```
pub fn run(app: App) -> Result<(), Box<dyn std::error::Error>> {
pub fn run(mut app: App) -> Result<(), Box<dyn std::error::Error>> {
// Create event loop (using default type for now, WakeUp will be added when implementing battery mode)
let event_loop = EventLoop::new()?;
// TODO(@siennathesane): Add battery power detection and adaptive frame/tick rate limiting

View File

@@ -7,7 +7,7 @@ use bevy::prelude::*;
use bevy::input::keyboard::KeyboardInput;
use bevy::input::mouse::{MouseButtonInput, MouseWheel};
use bevy::window::CursorMoved;
use libmarathon::engine::{InputEvent, KeyCode as EngineKeyCode, MouseButton as EngineMouseButton, TouchPhase, Modifiers};
use libmarathon::engine::{InputEvent, InputEventBuffer, KeyCode as EngineKeyCode, MouseButton as EngineMouseButton, TouchPhase, Modifiers};
/// Convert Bevy's Vec2 to glam::Vec2
///
@@ -100,19 +100,6 @@ impl Plugin for DesktopInputBridgePlugin {
}
}
/// Buffer for InputEvents collected this frame
#[derive(Resource, Default)]
pub struct InputEventBuffer {
pub events: Vec<InputEvent>,
}
impl InputEventBuffer {
/// Get all events from this frame
pub fn events(&self) -> &[InputEvent] {
&self.events
}
}
/// Clear the buffer at the start of each frame
fn clear_buffer(mut buffer: ResMut<InputEventBuffer>) {
buffer.events.clear();
@@ -152,17 +139,21 @@ fn collect_mouse_buttons(
}
}
/// Collect mouse motion events (for drag tracking)
/// Collect mouse motion events (for hover and drag tracking)
fn collect_mouse_motion(
mut buffer: ResMut<InputEventBuffer>,
mut cursor_moved: MessageReader<CursorMoved>,
mouse_buttons: Res<ButtonInput<MouseButton>>,
) {
// Only process if cursor actually moved
for event in cursor_moved.read() {
let cursor_pos = event.position;
// Generate drag events for currently pressed buttons
// ALWAYS send MouseMove for cursor tracking (hover, tooltips, etc.)
buffer.events.push(InputEvent::MouseMove {
pos: to_glam_vec2(cursor_pos),
});
// ALSO generate drag events for currently pressed buttons
if mouse_buttons.pressed(MouseButton::Left) {
buffer.events.push(InputEvent::Mouse {
pos: to_glam_vec2(cursor_pos),

View File

@@ -1,22 +1,5 @@
//! Input event buffer shared between executor and ECS
//! Input event buffer re-export
//!
//! InputEventBuffer is now defined in libmarathon::engine
use bevy::prelude::*;
use libmarathon::engine::InputEvent;
/// Input event buffer resource for Bevy ECS
#[derive(Resource, Default)]
pub struct InputEventBuffer {
pub events: Vec<InputEvent>,
}
impl InputEventBuffer {
/// Get all events from this frame
pub fn events(&self) -> &[InputEvent] {
&self.events
}
/// Clear the buffer
pub fn clear(&mut self) {
self.events.clear();
}
}
pub use libmarathon::engine::InputEventBuffer;

View File

@@ -5,7 +5,7 @@
use bevy::prelude::*;
use libmarathon::{
engine::{GameAction, InputController},
networking::{EntityLockRegistry, NetworkedEntity, NodeVectorClock},
networking::{EntityLockRegistry, NetworkedEntity, NetworkedSelection, NodeVectorClock},
};
use super::event_buffer::InputEventBuffer;
@@ -45,15 +45,17 @@ fn to_bevy_vec2(v: glam::Vec2) -> bevy::math::Vec2 {
fn handle_game_actions(
input_buffer: Res<InputEventBuffer>,
mut controller_res: ResMut<InputControllerResource>,
lock_registry: Res<EntityLockRegistry>,
mut lock_registry: ResMut<EntityLockRegistry>,
node_clock: Res<NodeVectorClock>,
mut cube_query: Query<(&NetworkedEntity, &mut Transform), With<crate::cube::CubeMarker>>,
mut cube_query: Query<(&NetworkedEntity, &mut Transform, &mut NetworkedSelection), With<crate::cube::CubeMarker>>,
camera_query: Query<(&Camera, &GlobalTransform)>,
window_query: Query<&Window>,
) {
let node_id = node_clock.node_id;
// Process all input events through the controller to get game actions
let mut all_actions = Vec::new();
for event in input_buffer.events() {
for event in input_buffer.events.iter() {
let actions = controller_res.controller.process_event(event);
all_actions.extend(actions);
}
@@ -61,6 +63,17 @@ fn handle_game_actions(
// Apply game actions to entities
for action in all_actions {
match action {
GameAction::SelectEntity { position } => {
apply_select_entity(
position,
&mut lock_registry,
node_id,
&mut cube_query,
&camera_query,
&window_query,
);
}
GameAction::MoveEntity { delta } => {
apply_move_entity(delta, &lock_registry, node_id, &mut cube_query);
}
@@ -84,17 +97,83 @@ fn handle_game_actions(
}
}
/// Apply SelectEntity action - raycast to find clicked cube and select it
///
/// Flow: resolve the (single) camera and window, un-project the click into a
/// world-space ray, find the nearest cube whose 1x1x1 AABB the ray hits, then
/// rewrite selection and lock state. A miss on empty space deselects all
/// cubes and releases this node's locks on them.
///
/// * `position` - click position in window coordinates
/// * `lock_registry` - distributed entity-lock state, mutated in place
/// * `node_id` - this node's id, used as the lock owner
fn apply_select_entity(
    position: glam::Vec2,
    lock_registry: &mut EntityLockRegistry,
    node_id: uuid::Uuid,
    cube_query: &mut Query<(&NetworkedEntity, &mut Transform, &mut NetworkedSelection), With<crate::cube::CubeMarker>>,
    camera_query: &Query<(&Camera, &GlobalTransform)>,
    window_query: &Query<&Window>,
) {
    // Get the camera and window; bail out quietly if there is not exactly one
    // of each (e.g. during startup/teardown).
    let Ok((camera, camera_transform)) = camera_query.single() else {
        return;
    };
    let Ok(window) = window_query.single() else {
        return;
    };

    // Convert screen position to world ray
    let Some(ray) = screen_to_world_ray(position, camera, camera_transform, window) else {
        return;
    };

    // Find the closest cube hit by the ray; ties resolve to the first seen.
    let mut closest_hit: Option<(uuid::Uuid, f32)> = None;
    for (networked, transform, _) in cube_query.iter() {
        // Test ray against cube AABB (1x1x1 cube)
        if let Some(distance) = ray_aabb_intersection(
            ray.origin,
            ray.direction,
            transform.translation,
            Vec3::splat(0.5), // Half extents for 1x1x1 cube
        ) {
            if closest_hit.map_or(true, |(_, d)| distance < d) {
                closest_hit = Some((networked.network_id, distance));
            }
        }
    }

    // If we hit a cube, clear all selections and select this one
    if let Some((hit_entity_id, _)) = closest_hit {
        // Clear all previous selections and locks.
        // NOTE(review): this also releases the hit cube's lock before the
        // second pass re-acquires it — assumes try_acquire succeeds on a lock
        // this node just released; confirm EntityLockRegistry semantics.
        for (networked, _, mut selection) in cube_query.iter_mut() {
            selection.clear();
            lock_registry.release(networked.network_id, node_id);
        }
        // Select and lock the clicked cube (lock acquisition is best-effort;
        // a failure is deliberately ignored).
        for (networked, _, mut selection) in cube_query.iter_mut() {
            if networked.network_id == hit_entity_id {
                selection.add(hit_entity_id);
                let _ = lock_registry.try_acquire(hit_entity_id, node_id);
                info!("Selected cube {}", hit_entity_id);
                break;
            }
        }
    } else {
        // Clicked on empty space - deselect all
        for (networked, _, mut selection) in cube_query.iter_mut() {
            selection.clear();
            lock_registry.release(networked.network_id, node_id);
        }
        info!("Deselected all cubes");
    }
}
/// Apply MoveEntity action to locked cubes
fn apply_move_entity(
delta: glam::Vec2,
lock_registry: &EntityLockRegistry,
node_id: uuid::Uuid,
cube_query: &mut Query<(&NetworkedEntity, &mut Transform), With<crate::cube::CubeMarker>>,
cube_query: &mut Query<(&NetworkedEntity, &mut Transform, &mut NetworkedSelection), With<crate::cube::CubeMarker>>,
) {
let bevy_delta = to_bevy_vec2(delta);
let sensitivity = 0.01; // Scale factor
for (networked, mut transform) in cube_query.iter_mut() {
for (networked, mut transform, _) in cube_query.iter_mut() {
if lock_registry.is_locked_by(networked.network_id, node_id, node_id) {
transform.translation.x += bevy_delta.x * sensitivity;
transform.translation.y -= bevy_delta.y * sensitivity; // Invert Y for screen coords
@@ -107,12 +186,12 @@ fn apply_rotate_entity(
delta: glam::Vec2,
lock_registry: &EntityLockRegistry,
node_id: uuid::Uuid,
cube_query: &mut Query<(&NetworkedEntity, &mut Transform), With<crate::cube::CubeMarker>>,
cube_query: &mut Query<(&NetworkedEntity, &mut Transform, &mut NetworkedSelection), With<crate::cube::CubeMarker>>,
) {
let bevy_delta = to_bevy_vec2(delta);
let sensitivity = 0.01;
for (networked, mut transform) in cube_query.iter_mut() {
for (networked, mut transform, _) in cube_query.iter_mut() {
if lock_registry.is_locked_by(networked.network_id, node_id, node_id) {
let rotation_x = Quat::from_rotation_y(bevy_delta.x * sensitivity);
let rotation_y = Quat::from_rotation_x(-bevy_delta.y * sensitivity);
@@ -126,11 +205,11 @@ fn apply_move_depth(
delta: f32,
lock_registry: &EntityLockRegistry,
node_id: uuid::Uuid,
cube_query: &mut Query<(&NetworkedEntity, &mut Transform), With<crate::cube::CubeMarker>>,
cube_query: &mut Query<(&NetworkedEntity, &mut Transform, &mut NetworkedSelection), With<crate::cube::CubeMarker>>,
) {
let sensitivity = 0.1;
for (networked, mut transform) in cube_query.iter_mut() {
for (networked, mut transform, _) in cube_query.iter_mut() {
if lock_registry.is_locked_by(networked.network_id, node_id, node_id) {
transform.translation.z += delta * sensitivity;
}
@@ -141,12 +220,99 @@ fn apply_move_depth(
fn apply_reset_entity(
lock_registry: &EntityLockRegistry,
node_id: uuid::Uuid,
cube_query: &mut Query<(&NetworkedEntity, &mut Transform), With<crate::cube::CubeMarker>>,
cube_query: &mut Query<(&NetworkedEntity, &mut Transform, &mut NetworkedSelection), With<crate::cube::CubeMarker>>,
) {
for (networked, mut transform) in cube_query.iter_mut() {
for (networked, mut transform, _) in cube_query.iter_mut() {
if lock_registry.is_locked_by(networked.network_id, node_id, node_id) {
transform.translation = Vec3::ZERO;
transform.rotation = Quat::IDENTITY;
}
}
}
/// A 3D ray for raycasting
struct Ray {
    // World-space starting point of the ray.
    origin: Vec3,
    // Direction the ray travels; normalized when built by
    // `screen_to_world_ray`, which derefs Bevy's unit-length `Dir3`.
    direction: Vec3,
}
/// Convert screen coordinates to a world-space ray from the camera
///
/// `screen_pos` is the cursor position in window coordinates, which is what
/// `Camera::viewport_to_world` expects (logical pixels, not a 0..1 range).
/// Returns `None` when the position cannot be un-projected.
fn screen_to_world_ray(
    screen_pos: glam::Vec2,
    camera: &Camera,
    camera_transform: &GlobalTransform,
    // Unused today; kept (underscored) so call sites stay unchanged.
    _window: &Window,
) -> Option<Ray> {
    // Bridge from the engine's glam Vec2 to Bevy's math Vec2; the workspace
    // pins a different glam version than Bevy's internals (0.29 vs 0.30 per
    // Cargo.lock), so a field-wise copy is required.
    let viewport_pos = Vec2::new(screen_pos.x, screen_pos.y);

    // Un-project through the camera; `.ok()?` maps any error to None.
    let ray_bevy = camera.viewport_to_world(camera_transform, viewport_pos).ok()?;

    Some(Ray {
        origin: ray_bevy.origin,
        // Deref Bevy's `Dir3` into a plain normalized Vec3.
        direction: *ray_bevy.direction,
    })
}
/// Test ray-AABB (axis-aligned bounding box) intersection
///
/// Returns the distance along the ray if there's an intersection, None otherwise.
fn ray_aabb_intersection(
    ray_origin: Vec3,
    ray_direction: Vec3,
    aabb_center: Vec3,
    aabb_half_extents: Vec3,
) -> Option<f32> {
    // Box bounds derived from center and half extents.
    let lo = aabb_center - aabb_half_extents;
    let hi = aabb_center + aabb_half_extents;

    // Slab method: intersect the ray with each pair of axis-aligned planes,
    // shrinking the [t_near, t_far] interval axis by axis.
    let mut t_near = f32::NEG_INFINITY;
    let mut t_far = f32::INFINITY;

    for axis in 0..3 {
        let o = ray_origin[axis];
        let d = ray_direction[axis];

        if d.abs() < f32::EPSILON {
            // Ray runs parallel to this slab: it can only intersect if the
            // origin already lies between the two planes.
            if o < lo[axis] || o > hi[axis] {
                return None;
            }
            continue;
        }

        // Entry/exit distances for this slab, ordered near-first.
        let inv = 1.0 / d;
        let (t_enter, t_exit) = {
            let a = (lo[axis] - o) * inv;
            let b = (hi[axis] - o) * inv;
            if a <= b { (a, b) } else { (b, a) }
        };

        t_near = t_near.max(t_enter);
        t_far = t_far.min(t_exit);

        // Interval emptied out: the ray misses the box.
        if t_near > t_far {
            return None;
        }
    }

    // A negative t_near means the origin is inside the box; the first
    // boundary ahead of the ray is then t_far.
    if t_near < 0.0 {
        if t_far < 0.0 {
            None // Box is entirely behind the ray
        } else {
            Some(t_far)
        }
    } else {
        Some(t_near)
    }
}

View File

@@ -3,10 +3,9 @@
//! This demonstrates real-time CRDT synchronization with Apple Pencil input.
use bevy::prelude::*;
// use bevy_egui::EguiPlugin; // Disabled - needs WinitPlugin which we own directly
use libmarathon::{
engine::{EngineBridge, EngineCore},
persistence::{PersistenceConfig, PersistencePlugin},
persistence::PersistenceConfig,
};
use std::path::PathBuf;
@@ -21,14 +20,13 @@ mod session;
mod session_ui;
mod setup;
use debug_ui::DebugUiPlugin;
use engine_bridge::EngineBridgePlugin;
mod input;
use camera::*;
use cube::*;
use debug_ui::*;
use input::*;
use rendering::*;
use selection::*;
use session::*;
@@ -80,9 +78,8 @@ fn main() {
.disable::<bevy::gilrs::GilrsPlugin>() // We handle gamepad input ourselves
);
// app.add_plugins(EguiPlugin::default()); // Disabled - needs WinitPlugin
app.add_plugins(EngineBridgePlugin);
app.add_plugins(PersistencePlugin::with_config(
// Marathon core plugins (networking, debug UI, persistence)
app.add_plugins(libmarathon::MarathonPlugin::new(
db_path,
PersistenceConfig {
flush_interval_secs: 2,
@@ -91,13 +88,16 @@ fn main() {
..Default::default()
},
));
// App-specific bridge for polling engine events
app.add_plugins(EngineBridgePlugin);
app.add_plugins(CameraPlugin);
app.add_plugins(RenderingPlugin);
app.add_plugins(input::InputHandlerPlugin);
app.add_plugins(CubePlugin);
app.add_plugins(SelectionPlugin);
// app.add_plugins(DebugUiPlugin); // Disabled - uses egui
// app.add_plugins(SessionUiPlugin); // Disabled - uses egui
app.add_plugins(DebugUiPlugin);
app.add_plugins(SessionUiPlugin);
app.add_systems(Startup, initialize_offline_resources);
// Run with our executor (unbounded event loop)

View File

@@ -4,8 +4,8 @@
//! and shows connected peer information.
use bevy::prelude::*;
use bevy_egui::{egui, EguiContexts, EguiPrimaryContextPass};
use libmarathon::{
debug_ui::{egui, EguiContexts, EguiPrimaryContextPass},
engine::{EngineBridge, EngineCommand},
networking::{CurrentSession, NodeVectorClock, SessionId},
};

View File

@@ -1,4 +1,4 @@
/target
chat.db
*.db
*.db-shm
*.db-wal

View File

@@ -30,6 +30,12 @@ blocking = "1.6"
crossbeam-channel = "0.5"
iroh = { workspace = true, features = ["discovery-local-network"] }
iroh-gossip.workspace = true
egui = { version = "0.33", default-features = false, features = ["bytemuck", "default_fonts"] }
arboard = "3.4"
bytemuck = { version = "1.14", features = ["derive"] }
encase = { version = "0.10", features = ["glam"] }
wgpu-types = "26.0"
itertools = "0.14"
[dev-dependencies]
tokio.workspace = true

View File

@@ -0,0 +1,81 @@
// egui paint shader (vendored): maps egui's tessellated 2D vertices into
// clip space and samples the bound texture, doing the color blend in gamma
// space to match egui's reference shader.

// Uniform mapping from egui's coordinate space into clip space
// (applied as position * scale + translation in vs_main).
struct Transform {
    scale: vec2<f32>,
    translation: vec2<f32>,
}

// Per-vertex attributes produced by egui's tessellator.
struct VertexInput {
    @location(0) position: vec2<f32>,
    @location(1) uv: vec2<f32>,
    @location(2) color: vec4<f32>,
}

struct VertexOutput {
    @builtin(position) position: vec4<f32>,
    @location(0) color: vec4<f32>,
    @location(1) uv: vec2<f32>,
}

@group(0) @binding(0) var<uniform> transform: Transform;

#ifdef BINDLESS
// Bindless path: all textures/samplers live in binding arrays, selected by a
// per-draw push-constant offset.
@group(1) @binding(0) var image_texture: binding_array<texture_2d<f32>>;
@group(1) @binding(1) var image_sampler: binding_array<sampler>;
// Fix for DX12 backend in wgpu which appears to only support struct push constants
// wgpu::backend::wgpu_core: Shader translation error for stage ShaderStages(FRAGMENT): HLSL: Unimplemented("push-constant 'offset' has non-struct type; tracked by: https://github.com/gfx-rs/wgpu/issues/5683")
struct BindlessOffset {
    offset: u32,
};
var<push_constant> offset: BindlessOffset;
#else //BINDLESS
@group(1) @binding(0) var image_texture: texture_2d<f32>;
@group(1) @binding(1) var image_sampler: sampler;
#endif // BINDLESS

// 0-1 linear from 0-1 sRGB gamma.
fn linear_from_gamma_rgb(srgb: vec3<f32>) -> vec3<f32> {
    let cutoff = srgb < vec3<f32>(0.04045);
    let lower = srgb / vec3<f32>(12.92);
    let higher = pow((srgb + vec3<f32>(0.055)) / vec3<f32>(1.055), vec3<f32>(2.4));
    // select() picks component-wise: linear segment below the cutoff,
    // power-curve segment above it.
    return select(higher, lower, cutoff);
}

// 0-1 sRGB gamma from 0-1 linear.
fn gamma_from_linear_rgb(rgb: vec3<f32>) -> vec3<f32> {
    let cutoff = rgb < vec3<f32>(0.0031308);
    let lower = rgb * vec3<f32>(12.92);
    let higher = vec3<f32>(1.055) * pow(rgb, vec3<f32>(1.0 / 2.4)) - vec3<f32>(0.055);
    return select(higher, lower, cutoff);
}

// 0-1 sRGBA gamma from 0-1 linear; alpha passes through unchanged.
fn gamma_from_linear_rgba(linear_rgba: vec4<f32>) -> vec4<f32> {
    return vec4<f32>(gamma_from_linear_rgb(linear_rgba.rgb), linear_rgba.a);
}

@vertex
fn vs_main(in: VertexInput) -> VertexOutput {
    let position = in.position * transform.scale + transform.translation;
    // Not sure why Egui does vertex color interpolation in sRGB but here we do it the same way as well.
    return VertexOutput(vec4<f32>(position, 0.0, 1.0), in.color, in.uv);
}

@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
#ifdef BINDLESS
    // Resolve the per-draw texture/sampler out of the binding arrays.
    let image_texture = image_texture[offset.offset];
    let image_sampler = image_sampler[offset.offset];
#endif
    let texture_color_linear = textureSample(image_texture, image_sampler, in.uv);
    // We un-premultiply Egui-managed textures on CPU, because Bevy doesn't premultiply its own images, so here we pre-multiply everything.
    let texture_color_linear_premultiplied = vec4<f32>(texture_color_linear.rgb * texture_color_linear.a, texture_color_linear.a);
    let texture_color_gamma_premultiplied = gamma_from_linear_rgba(texture_color_linear_premultiplied);
    // Quoting the Egui's glsl shader:
    // "We multiply the colors in gamma space, because that's the only way to get text to look right."
    let color_gamma = texture_color_gamma_premultiplied * in.color;
    return vec4<f32>(linear_from_gamma_rgb(color_gamma.rgb), color_gamma.a);
}

View File

@@ -1,245 +0,0 @@
#!/usr/bin/env -S cargo +nightly -Zscript
---
[dependencies]
rusqlite = { version = "0.37.0", features = ["bundled"] }
csv = "1.3"
chrono = "0.4"
plist = "1.8"
ns-keyed-archive = "0.1.4"
anyhow = "1.0"
---
use rusqlite::{Connection, OpenFlags};
use std::fs::File;
use csv::Writer;
use chrono::{DateTime, Utc};
use anyhow::Result;
use ns_keyed_archive::decode::from_bytes as decode_keyed_archive;
const PHONE_NUMBER: &str = "+31639132913";
const COCOA_EPOCH_OFFSET: i64 = 978307200;
/// Render an Apple Cocoa-epoch timestamp (nanoseconds since 2001-01-01)
/// as an RFC 3339 string; returns "" for a missing (zero) timestamp or an
/// out-of-range value.
fn cocoa_timestamp_to_datetime(timestamp: i64) -> String {
    // Zero means "no timestamp recorded" in chat.db.
    if timestamp == 0 {
        return String::new();
    }

    // Split nanoseconds into whole seconds + sub-second part, then shift
    // from the Cocoa epoch onto the Unix epoch.
    let unix_secs = COCOA_EPOCH_OFFSET + timestamp / 1_000_000_000;
    let sub_nanos = (timestamp % 1_000_000_000) as u32;

    match DateTime::from_timestamp(unix_secs, sub_nanos) {
        Some(dt) => {
            let utc: DateTime<Utc> = dt;
            utc.to_rfc3339()
        }
        None => String::new(),
    }
}
/// Extract the message text from an `attributedBody` blob.
///
/// Tries, in order: NSKeyedArchiver decoding, plain plist parsing (only when
/// decoding fails), and finally a printable-ASCII heuristic. Returns "" for
/// an empty blob.
fn extract_text_from_attributed_body(attributed_body: &[u8]) -> String {
    if attributed_body.is_empty() {
        return String::new();
    }

    // Preferred path: parse as an NSKeyedArchiver archive.
    match decode_keyed_archive(attributed_body) {
        Ok(value) => {
            // Try to extract the string value from the decoded archive
            if let Some(s) = extract_string_from_value(&value) {
                return s;
            }
        }
        Err(_) => {
            // If ns-keyed-archive fails, try regular plist parsing: scan the
            // "$objects" table for the first plausible user-visible string.
            if let Ok(value) = plist::from_bytes::<plist::Value>(attributed_body) {
                if let Some(dict) = value.as_dictionary() {
                    if let Some(objects) = dict.get("$objects").and_then(|v| v.as_array()) {
                        for obj in objects {
                            if let Some(s) = obj.as_string() {
                                if !s.is_empty()
                                    && s != "$null"
                                    && !s.starts_with("NS")
                                    && !s.starts_with("__k")
                                {
                                    return s.to_string();
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    // Heuristic last resort for BOTH paths. Previously only the decode-error
    // path fell back, so an archive that decoded fine but yielded no string
    // silently produced an empty message.
    extract_text_fallback(attributed_body)
}
/// Recursively search a decoded plist value for the message string.
///
/// Strings are returned directly; dictionaries are probed via the keys
/// NSAttributedString archives commonly use; arrays yield the first entry
/// that resolves to a plausible (non-empty, non-framework) string.
fn extract_string_from_value(value: &plist::Value) -> Option<String> {
    match value {
        plist::Value::String(s) => Some(s.clone()),
        plist::Value::Dictionary(dict) => {
            // Look for common NSAttributedString keys, first match wins.
            ["NSString", "NS.string", "string"]
                .into_iter()
                .find_map(|key| dict.get(key).and_then(extract_string_from_value))
        }
        plist::Value::Array(arr) => {
            // First element that extracts to a non-empty, non-framework string.
            arr.iter().find_map(|item| {
                extract_string_from_value(item)
                    .filter(|s| !s.is_empty() && !s.starts_with("NS") && !s.starts_with("__k"))
            })
        }
        _ => None,
    }
}
/// Simple fallback: extract the longest printable-ASCII run from the blob.
///
/// NSKeyedArchiver blobs intersperse the user-visible text with class names
/// (`NS*`, `__k*`) and framework markers (`streamtyped`, `NSDictionary`);
/// such runs are skipped so the longest remaining candidate wins. Leading
/// `+` characters (phone-number artifacts) and surrounding whitespace are
/// stripped from the result.
fn extract_text_fallback(attributed_body: &[u8]) -> String {
    // True for runs that look like payload text rather than archiver metadata.
    fn is_candidate(s: &str) -> bool {
        !s.starts_with("NS")
            && !s.starts_with("__k")
            && s != "streamtyped"
            && s != "NSDictionary"
    }

    let mut current_str = String::new();
    let mut best_string = String::new();

    for &byte in attributed_body {
        if (32..127).contains(&byte) {
            current_str.push(byte as char);
        } else {
            if current_str.len() > best_string.len() && is_candidate(&current_str) {
                best_string = current_str.clone();
            }
            current_str.clear();
        }
    }

    // Consider the trailing run too. Bug fix: the original accepted it
    // unconditionally, so a trailing `NS*`/`__k*` class name could displace a
    // valid shorter candidate found earlier.
    if current_str.len() > best_string.len() && is_candidate(&current_str) {
        best_string = current_str;
    }

    // Strip leading '+' only when the string is long enough that it is not
    // itself meaningful (mirrors the original length > 2 guard), then trim.
    if best_string.len() > 2 {
        best_string.trim_start_matches('+').trim().to_string()
    } else {
        best_string.trim().to_string()
    }
}
/// Export the iMessage conversation with `PHONE_NUMBER` to `lonni_messages.csv`.
///
/// Opens the macOS Messages database read-only, joins messages to their
/// handle and chat rows, decodes `attributedBody` blobs when the plain
/// `text` column is NULL, and streams everything out as CSV.
fn main() -> Result<()> {
    // chat.db lives under the user's Library; open read-only so the live
    // Messages database is never modified.
    let home = std::env::var("HOME")?;
    let chat_db_path = format!("{}/Library/Messages/chat.db", home);
    let conn = Connection::open_with_flags(&chat_db_path, OpenFlags::SQLITE_OPEN_READ_ONLY)?;

    // Match either the direct handle id or the chat identifier so both 1:1
    // and chat-level rows for this number are included, oldest first.
    let mut stmt = conn.prepare(
        "SELECT
            m.ROWID,
            m.text,
            m.attributedBody,
            m.date,
            m.date_read,
            m.date_delivered,
            m.is_from_me,
            m.is_read,
            COALESCE(h.id, 'unknown') as handle_id,
            c.chat_identifier,
            m.service
        FROM message m
        LEFT JOIN handle h ON m.handle_id = h.ROWID
        LEFT JOIN chat_message_join cmj ON m.ROWID = cmj.message_id
        LEFT JOIN chat c ON cmj.chat_id = c.ROWID
        WHERE h.id = ?1 OR c.chat_identifier = ?1
        ORDER BY m.date ASC",
    )?;

    // Row tuple mirrors the SELECT column order exactly.
    let messages = stmt.query_map([PHONE_NUMBER], |row| {
        Ok((
            row.get::<_, i64>(0)?,             // ROWID
            row.get::<_, Option<String>>(1)?,  // text
            row.get::<_, Option<Vec<u8>>>(2)?, // attributedBody
            row.get::<_, i64>(3)?,             // date
            row.get::<_, Option<i64>>(4)?,     // date_read
            row.get::<_, Option<i64>>(5)?,     // date_delivered
            row.get::<_, i32>(6)?,             // is_from_me
            row.get::<_, i32>(7)?,             // is_read
            row.get::<_, String>(8)?,          // handle_id
            row.get::<_, Option<String>>(9)?,  // chat_identifier
            row.get::<_, Option<String>>(10)?, // service
        ))
    })?;

    let file = File::create("lonni_messages.csv")?;
    let mut wtr = Writer::from_writer(file);

    // CSV header row; column names match the tuple above.
    wtr.write_record(&[
        "id",
        "date",
        "date_read",
        "date_delivered",
        "is_from_me",
        "is_read",
        "handle",
        "chat_identifier",
        "service",
        "text",
    ])?;

    let mut count = 0;
    for message in messages {
        let (
            rowid,
            text,
            attributed_body,
            date,
            date_read,
            date_delivered,
            is_from_me,
            is_read,
            handle_id,
            chat_identifier,
            service,
        ) = message?;

        // Extract text from attributedBody if text field is empty
        let message_text = text.unwrap_or_else(|| {
            attributed_body
                .as_ref()
                .map(|body| extract_text_from_attributed_body(body))
                .unwrap_or_default()
        });

        wtr.write_record(&[
            rowid.to_string(),
            cocoa_timestamp_to_datetime(date),
            date_read.map(cocoa_timestamp_to_datetime).unwrap_or_default(),
            date_delivered.map(cocoa_timestamp_to_datetime).unwrap_or_default(),
            is_from_me.to_string(),
            is_read.to_string(),
            handle_id,
            chat_identifier.unwrap_or_default(),
            service.unwrap_or_default(),
            message_text,
        ])?;

        count += 1;
        // Progress indicator for large conversations.
        if count % 1000 == 0 {
            println!("Exported {} messages...", count);
        }
    }

    wtr.flush()?;
    println!("Successfully exported {} messages to lonni_messages.csv", count);
    Ok(())
}

View File

@@ -1,152 +0,0 @@
use rusqlite::{
Connection,
OpenFlags,
Row,
params,
};
use crate::{
error::Result,
models::*,
};
/// Read-only wrapper around the macOS Messages SQLite database (`chat.db`).
pub struct ChatDb {
    // Underlying SQLite connection; opened with SQLITE_OPEN_READ_ONLY in `open`.
    conn: Connection,
}
impl ChatDb {
    /// Open a connection to the chat database in read-only mode
    pub fn open(path: &str) -> Result<Self> {
        let conn = Connection::open_with_flags(path, OpenFlags::SQLITE_OPEN_READ_ONLY)?;
        Ok(Self { conn })
    }
    /// Get messages from the conversation with +31 6 39 13 29 13
    ///
    /// Returns messages from January 1, 2024 to present from the conversation
    /// with the specified Dutch phone number.
    ///
    /// # Arguments
    ///
    /// * `start_date` - Start date (defaults to January 1, 2024 if None)
    /// * `end_date` - End date (defaults to current time if None)
    pub fn get_our_messages(
        &self,
        start_date: Option<chrono::DateTime<chrono::Utc>>,
        end_date: Option<chrono::DateTime<chrono::Utc>>,
    ) -> Result<Vec<Message>> {
        use chrono::{
            TimeZone,
            Utc,
        };
        // Default date range: January 1, 2024 to now
        let start =
            start_date.unwrap_or_else(|| Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap());
        let end = end_date.unwrap_or_else(|| Utc::now());
        // Convert to Apple timestamps (nanoseconds since 2001-01-01)
        let start_timestamp = datetime_to_apple_timestamp(start);
        let end_timestamp = datetime_to_apple_timestamp(end);
        // The phone number might be stored with or without spaces,
        // depending on how the contact/chat was originally created.
        let phone_with_spaces = "+31 6 39 13 29 13";
        let phone_without_spaces = "+31639132913";
        // Find the chat with this phone number (try both formats)
        let chat = self
            .get_chat_for_phone_number(phone_with_spaces)
            .or_else(|_| self.get_chat_for_phone_number(phone_without_spaces))?;
        // Get messages from this chat within the date range.
        // The SELECT column order is positional and must stay in sync with
        // `map_message_row` below.
        let mut stmt = self.conn.prepare(
            "SELECT m.ROWID, m.guid, m.text, m.service, m.handle_id, m.date, m.date_read, m.date_delivered,
                    m.is_from_me, m.is_read, m.is_delivered, m.is_sent, m.is_emote, m.is_audio_message,
                    m.cache_has_attachments, m.associated_message_guid, m.associated_message_type,
                    m.thread_originator_guid, m.reply_to_guid, m.is_spam
             FROM message m
             INNER JOIN chat_message_join cmj ON m.ROWID = cmj.message_id
             WHERE cmj.chat_id = ?
               AND m.date >= ?
               AND m.date <= ?
             ORDER BY m.date ASC"
        )?;
        // `collect` over `Result` short-circuits on the first row-mapping error.
        let messages = stmt
            .query_map(
                params![chat.rowid, start_timestamp, end_timestamp],
                map_message_row,
            )?
            .collect::<std::result::Result<Vec<_>, _>>()?;
        Ok(messages)
    }
    /// Helper function to find the largest chat with a specific phone number
    fn get_chat_for_phone_number(&self, phone_number: &str) -> Result<Chat> {
        // A handle can appear in several chats (1:1 plus group chats); the
        // GROUP BY + ORDER BY msg_count DESC + LIMIT 1 picks the chat with
        // the most messages for this handle.
        let mut stmt = self.conn.prepare(
            "SELECT c.ROWID, c.guid, c.chat_identifier, c.service_name, c.display_name,
                    c.group_id, c.room_name, c.is_archived, c.is_filtered,
                    c.last_read_message_timestamp, COUNT(cmj.message_id) as msg_count
             FROM chat c
             INNER JOIN chat_handle_join chj ON c.ROWID = chj.chat_id
             INNER JOIN handle h ON chj.handle_id = h.ROWID
             INNER JOIN chat_message_join cmj ON c.ROWID = cmj.chat_id
             WHERE h.id = ?
             GROUP BY c.ROWID
             ORDER BY msg_count DESC
             LIMIT 1",
        )?;
        let chat = stmt.query_row(params![phone_number], |row| {
            Ok(Chat {
                rowid: row.get(0)?,
                guid: row.get(1)?,
                chat_identifier: row.get(2)?,
                service_name: row.get(3)?,
                display_name: row.get(4)?,
                group_id: row.get(5)?,
                room_name: row.get(6)?,
                // SQLite has no bool type; flags are stored as integers.
                is_archived: row.get::<_, i64>(7)? != 0,
                is_filtered: row.get::<_, i64>(8)? != 0,
                last_read_message_timestamp: row
                    .get::<_, Option<i64>>(9)?
                    .map(apple_timestamp_to_datetime),
            })
        })?;
        Ok(chat)
    }
}
/// Maps a raw SQLite row onto a [`Message`].
///
/// Column indices are positional and must stay in sync with the SELECT list
/// in `ChatDb::get_our_messages`. SQLite stores booleans as integers and
/// timestamps as Apple-epoch integers; both are decoded here.
fn map_message_row(row: &Row) -> rusqlite::Result<Message> {
    // Decodes a 0/1 integer column into a bool.
    let flag = |idx: usize| row.get::<_, i64>(idx).map(|raw| raw != 0);
    Ok(Message {
        rowid: row.get(0)?,
        guid: row.get(1)?,
        text: row.get(2)?,
        service: row.get(3)?,
        handle_id: row.get(4)?,
        // Nullable Apple-epoch timestamps become Option<DateTime>.
        date: row.get::<_, Option<i64>>(5)?.map(apple_timestamp_to_datetime),
        date_read: row.get::<_, Option<i64>>(6)?.map(apple_timestamp_to_datetime),
        date_delivered: row.get::<_, Option<i64>>(7)?.map(apple_timestamp_to_datetime),
        is_from_me: flag(8)?,
        is_read: flag(9)?,
        is_delivered: flag(10)?,
        is_sent: flag(11)?,
        is_emote: flag(12)?,
        is_audio_message: flag(13)?,
        cache_has_attachments: flag(14)?,
        associated_message_guid: row.get(15)?,
        associated_message_type: row.get(16)?,
        thread_originator_guid: row.get(17)?,
        reply_to_guid: row.get(18)?,
        is_spam: flag(19)?,
    })
}

View File

@@ -0,0 +1,283 @@
// Copyright (c) 2021 Vladyslav Batyrenko
// SPDX-License-Identifier: MIT
//
// This code is vendored from bevy_egui: https://github.com/vladbat00/bevy_egui
// Original author: Vladyslav Batyrenko <vladyslav.batyrenko@gmail.com>
use bevy::ecs::{
entity::Entity,
query::{QueryData, QueryEntityError, QueryFilter, QueryItem, ROQueryItem},
system::Query,
};
use bevy::input::keyboard::{Key, KeyCode};
/// Translates [`egui::CursorIcon`] into [`bevy::window::SystemCursorIcon`].
#[inline(always)]
pub fn egui_to_winit_cursor_icon(
cursor_icon: egui::CursorIcon,
) -> Option<bevy::window::SystemCursorIcon> {
match cursor_icon {
egui::CursorIcon::Default => Some(bevy::window::SystemCursorIcon::Default),
egui::CursorIcon::PointingHand => Some(bevy::window::SystemCursorIcon::Pointer),
egui::CursorIcon::ResizeHorizontal => Some(bevy::window::SystemCursorIcon::EwResize),
egui::CursorIcon::ResizeNeSw => Some(bevy::window::SystemCursorIcon::NeswResize),
egui::CursorIcon::ResizeNwSe => Some(bevy::window::SystemCursorIcon::NwseResize),
egui::CursorIcon::ResizeVertical => Some(bevy::window::SystemCursorIcon::NsResize),
egui::CursorIcon::Text => Some(bevy::window::SystemCursorIcon::Text),
egui::CursorIcon::Grab => Some(bevy::window::SystemCursorIcon::Grab),
egui::CursorIcon::Grabbing => Some(bevy::window::SystemCursorIcon::Grabbing),
egui::CursorIcon::ContextMenu => Some(bevy::window::SystemCursorIcon::ContextMenu),
egui::CursorIcon::Help => Some(bevy::window::SystemCursorIcon::Help),
egui::CursorIcon::Progress => Some(bevy::window::SystemCursorIcon::Progress),
egui::CursorIcon::Wait => Some(bevy::window::SystemCursorIcon::Wait),
egui::CursorIcon::Cell => Some(bevy::window::SystemCursorIcon::Cell),
egui::CursorIcon::Crosshair => Some(bevy::window::SystemCursorIcon::Crosshair),
egui::CursorIcon::VerticalText => Some(bevy::window::SystemCursorIcon::VerticalText),
egui::CursorIcon::Alias => Some(bevy::window::SystemCursorIcon::Alias),
egui::CursorIcon::Copy => Some(bevy::window::SystemCursorIcon::Copy),
egui::CursorIcon::Move => Some(bevy::window::SystemCursorIcon::Move),
egui::CursorIcon::NoDrop => Some(bevy::window::SystemCursorIcon::NoDrop),
egui::CursorIcon::NotAllowed => Some(bevy::window::SystemCursorIcon::NotAllowed),
egui::CursorIcon::AllScroll => Some(bevy::window::SystemCursorIcon::AllScroll),
egui::CursorIcon::ZoomIn => Some(bevy::window::SystemCursorIcon::ZoomIn),
egui::CursorIcon::ZoomOut => Some(bevy::window::SystemCursorIcon::ZoomOut),
egui::CursorIcon::ResizeEast => Some(bevy::window::SystemCursorIcon::EResize),
egui::CursorIcon::ResizeSouthEast => Some(bevy::window::SystemCursorIcon::SeResize),
egui::CursorIcon::ResizeSouth => Some(bevy::window::SystemCursorIcon::SResize),
egui::CursorIcon::ResizeSouthWest => Some(bevy::window::SystemCursorIcon::SwResize),
egui::CursorIcon::ResizeWest => Some(bevy::window::SystemCursorIcon::WResize),
egui::CursorIcon::ResizeNorthWest => Some(bevy::window::SystemCursorIcon::NwResize),
egui::CursorIcon::ResizeNorth => Some(bevy::window::SystemCursorIcon::NResize),
egui::CursorIcon::ResizeNorthEast => Some(bevy::window::SystemCursorIcon::NeResize),
egui::CursorIcon::ResizeColumn => Some(bevy::window::SystemCursorIcon::ColResize),
egui::CursorIcon::ResizeRow => Some(bevy::window::SystemCursorIcon::RowResize),
egui::CursorIcon::None => None,
}
}
/// Matches the implementation of <https://github.com/emilk/egui/blob/68b3ef7f6badfe893d3bbb1f791b481069d807d9/crates/egui-winit/src/lib.rs#L1005>.
#[inline(always)]
pub fn bevy_to_egui_key(key: &Key) -> Option<egui::Key> {
let key = match key {
Key::Character(str) => return egui::Key::from_name(str.as_str()),
Key::Unidentified(_) | Key::Dead(_) => return None,
Key::Enter => egui::Key::Enter,
Key::Tab => egui::Key::Tab,
Key::Space => egui::Key::Space,
Key::ArrowDown => egui::Key::ArrowDown,
Key::ArrowLeft => egui::Key::ArrowLeft,
Key::ArrowRight => egui::Key::ArrowRight,
Key::ArrowUp => egui::Key::ArrowUp,
Key::End => egui::Key::End,
Key::Home => egui::Key::Home,
Key::PageDown => egui::Key::PageDown,
Key::PageUp => egui::Key::PageUp,
Key::Backspace => egui::Key::Backspace,
Key::Delete => egui::Key::Delete,
Key::Insert => egui::Key::Insert,
Key::Escape => egui::Key::Escape,
Key::F1 => egui::Key::F1,
Key::F2 => egui::Key::F2,
Key::F3 => egui::Key::F3,
Key::F4 => egui::Key::F4,
Key::F5 => egui::Key::F5,
Key::F6 => egui::Key::F6,
Key::F7 => egui::Key::F7,
Key::F8 => egui::Key::F8,
Key::F9 => egui::Key::F9,
Key::F10 => egui::Key::F10,
Key::F11 => egui::Key::F11,
Key::F12 => egui::Key::F12,
Key::F13 => egui::Key::F13,
Key::F14 => egui::Key::F14,
Key::F15 => egui::Key::F15,
Key::F16 => egui::Key::F16,
Key::F17 => egui::Key::F17,
Key::F18 => egui::Key::F18,
Key::F19 => egui::Key::F19,
Key::F20 => egui::Key::F20,
_ => return None,
};
Some(key)
}
/// Matches the implementation of <https://github.com/emilk/egui/blob/68b3ef7f6badfe893d3bbb1f791b481069d807d9/crates/egui-winit/src/lib.rs#L1080>.
#[inline(always)]
pub fn bevy_to_egui_physical_key(key: &KeyCode) -> Option<egui::Key> {
let key = match key {
KeyCode::ArrowDown => egui::Key::ArrowDown,
KeyCode::ArrowLeft => egui::Key::ArrowLeft,
KeyCode::ArrowRight => egui::Key::ArrowRight,
KeyCode::ArrowUp => egui::Key::ArrowUp,
KeyCode::Escape => egui::Key::Escape,
KeyCode::Tab => egui::Key::Tab,
KeyCode::Backspace => egui::Key::Backspace,
KeyCode::Enter | KeyCode::NumpadEnter => egui::Key::Enter,
KeyCode::Insert => egui::Key::Insert,
KeyCode::Delete => egui::Key::Delete,
KeyCode::Home => egui::Key::Home,
KeyCode::End => egui::Key::End,
KeyCode::PageUp => egui::Key::PageUp,
KeyCode::PageDown => egui::Key::PageDown,
// Punctuation
KeyCode::Space => egui::Key::Space,
KeyCode::Comma => egui::Key::Comma,
KeyCode::Period => egui::Key::Period,
// KeyCode::Colon => egui::Key::Colon, // NOTE: there is no physical colon key on an american keyboard
KeyCode::Semicolon => egui::Key::Semicolon,
KeyCode::Backslash => egui::Key::Backslash,
KeyCode::Slash | KeyCode::NumpadDivide => egui::Key::Slash,
KeyCode::BracketLeft => egui::Key::OpenBracket,
KeyCode::BracketRight => egui::Key::CloseBracket,
KeyCode::Backquote => egui::Key::Backtick,
KeyCode::Cut => egui::Key::Cut,
KeyCode::Copy => egui::Key::Copy,
KeyCode::Paste => egui::Key::Paste,
KeyCode::Minus | KeyCode::NumpadSubtract => egui::Key::Minus,
KeyCode::NumpadAdd => egui::Key::Plus,
KeyCode::Equal => egui::Key::Equals,
KeyCode::Digit0 | KeyCode::Numpad0 => egui::Key::Num0,
KeyCode::Digit1 | KeyCode::Numpad1 => egui::Key::Num1,
KeyCode::Digit2 | KeyCode::Numpad2 => egui::Key::Num2,
KeyCode::Digit3 | KeyCode::Numpad3 => egui::Key::Num3,
KeyCode::Digit4 | KeyCode::Numpad4 => egui::Key::Num4,
KeyCode::Digit5 | KeyCode::Numpad5 => egui::Key::Num5,
KeyCode::Digit6 | KeyCode::Numpad6 => egui::Key::Num6,
KeyCode::Digit7 | KeyCode::Numpad7 => egui::Key::Num7,
KeyCode::Digit8 | KeyCode::Numpad8 => egui::Key::Num8,
KeyCode::Digit9 | KeyCode::Numpad9 => egui::Key::Num9,
KeyCode::KeyA => egui::Key::A,
KeyCode::KeyB => egui::Key::B,
KeyCode::KeyC => egui::Key::C,
KeyCode::KeyD => egui::Key::D,
KeyCode::KeyE => egui::Key::E,
KeyCode::KeyF => egui::Key::F,
KeyCode::KeyG => egui::Key::G,
KeyCode::KeyH => egui::Key::H,
KeyCode::KeyI => egui::Key::I,
KeyCode::KeyJ => egui::Key::J,
KeyCode::KeyK => egui::Key::K,
KeyCode::KeyL => egui::Key::L,
KeyCode::KeyM => egui::Key::M,
KeyCode::KeyN => egui::Key::N,
KeyCode::KeyO => egui::Key::O,
KeyCode::KeyP => egui::Key::P,
KeyCode::KeyQ => egui::Key::Q,
KeyCode::KeyR => egui::Key::R,
KeyCode::KeyS => egui::Key::S,
KeyCode::KeyT => egui::Key::T,
KeyCode::KeyU => egui::Key::U,
KeyCode::KeyV => egui::Key::V,
KeyCode::KeyW => egui::Key::W,
KeyCode::KeyX => egui::Key::X,
KeyCode::KeyY => egui::Key::Y,
KeyCode::KeyZ => egui::Key::Z,
KeyCode::F1 => egui::Key::F1,
KeyCode::F2 => egui::Key::F2,
KeyCode::F3 => egui::Key::F3,
KeyCode::F4 => egui::Key::F4,
KeyCode::F5 => egui::Key::F5,
KeyCode::F6 => egui::Key::F6,
KeyCode::F7 => egui::Key::F7,
KeyCode::F8 => egui::Key::F8,
KeyCode::F9 => egui::Key::F9,
KeyCode::F10 => egui::Key::F10,
KeyCode::F11 => egui::Key::F11,
KeyCode::F12 => egui::Key::F12,
KeyCode::F13 => egui::Key::F13,
KeyCode::F14 => egui::Key::F14,
KeyCode::F15 => egui::Key::F15,
KeyCode::F16 => egui::Key::F16,
KeyCode::F17 => egui::Key::F17,
KeyCode::F18 => egui::Key::F18,
KeyCode::F19 => egui::Key::F19,
KeyCode::F20 => egui::Key::F20,
_ => return None,
};
Some(key)
}
/// Converts [`bevy::math::Vec2`] into [`egui::Pos2`] component-wise.
#[inline(always)]
pub fn vec2_into_egui_pos2(vec: bevy::math::Vec2) -> egui::Pos2 {
    egui::pos2(vec.x, vec.y)
}
/// Converts [`bevy::math::Vec2`] into [`egui::Vec2`] component-wise.
#[inline(always)]
pub fn vec2_into_egui_vec2(vec: bevy::math::Vec2) -> egui::Vec2 {
    egui::vec2(vec.x, vec.y)
}
/// Converts [`bevy::math::Rect`] into [`egui::Rect`].
#[inline(always)]
pub fn rect_into_egui_rect(rect: bevy::math::Rect) -> egui::Rect {
    // Corner-by-corner conversion; no min/max normalization is performed.
    egui::Rect::from_min_max(vec2_into_egui_pos2(rect.min), vec2_into_egui_pos2(rect.max))
}
/// Converts [`egui::Pos2`] into [`bevy::math::Vec2`] component-wise.
#[inline(always)]
pub fn egui_pos2_into_vec2(pos: egui::Pos2) -> bevy::math::Vec2 {
    bevy::math::vec2(pos.x, pos.y)
}
/// Converts [`egui::Vec2`] into [`bevy::math::Vec2`] component-wise.
#[inline(always)]
pub fn egui_vec2_into_vec2(pos: egui::Vec2) -> bevy::math::Vec2 {
    bevy::math::vec2(pos.x, pos.y)
}
/// Converts [`egui::Rect`] into [`bevy::math::Rect`].
#[inline(always)]
pub fn egui_rect_into_rect(rect: egui::Rect) -> bevy::math::Rect {
    let min = egui_pos2_into_vec2(rect.min);
    let max = egui_pos2_into_vec2(rect.max);
    // Field-wise construction mirrors the egui rect exactly (no normalization).
    bevy::math::Rect { min, max }
}
/// Convenience extension over [`Query`] that turns "entity no longer exists"
/// errors into `None` instead of an error value.
pub(crate) trait QueryHelper<'w> {
    /// The query data of the underlying [`Query`].
    type QueryData: bevy::ecs::query::QueryData;
    /// Like [`Query::get`], but returns `None` when the entity does not exist.
    fn get_some(&self, entity: Entity) -> Option<ROQueryItem<'_, 'w, Self::QueryData>>;
    /// Like [`Query::get_mut`], but returns `None` when the entity does not exist.
    fn get_some_mut(&mut self, entity: Entity) -> Option<QueryItem<'_, 'w, Self::QueryData>>;
}
impl<'w, D: QueryData, F: QueryFilter> QueryHelper<'w> for Query<'_, 'w, D, F> {
    type QueryData = D;

    /// Read-only lookup: `None` if the entity is gone; any other query error
    /// (e.g. the entity does not match the filter) is a programming error and
    /// panics via `unwrap`.
    fn get_some(&self, entity: Entity) -> Option<ROQueryItem<'_, 'w, Self::QueryData>> {
        let result = self.get(entity);
        if matches!(&result, Err(QueryEntityError::EntityDoesNotExist(_))) {
            return None;
        }
        Some(result.unwrap())
    }

    /// Mutable counterpart of [`QueryHelper::get_some`] with identical
    /// error-handling semantics.
    fn get_some_mut(&mut self, entity: Entity) -> Option<QueryItem<'_, 'w, Self::QueryData>> {
        let result = self.get_mut(entity);
        if matches!(&result, Err(QueryEntityError::EntityDoesNotExist(_))) {
            return None;
        }
        Some(result.unwrap())
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,967 @@
// Copyright (c) 2021 Vladyslav Batyrenko
// SPDX-License-Identifier: MIT
//
// This code is vendored from bevy_egui: https://github.com/vladbat00/bevy_egui
// Original author: Vladyslav Batyrenko <vladyslav.batyrenko@gmail.com>
// Adapted for Marathon engine with simplified feature set (desktop-only, single window).
#![allow(clippy::type_complexity)]
//! Debug UI integration using egui for the Marathon engine.
//!
//! This is a vendored and simplified version of bevy_egui, stripped down to support:
//! - Desktop platforms only (no WASM/web)
//! - Single window
//! - No picking/accessibility features
//! - Works with Marathon's custom executor
/// Helpers for converting Bevy types into Egui ones and vice versa.
pub mod helpers;
/// Systems for translating Bevy input messages into Egui input.
pub mod input;
/// Systems for handling Egui output.
pub mod output;
/// Rendering Egui with [`bevy_render`].
pub mod render;
pub use egui;
use self::input::*;
use bevy::app::prelude::*;
use bevy::asset::{AssetEvent, AssetId, Assets, Handle, load_internal_asset};
use bevy::prelude::{Deref, DerefMut, Shader};
use bevy::ecs::{
prelude::*,
query::{QueryData, QuerySingleError},
schedule::{InternedScheduleLabel, ScheduleLabel},
system::SystemParam,
};
use bevy::image::{Image, ImageSampler};
use bevy::input::InputSystems;
#[allow(unused_imports)]
use bevy::log;
use bevy::platform::collections::{HashMap, HashSet};
use bevy::reflect::Reflect;
use bevy::render::{
ExtractSchedule, Render, RenderApp, RenderSystems,
extract_resource::{ExtractResource, ExtractResourcePlugin},
render_resource::SpecializedRenderPipelines,
};
use bevy::window::CursorIcon;
use output::process_output_system;
/// Adds all Egui resources and render graph nodes.
pub struct EguiPlugin {
    /// The bindless mode array size for egui rendering.
    ///
    /// `None` leaves bindless mode disabled; the value is forwarded to the
    /// render app as `render::EguiRenderSettings` in `Plugin::finish`.
    pub bindless_mode_array_size: Option<std::num::NonZero<u32>>,
}
impl Default for EguiPlugin {
    fn default() -> Self {
        Self {
            // Bindless rendering is opt-in.
            bindless_mode_array_size: None,
        }
    }
}
/// A resource for storing global plugin settings.
#[derive(Clone, Debug, Resource, Reflect)]
pub struct EguiGlobalSettings {
    /// Set this to `false` if you want to control the creation of [`EguiContext`] instances manually.
    pub auto_create_primary_context: bool,
    /// Controls running of the input systems.
    pub input_system_settings: EguiInputSystemSettings,
    /// Controls whether `bevy_egui` updates [`CursorIcon`], enabled by default.
    pub enable_cursor_icon_updates: bool,
    /// Controls whether focused non-window contexts can be updated (disabled for simplicity).
    #[reflect(ignore)]
    pub enable_focused_non_window_context_updates: bool,
}
impl Default for EguiGlobalSettings {
    fn default() -> Self {
        Self {
            auto_create_primary_context: true,
            input_system_settings: EguiInputSystemSettings::default(),
            enable_cursor_icon_updates: true,
            // This vendored build targets a single window only, so focused
            // non-window context updates stay off by default.
            enable_focused_non_window_context_updates: false,
        }
    }
}
/// A component for storing Egui context settings.
#[derive(Clone, Debug, Component, Reflect)]
pub struct EguiContextSettings {
    /// If set to `true`, a user is expected to call [`egui::Context::run`] manually.
    pub run_manually: bool,
    /// Global scale factor for Egui widgets (`1.0` by default).
    pub scale_factor: f32,
    /// Controls running of the input systems.
    pub input_system_settings: EguiInputSystemSettings,
    /// Controls whether updates [`CursorIcon`], enabled by default.
    pub enable_cursor_icon_updates: bool,
    /// Controls whether IME (Input Method Editor) is enabled (disabled for simplicity).
    #[reflect(ignore)]
    pub enable_ime: bool,
}
impl Default for EguiContextSettings {
    fn default() -> Self {
        Self {
            run_manually: false,
            scale_factor: 1.0,
            input_system_settings: EguiInputSystemSettings::default(),
            enable_cursor_icon_updates: true,
            enable_ime: false,
        }
    }
}
// Equality deliberately compares `scale_factor` only.
// NOTE(review): all other fields are ignored by `==`; confirm no caller relies
// on full structural equality before extending this type.
impl PartialEq for EguiContextSettings {
    fn eq(&self, other: &Self) -> bool {
        self.scale_factor == other.scale_factor
    }
}
#[derive(Clone, Debug, Reflect, PartialEq, Eq)]
/// All the systems are enabled by default.
pub struct EguiInputSystemSettings {
    /// Controls running of the [`write_modifiers_keys_state_system`] system.
    pub run_write_modifiers_keys_state_system: bool,
    /// Controls running of the [`write_window_pointer_moved_messages_system`] system.
    pub run_write_window_pointer_moved_messages_system: bool,
    /// Controls running of the [`write_pointer_button_messages_system`] system.
    pub run_write_pointer_button_messages_system: bool,
    /// Controls running of the [`write_window_touch_messages_system`] system.
    pub run_write_window_touch_messages_system: bool,
    /// Controls running of the [`write_mouse_wheel_messages_system`] system.
    pub run_write_mouse_wheel_messages_system: bool,
    /// Controls running of the [`write_keyboard_input_messages_system`] system.
    pub run_write_keyboard_input_messages_system: bool,
    /// Disabled for simplicity (non-window contexts)
    #[reflect(ignore)]
    pub run_write_non_window_pointer_moved_messages_system: bool,
    /// Disabled for simplicity (non-window contexts)
    #[reflect(ignore)]
    pub run_write_non_window_touch_messages_system: bool,
    /// Disabled for simplicity (IME)
    #[reflect(ignore)]
    pub run_write_ime_messages_system: bool,
    /// Disabled for simplicity (file drag and drop)
    #[reflect(ignore)]
    pub run_write_file_dnd_messages_system: bool,
}
impl Default for EguiInputSystemSettings {
    fn default() -> Self {
        Self {
            run_write_modifiers_keys_state_system: true,
            run_write_window_pointer_moved_messages_system: true,
            run_write_pointer_button_messages_system: true,
            run_write_window_touch_messages_system: true,
            run_write_mouse_wheel_messages_system: true,
            run_write_keyboard_input_messages_system: true,
            // The features below are stubbed out in this vendored,
            // desktop-only build, so their systems default to off.
            run_write_non_window_pointer_moved_messages_system: false,
            run_write_non_window_touch_messages_system: false,
            run_write_ime_messages_system: false,
            run_write_file_dnd_messages_system: false,
        }
    }
}
/// Use this schedule to run your UI systems with the primary Egui context.
#[derive(ScheduleLabel, Clone, Debug, PartialEq, Eq, Hash)]
pub struct EguiPrimaryContextPass;
/// A marker component for a primary Egui context.
// Requiring `EguiMultipassSchedule` below means every primary context runs
// the `EguiPrimaryContextPass` schedule.
#[derive(Component, Clone)]
#[require(EguiMultipassSchedule::new(EguiPrimaryContextPass))]
pub struct PrimaryEguiContext;
/// Add this component to your Egui context to enable multi-pass support.
#[derive(Component, Clone)]
#[require(EguiContext)]
pub struct EguiMultipassSchedule(pub InternedScheduleLabel);
impl EguiMultipassSchedule {
    /// Constructs the component from a schedule label.
    pub fn new(schedule: impl ScheduleLabel) -> Self {
        Self(schedule.intern())
    }
}
/// Is used for storing Egui context input.
#[derive(Component, Clone, Debug, Default, Deref, DerefMut)]
pub struct EguiInput(pub egui::RawInput);
/// Intermediate output buffer generated on an Egui pass end.
#[derive(Component, Clone, Default, Deref, DerefMut)]
pub struct EguiFullOutput(pub Option<egui::FullOutput>);
/// Is used for storing Egui shapes and textures delta.
#[derive(Component, Clone, Default, Debug)]
pub struct EguiRenderOutput {
    /// Pairs of rectangles and paint commands.
    pub paint_jobs: Vec<egui::ClippedPrimitive>,
    /// The change in egui textures since last frame.
    pub textures_delta: egui::TexturesDelta,
}
impl EguiRenderOutput {
    /// Returns `true` if the output has no Egui shapes and no textures delta.
    ///
    /// An empty output means there is nothing for the renderer to draw or
    /// upload this frame.
    pub fn is_empty(&self) -> bool {
        self.paint_jobs.is_empty() && self.textures_delta.is_empty()
    }
}
/// Stores last Egui output.
#[derive(Component, Clone, Default)]
pub struct EguiOutput {
    /// The field gets updated during [`process_output_system`].
    pub platform_output: egui::PlatformOutput,
}
/// A component for storing `bevy_egui` context.
// Spawning an `EguiContext` auto-inserts the full set of per-context state
// components listed below.
#[derive(Clone, Component, Default)]
#[require(
    EguiContextSettings,
    EguiInput,
    EguiContextPointerPosition,
    EguiContextPointerTouchId,
    EguiFullOutput,
    EguiRenderOutput,
    EguiOutput,
    CursorIcon
)]
pub struct EguiContext {
    // The wrapped egui context; access it through `get`/`get_mut`.
    ctx: egui::Context,
}
impl EguiContext {
    /// Borrows the underlying Egui context mutably.
    #[must_use]
    pub fn get_mut(&mut self) -> &mut egui::Context {
        &mut self.ctx
    }
    /// Borrows the underlying Egui context immutably.
    #[must_use]
    pub fn get(&self) -> &egui::Context {
        &self.ctx
    }
}
// Query over every Egui context together with its optional primary-context
// marker; used by the `EguiContexts` system param below.
type EguiContextsQuery<'w, 's> = Query<
    'w,
    's,
    (
        &'static mut EguiContext,
        Option<&'static PrimaryEguiContext>,
    ),
>;
#[derive(SystemParam)]
/// A helper SystemParam that provides a way to get [`EguiContext`] with less boilerplate.
pub struct EguiContexts<'w, 's> {
    q: EguiContextsQuery<'w, 's>,
    // User-texture registry backing `add_image`/`remove_image`/`image_id`.
    user_textures: ResMut<'w, EguiUserTextures>,
}
impl EguiContexts<'_, '_> {
    /// Returns an Egui context with the [`PrimaryEguiContext`] component.
    ///
    /// Mirrors `single_mut` semantics over the subset of contexts carrying the
    /// primary marker: `NoEntities` when none is marked, `MultipleEntities`
    /// when more than one is.
    #[inline]
    pub fn ctx_mut(&mut self) -> Result<&mut egui::Context, QuerySingleError> {
        self.q.iter_mut().fold(
            Err(QuerySingleError::NoEntities("".into())),
            |result, (ctx, primary)| match (&result, primary) {
                // Already ambiguous: the "multiple" error is sticky.
                (Err(QuerySingleError::MultipleEntities(_)), _) => result,
                // First primary context found.
                (Err(QuerySingleError::NoEntities(_)), Some(_)) => Ok(ctx.into_inner().get_mut()),
                (Err(QuerySingleError::NoEntities(_)), None) => result,
                // A second primary context makes the lookup ambiguous.
                (Ok(_), Some(_)) => Err(QuerySingleError::MultipleEntities("".into())),
                (Ok(_), None) => result,
            },
        )
    }
    /// Can accept either a strong or a weak handle.
    pub fn add_image(&mut self, image: EguiTextureHandle) -> egui::TextureId {
        self.user_textures.add_image(image)
    }
    /// Removes the image handle and an Egui texture id associated with it.
    #[track_caller]
    pub fn remove_image(&mut self, image: impl Into<AssetId<Image>>) -> Option<egui::TextureId> {
        self.user_textures.remove_image(image)
    }
    /// Returns an associated Egui texture id.
    #[must_use]
    #[track_caller]
    pub fn image_id(&self, image: impl Into<AssetId<Image>>) -> Option<egui::TextureId> {
        self.user_textures.image_id(image)
    }
}
/// A resource for storing user textures.
#[derive(Clone, Resource, ExtractResource)]
pub struct EguiUserTextures {
    // Maps an image asset to its (handle, egui user-texture id) pair.
    textures: HashMap<AssetId<Image>, (EguiTextureHandle, u64)>,
    // Stack of reusable ids. Invariant: never empty — the last slot always
    // holds the next fresh id (maintained by `add_image`).
    free_list: Vec<u64>,
}
impl Default for EguiUserTextures {
    fn default() -> Self {
        Self {
            textures: HashMap::default(),
            // Seed with id 0 so the first `add_image` call can allocate.
            free_list: vec![0],
        }
    }
}
impl EguiUserTextures {
    /// Adds an image and returns its texture ID.
    ///
    /// Registering an already-known image returns its existing id.
    pub fn add_image(&mut self, image: EguiTextureHandle) -> egui::TextureId {
        // The `entry` call borrows `self.textures` while the closure mutates
        // `self.free_list`; disjoint closure field captures keep this legal.
        let (_, id) = *self.textures.entry(image.asset_id()).or_insert_with(|| {
            let id = self
                .free_list
                .pop()
                .expect("free list must contain at least 1 element");
            log::debug!("Add a new image (id: {}, handle: {:?})", id, image);
            // Preserve the invariant that `free_list` is never empty: if we
            // just consumed the last reusable id, stage the next fresh one.
            if self.free_list.is_empty() {
                self.free_list.push(id.checked_add(1).expect("out of ids"));
            }
            (image, id)
        });
        egui::TextureId::User(id)
    }
    /// Removes the image handle and an Egui texture id associated with it.
    pub fn remove_image(&mut self, image: impl Into<AssetId<Image>>) -> Option<egui::TextureId> {
        let image = image.into();
        let id = self.textures.remove(&image);
        log::debug!("Remove image (id: {:?}, handle: {:?})", id, image);
        // Recycle the freed id so future `add_image` calls can reuse it.
        if let Some((_, id)) = id {
            self.free_list.push(id);
        }
        id.map(|(_, id)| egui::TextureId::User(id))
    }
    /// Returns an associated Egui texture id.
    #[must_use]
    pub fn image_id(&self, image: impl Into<AssetId<Image>>) -> Option<egui::TextureId> {
        let image = image.into();
        self.textures
            .get(&image)
            .map(|&(_, id)| egui::TextureId::User(id))
    }
}
/// A wrapper type for an image handle or an asset id.
///
/// NOTE(review): the `Weak` variant stores only an [`AssetId`], so it
/// presumably does not keep the underlying asset loaded — confirm against
/// bevy_asset's handle semantics.
#[derive(Clone, Debug)]
pub enum EguiTextureHandle {
    /// Strong handle to an image.
    Strong(Handle<Image>),
    /// Weak handle to an image.
    Weak(AssetId<Image>),
}
impl EguiTextureHandle {
/// Returns an [`AssetId`] of a wrapped handle.
pub fn asset_id(&self) -> AssetId<Image> {
match self {
EguiTextureHandle::Strong(handle) => handle.id(),
EguiTextureHandle::Weak(asset_id) => *asset_id,
}
}
}
impl From<EguiTextureHandle> for AssetId<Image> {
fn from(value: EguiTextureHandle) -> Self {
value.asset_id()
}
}
/// Stores physical size and scale factor.
#[derive(Component, Debug, Default, Clone, Copy, PartialEq)]
pub struct RenderComputedScaleFactor {
    /// Scale factor.
    pub scale_factor: f32,
}
/// The names of debug_ui nodes.
pub mod node {
    /// The main egui pass.
    pub const EGUI_PASS: &str = "egui_pass";
}
#[derive(SystemSet, Clone, Hash, Debug, Eq, PartialEq)]
/// The plugin startup system sets.
pub enum EguiStartupSet {
    /// Initializes a primary Egui context.
    InitContexts,
}
/// System sets that run during the [`PreUpdate`] schedule.
// Variants run in declaration order; they are chained in `EguiPlugin::build`.
#[derive(SystemSet, Clone, Hash, Debug, Eq, PartialEq)]
pub enum EguiPreUpdateSet {
    /// Initializes Egui contexts for newly created render targets.
    InitContexts,
    /// Reads Egui inputs and writes them into the [`EguiInput`] resource.
    ProcessInput,
    /// Begins the `egui` pass.
    BeginPass,
}
/// Subsets of the [`EguiPreUpdateSet::ProcessInput`] set.
// Variants run in declaration order; they are chained in `EguiPlugin::build`.
#[derive(SystemSet, Clone, Hash, Debug, Eq, PartialEq)]
pub enum EguiInputSet {
    /// Reads key modifiers state and pointer positions.
    InitReading,
    /// Processes window mouse button click and touch messages.
    FocusContext,
    /// Processes rest of the messages.
    ReadBevyMessages,
    /// Feeds all the events into [`EguiInput`].
    WriteEguiEvents,
}
/// System sets that run during the [`PostUpdate`] schedule.
// Variants run in declaration order; they are chained in `EguiPlugin::build`.
#[derive(SystemSet, Clone, Hash, Debug, Eq, PartialEq)]
pub enum EguiPostUpdateSet {
    /// Ends Egui pass.
    EndPass,
    /// Processes Egui output, reads paint jobs for the renderer.
    ProcessOutput,
    /// Post-processing of Egui output.
    PostProcessOutput,
}
impl Plugin for EguiPlugin {
fn build(&self, app: &mut App) {
app.register_type::<EguiGlobalSettings>();
app.register_type::<EguiContextSettings>();
app.init_resource::<EguiGlobalSettings>();
app.init_resource::<ModifierKeysState>();
app.init_resource::<EguiWantsInput>();
app.init_resource::<WindowToEguiContextMap>();
app.add_message::<EguiInputEvent>();
app.add_message::<input::EguiFileDragAndDropMessage>();
app.init_resource::<EguiManagedTextures>();
app.init_resource::<EguiUserTextures>();
app.add_plugins(ExtractResourcePlugin::<EguiUserTextures>::default());
app.add_plugins(ExtractResourcePlugin::<
render::systems::ExtractedEguiManagedTextures,
>::default());
app.configure_sets(
PreUpdate,
(
EguiPreUpdateSet::InitContexts,
EguiPreUpdateSet::ProcessInput.after(InputSystems),
EguiPreUpdateSet::BeginPass,
)
.chain(),
);
app.configure_sets(
PreUpdate,
(
EguiInputSet::InitReading,
EguiInputSet::FocusContext,
EguiInputSet::ReadBevyMessages,
EguiInputSet::WriteEguiEvents,
)
.chain(),
);
app.configure_sets(
PostUpdate,
(
EguiPostUpdateSet::EndPass,
EguiPostUpdateSet::ProcessOutput,
EguiPostUpdateSet::PostProcessOutput,
)
.chain(),
);
// Startup systems
app.add_systems(
PreStartup,
(
(setup_primary_egui_context_system, ApplyDeferred)
.run_if(|s: Res<EguiGlobalSettings>| s.auto_create_primary_context),
update_ui_size_and_scale_system,
)
.chain()
.in_set(EguiStartupSet::InitContexts),
);
// PreUpdate systems
app.add_systems(
PreUpdate,
(
setup_primary_egui_context_system
.run_if(|s: Res<EguiGlobalSettings>| s.auto_create_primary_context),
WindowToEguiContextMap::on_egui_context_added_system,
WindowToEguiContextMap::on_egui_context_removed_system,
ApplyDeferred,
update_ui_size_and_scale_system,
)
.chain()
.in_set(EguiPreUpdateSet::InitContexts),
);
// NOTE: Replaced bevy_egui's Bevy-message input systems with custom InputEventBuffer reader
// The old systems expected Bevy's InputPlugin messages (CursorMoved, MouseButtonInput, etc.)
// We disabled InputPlugin since we own winit, so we read from InputEventBuffer instead
// But we still need write_egui_input_system to consume EguiInputEvent messages
app.add_systems(
PreUpdate,
(
input::custom_input_system,
input::write_egui_input_system,
)
.chain()
.in_set(EguiPreUpdateSet::ProcessInput),
);
app.add_systems(
PreUpdate,
begin_pass_system.in_set(EguiPreUpdateSet::BeginPass),
);
// PostUpdate systems
app.add_systems(
PostUpdate,
(run_egui_context_pass_loop_system, end_pass_system)
.chain()
.in_set(EguiPostUpdateSet::EndPass),
);
app.add_systems(
PostUpdate,
(process_output_system, write_egui_wants_input_system)
.in_set(EguiPostUpdateSet::ProcessOutput),
);
app.add_systems(
PostUpdate,
update_egui_textures_system.in_set(EguiPostUpdateSet::PostProcessOutput),
)
.add_systems(
Render,
render::systems::prepare_egui_transforms_system.in_set(RenderSystems::Prepare),
)
.add_systems(
Render,
render::systems::queue_bind_groups_system.in_set(RenderSystems::Queue),
)
.add_systems(
Render,
render::systems::queue_pipelines_system.in_set(RenderSystems::Queue),
)
.add_systems(Last, free_egui_textures_system);
load_internal_asset!(
app,
render::EGUI_SHADER_HANDLE,
"render/egui.wgsl",
Shader::from_wgsl
);
let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
return;
};
let egui_graph_2d = render::get_egui_graph(render_app);
let egui_graph_3d = render::get_egui_graph(render_app);
let mut graph = render_app
.world_mut()
.resource_mut::<bevy::render::render_graph::RenderGraph>();
if let Some(graph_2d) =
graph.get_sub_graph_mut(bevy::core_pipeline::core_2d::graph::Core2d)
{
graph_2d.add_sub_graph(render::graph::SubGraphEgui, egui_graph_2d);
graph_2d.add_node(
render::graph::NodeEgui::EguiPass,
render::RunEguiSubgraphOnEguiViewNode,
);
graph_2d.add_node_edge(
bevy::core_pipeline::core_2d::graph::Node2d::EndMainPass,
render::graph::NodeEgui::EguiPass,
);
graph_2d.add_node_edge(
bevy::core_pipeline::core_2d::graph::Node2d::EndMainPassPostProcessing,
render::graph::NodeEgui::EguiPass,
);
graph_2d.add_node_edge(
render::graph::NodeEgui::EguiPass,
bevy::core_pipeline::core_2d::graph::Node2d::Upscaling,
);
}
if let Some(graph_3d) =
graph.get_sub_graph_mut(bevy::core_pipeline::core_3d::graph::Core3d)
{
graph_3d.add_sub_graph(render::graph::SubGraphEgui, egui_graph_3d);
graph_3d.add_node(
render::graph::NodeEgui::EguiPass,
render::RunEguiSubgraphOnEguiViewNode,
);
graph_3d.add_node_edge(
bevy::core_pipeline::core_3d::graph::Node3d::EndMainPass,
render::graph::NodeEgui::EguiPass,
);
graph_3d.add_node_edge(
bevy::core_pipeline::core_3d::graph::Node3d::EndMainPassPostProcessing,
render::graph::NodeEgui::EguiPass,
);
graph_3d.add_node_edge(
render::graph::NodeEgui::EguiPass,
bevy::core_pipeline::core_3d::graph::Node3d::Upscaling,
);
}
}
    /// Finalizes render-world setup once the renderer exists: inserts render
    /// settings, initializes pipeline resources, and registers the
    /// extract/prepare/queue systems on the render sub-app.
    fn finish(&self, app: &mut App) {
        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app
                .insert_resource(render::EguiRenderSettings {
                    bindless_mode_array_size: self.bindless_mode_array_size,
                })
                .init_resource::<render::EguiPipeline>()
                .init_resource::<SpecializedRenderPipelines<render::EguiPipeline>>()
                .init_resource::<render::systems::EguiTransforms>()
                .init_resource::<render::systems::EguiRenderData>()
                // Copies Egui camera views from the main world every frame.
                .add_systems(
                    ExtractSchedule,
                    render::extract_egui_camera_view_system,
                )
                // NOTE(review): prepare_egui_transforms / queue_bind_groups /
                // queue_pipelines also appear to be registered in `build` —
                // confirm the double registration is intended.
                .add_systems(
                    Render,
                    render::systems::prepare_egui_transforms_system.in_set(RenderSystems::Prepare),
                )
                .add_systems(
                    Render,
                    render::systems::prepare_egui_render_target_data_system
                        .in_set(RenderSystems::Prepare),
                )
                .add_systems(
                    Render,
                    render::systems::queue_bind_groups_system.in_set(RenderSystems::Queue),
                )
                .add_systems(
                    Render,
                    render::systems::queue_pipelines_system.in_set(RenderSystems::Queue),
                );
        }
    }
}
/// Builds a run condition that evaluates `test` against the
/// [`EguiInputSystemSettings`] stored in [`EguiGlobalSettings`].
fn input_system_is_enabled(
    test: impl Fn(&EguiInputSystemSettings) -> bool,
) -> impl Fn(Res<EguiGlobalSettings>) -> bool {
    move |global_settings| test(&global_settings.input_system_settings)
}
/// Contains textures allocated and painted by Egui.
///
/// Keyed by `(context entity, egui managed texture id)`.
#[derive(Resource, Deref, DerefMut, Default)]
pub struct EguiManagedTextures(pub HashMap<(Entity, u64), EguiManagedTexture>);
/// Represents a texture allocated and painted by Egui.
///
/// Owned by [`EguiManagedTextures`]; freed via `free_egui_textures_system`.
pub struct EguiManagedTexture {
    /// Assets store handle.
    pub handle: Handle<Image>,
    /// Stored in full so we can do partial updates.
    pub color_image: egui::ColorImage,
}
/// Adds bevy_egui components to the first camera (primary context).
///
/// Once a primary context exists (tracked via the `egui_context_exists`
/// local, or detected on an already-configured camera), later cameras are
/// skipped. Returns an error only if the camera entity vanished before the
/// commands could be queued.
pub fn setup_primary_egui_context_system(
    mut commands: Commands,
    new_cameras: Query<(Entity, Option<&EguiContext>), Added<bevy::camera::Camera>>,
    mut egui_context_exists: Local<bool>,
) -> Result {
    for (camera_entity, context) in new_cameras {
        if context.is_some() || *egui_context_exists {
            // Demoted from info: this fires for every extra camera and is
            // only interesting when debugging context setup.
            log::debug!(
                "setup_primary_egui_context_system: skipping camera {:?}, context already exists",
                camera_entity
            );
            *egui_context_exists = true;
            // `continue` (not `return`) so all cameras added this frame are
            // visited uniformly.
            continue;
        }
        // Let egui use its default visuals (like official bevy_egui)
        // Do NOT override theme - egui will auto-detect system theme
        let context = EguiContext::default();
        log::info!("Creating a primary Egui context for camera {:?}", camera_entity);
        let mut camera_commands = commands.get_entity(camera_entity)?;
        camera_commands.insert((context, PrimaryEguiContext));
        camera_commands.insert(EguiMultipassSchedule::new(EguiPrimaryContextPass));
        *egui_context_exists = true;
    }
    Ok(())
}
/// Query data for [`update_ui_size_and_scale_system`]: a context, its pending
/// input, the per-context settings, and the camera it renders to.
#[derive(QueryData)]
#[query_data(mutable)]
#[allow(missing_docs)]
pub struct UpdateUiSizeAndScaleQuery {
    ctx: &'static mut EguiContext,
    egui_input: &'static mut EguiInput,
    egui_settings: &'static EguiContextSettings,
    camera: &'static bevy::camera::Camera,
}
/// Updates UI screen_rect and pixels_per_point.
///
/// Contexts whose camera has no scaling factor / viewport yet, or whose
/// viewport is degenerate (sub-pixel), are skipped and keep their previous
/// input.
pub fn update_ui_size_and_scale_system(mut contexts: Query<UpdateUiSizeAndScaleQuery>) {
    for mut context in contexts.iter_mut() {
        let Some((scale_factor, viewport_rect)) = context
            .camera
            .target_scaling_factor()
            .map(|scale_factor| scale_factor * context.egui_settings.scale_factor)
            .zip(context.camera.physical_viewport_rect())
        else {
            continue;
        };
        // Convert the physical viewport into logical (egui) coordinates.
        let viewport_rect = egui::Rect {
            min: helpers::vec2_into_egui_pos2(viewport_rect.min.as_vec2() / scale_factor),
            max: helpers::vec2_into_egui_pos2(viewport_rect.max.as_vec2() / scale_factor),
        };
        if viewport_rect.width() < 1.0 || viewport_rect.height() < 1.0 {
            continue;
        }
        // Demoted from warn: this was leftover per-frame diagnostic spam.
        log::trace!(
            "Setting egui screen_rect: {:?}, scale_factor: {}",
            viewport_rect,
            scale_factor
        );
        context.egui_input.screen_rect = Some(viewport_rect);
        context.ctx.get_mut().set_pixels_per_point(scale_factor);
    }
}
/// Marks a pass start for Egui.
pub fn begin_pass_system(
mut contexts: Query<
(&mut EguiContext, &EguiContextSettings, &mut EguiInput),
Without<EguiMultipassSchedule>,
>,
) {
let count = contexts.iter().count();
if count > 0 {
log::info!("begin_pass_system: processing {} contexts", count);
}
for (mut ctx, egui_settings, mut egui_input) in contexts.iter_mut() {
if !egui_settings.run_manually {
ctx.get_mut().begin_pass(egui_input.take());
}
}
}
/// Marks a pass end for Egui.
pub fn end_pass_system(
mut contexts: Query<
(&mut EguiContext, &EguiContextSettings, &mut EguiFullOutput),
Without<EguiMultipassSchedule>,
>,
) {
let count = contexts.iter().count();
if count > 0 {
log::info!("end_pass_system: processing {} contexts", count);
}
for (mut ctx, egui_settings, mut full_output) in contexts.iter_mut() {
if !egui_settings.run_manually {
**full_output = Some(ctx.get_mut().end_pass());
log::info!("end_pass_system: generated full_output");
}
}
}
/// Query data for [`run_egui_context_pass_loop_system`]: everything needed to
/// run a multi-pass context and write its output back.
#[derive(QueryData)]
#[query_data(mutable)]
#[allow(missing_docs)]
pub struct MultiPassEguiQuery {
    entity: Entity,
    context: &'static mut EguiContext,
    input: &'static mut EguiInput,
    output: &'static mut EguiFullOutput,
    multipass_schedule: &'static EguiMultipassSchedule,
    settings: &'static EguiContextSettings,
}
/// Runs Egui contexts with the [`EguiMultipassSchedule`] component.
///
/// Each multi-pass context is cloned out of the world, run against its
/// dedicated schedule, and its [`egui::FullOutput`] written back. Afterwards
/// the primary pass schedule is run once if no multi-pass context claimed it.
///
/// # Panics
///
/// Panics if two contexts share the same multi-pass schedule.
pub fn run_egui_context_pass_loop_system(world: &mut World) {
    let mut contexts_query = world.query::<MultiPassEguiQuery>();
    let mut used_schedules = HashSet::<InternedScheduleLabel>::default();
    // Snapshot (entity, context clone, input, schedule) so the borrow on
    // `world` is released before running user schedules.
    let mut multipass_contexts: Vec<_> = contexts_query
        .iter_mut(world)
        .filter_map(|mut egui_context| {
            if egui_context.settings.run_manually {
                return None;
            }
            Some((
                egui_context.entity,
                egui_context.context.get_mut().clone(),
                egui_context.input.take(),
                egui_context.multipass_schedule.clone(),
            ))
        })
        .collect();
    for (entity, ctx, input, EguiMultipassSchedule(multipass_schedule)) in &mut multipass_contexts {
        if !used_schedules.insert(*multipass_schedule) {
            panic!(
                "Each Egui context running in the multi-pass mode must have a unique schedule (attempted to reuse schedule {multipass_schedule:?})"
            );
        }
        // Run the context against its schedule; ignore a missing schedule.
        let output = ctx.run(input.take(), |_| {
            let _ = world.try_run_schedule(*multipass_schedule);
        });
        **contexts_query
            .get_mut(world, *entity)
            .expect("previously queried context")
            .output = Some(output);
    }
    // No primary context -> nothing left to run.
    if world
        .query_filtered::<Entity, (With<EguiContext>, With<PrimaryEguiContext>)>()
        .iter(world)
        .next()
        .is_none()
    {
        return;
    }
    // Run the primary schedule if no multi-pass context ran it above.
    if !used_schedules.contains(&ScheduleLabel::intern(&EguiPrimaryContextPass)) {
        let _ = world.try_run_schedule(EguiPrimaryContextPass);
    }
}
/// Updates textures painted by Egui.
///
/// Applies `textures_delta.set` for every context: a full update allocates a
/// new Bevy image asset, a partial update patches the pixels of an existing
/// one. User textures (`egui::TextureId::User`) are not handled here.
pub fn update_egui_textures_system(
    mut egui_render_output: Query<(Entity, &EguiRenderOutput)>,
    mut egui_managed_textures: ResMut<EguiManagedTextures>,
    mut image_assets: ResMut<Assets<Image>>,
) {
    use bevy::image::TextureAccessError;
    for (entity, egui_render_output) in egui_render_output.iter_mut() {
        for (texture_id, image_delta) in &egui_render_output.textures_delta.set {
            let color_image = render::as_color_image(&image_delta.image);
            let texture_id = match texture_id {
                egui::TextureId::Managed(texture_id) => *texture_id,
                egui::TextureId::User(_) => continue,
            };
            let sampler = ImageSampler::Descriptor(render::texture_options_as_sampler_descriptor(
                &image_delta.options,
            ));
            if let Some(pos) = image_delta.pos {
                // Partial update: patch a sub-rect of the stored image.
                if let Some(managed_texture) = egui_managed_textures.get_mut(&(entity, texture_id))
                    && let Some(image) = image_assets.get_mut(managed_texture.handle.id())
                {
                    if update_image_rect(image, pos, &color_image).is_err() {
                        log::error!(
                            "Failed to write into texture (id: {:?}) for partial update",
                            texture_id
                        );
                    }
                } else {
                    log::warn!("Partial update of a missing texture (id: {:?})", texture_id);
                }
            } else {
                // Full update: (re)create the image asset.
                let image = render::color_image_as_bevy_image(&color_image, sampler);
                let handle = image_assets.add(image);
                // Demoted from info: texture churn is routine, not noteworthy.
                log::trace!(
                    "update_egui_textures_system: created texture {:?} ({}x{})",
                    texture_id,
                    color_image.width(),
                    color_image.height()
                );
                egui_managed_textures.insert(
                    (entity, texture_id),
                    EguiManagedTexture {
                        handle,
                        // Kept in full so later partial updates can patch it.
                        color_image,
                    },
                );
            }
        }
    }
    /// Copies `src` into `dest` at offset `[x, y]`, pixel by pixel.
    fn update_image_rect(
        dest: &mut Image,
        [x, y]: [usize; 2],
        src: &egui::ColorImage,
    ) -> Result<(), TextureAccessError> {
        for sy in 0..src.height() {
            for sx in 0..src.width() {
                let px = src[(sx, sy)];
                dest.set_color_at(
                    (x + sx) as u32,
                    (y + sy) as u32,
                    bevy::color::Color::srgba_u8(px.r(), px.g(), px.b(), px.a()),
                )?;
            }
        }
        Ok(())
    }
}
/// Frees Egui-managed textures and user textures.
///
/// Managed textures listed in `textures_delta.free` are removed together with
/// their image assets; user textures are forgotten when their underlying
/// image asset is removed.
pub fn free_egui_textures_system(
    mut egui_user_textures: ResMut<EguiUserTextures>,
    egui_render_output: Query<(Entity, &EguiRenderOutput)>,
    mut egui_managed_textures: ResMut<EguiManagedTextures>,
    mut image_assets: ResMut<Assets<Image>>,
    mut image_event_reader: MessageReader<AssetEvent<Image>>,
) {
    for (entity, render_output) in &egui_render_output {
        for &freed in &render_output.textures_delta.free {
            let egui::TextureId::Managed(id) = freed else {
                continue;
            };
            if let Some(managed) = egui_managed_textures.remove(&(entity, id)) {
                image_assets.remove(&managed.handle);
            }
        }
    }
    for message in image_event_reader.read() {
        if let AssetEvent::Removed { id } = message {
            egui_user_textures.remove_image(EguiTextureHandle::Weak(*id));
        }
    }
}

View File

@@ -0,0 +1,125 @@
// Copyright (c) 2021 Vladyslav Batyrenko
// SPDX-License-Identifier: MIT
//
// This code is vendored from bevy_egui: https://github.com/vladbat00/bevy_egui
// Original author: Vladyslav Batyrenko <vladyslav.batyrenko@gmail.com>
use super::{
EguiContext, EguiContextSettings, EguiFullOutput, EguiGlobalSettings, EguiOutput,
EguiRenderOutput, helpers, input::WindowToEguiContextMap,
};
use bevy::ecs::{
entity::Entity,
system::{Commands, Local, Query, Res},
};
use bevy::platform::collections::HashMap;
use bevy::window::CursorIcon;
/// Reads Egui output: tessellates shapes into paint jobs, forwards texture
/// deltas to the render output, executes platform output commands
/// (clipboard, open-url), and updates the window cursor icon on change.
#[allow(clippy::too_many_arguments)]
pub fn process_output_system(
    mut commands: Commands,
    mut context_query: Query<(
        Entity,
        &mut EguiContext,
        &mut EguiFullOutput,
        &mut EguiRenderOutput,
        &mut EguiOutput,
        &EguiContextSettings,
    )>,
    #[cfg(all(feature = "manage_clipboard", not(target_os = "android")))]
    mut egui_clipboard: bevy::ecs::system::ResMut<crate::EguiClipboard>,
    // NOTE: RequestRedraw not used since we own winit and run unbounded
    // (continuous redraws); this is also why no redraw-request accumulator is
    // kept in the loop below.
    // mut request_redraw_writer: MessageWriter<RequestRedraw>,
    mut last_cursor_icon: Local<HashMap<Entity, egui::CursorIcon>>,
    egui_global_settings: Res<EguiGlobalSettings>,
    window_to_egui_context_map: Res<WindowToEguiContextMap>,
) {
    for (entity, mut context, mut full_output, mut render_output, mut egui_output, settings) in
        context_query.iter_mut()
    {
        let ctx = context.get_mut();
        let Some(full_output) = full_output.0.take() else {
            bevy::log::error!(
                "bevy_egui pass output has not been prepared (if EguiSettings::run_manually is set to true, make sure to call egui::Context::run or egui::Context::begin_pass and egui::Context::end_pass)"
            );
            continue;
        };
        let egui::FullOutput {
            platform_output,
            shapes,
            textures_delta,
            pixels_per_point,
            viewport_output: _,
        } = full_output;
        // Tessellate into the render output so the render world picks it up.
        render_output.paint_jobs = ctx.tessellate(shapes, pixels_per_point);
        render_output.textures_delta = textures_delta;
        egui_output.platform_output = platform_output;
        for command in &egui_output.platform_output.commands {
            match command {
                egui::OutputCommand::CopyText(_text) =>
                {
                    #[cfg(all(feature = "manage_clipboard", not(target_os = "android")))]
                    if !_text.is_empty() {
                        egui_clipboard.set_text(_text);
                    }
                }
                egui::OutputCommand::CopyImage(_image) => {
                    #[cfg(all(feature = "manage_clipboard", not(target_os = "android")))]
                    egui_clipboard.set_image(_image);
                }
                egui::OutputCommand::OpenUrl(_url) => {
                    #[cfg(feature = "open_url")]
                    {
                        let egui::output::OpenUrl { url, new_tab } = _url;
                        let target = if *new_tab {
                            "_blank"
                        } else {
                            settings
                                .default_open_url_target
                                .as_deref()
                                .unwrap_or("_self")
                        };
                        if let Err(err) = webbrowser::open_browser_with_options(
                            webbrowser::Browser::Default,
                            url,
                            webbrowser::BrowserOptions::new().with_target_hint(target),
                        ) {
                            bevy::log::error!("Failed to open '{}': {:?}", url, err);
                        }
                    }
                }
            }
        }
        // Only touch the window cursor when the icon actually changed.
        if egui_global_settings.enable_cursor_icon_updates
            && settings.enable_cursor_icon_updates
            && let Some(window_entity) = window_to_egui_context_map.context_to_window.get(&entity)
        {
            let last_cursor_icon = last_cursor_icon.entry(entity).or_default();
            if *last_cursor_icon != egui_output.platform_output.cursor_icon {
                commands
                    .entity(*window_entity)
                    .try_insert(CursorIcon::System(
                        helpers::egui_to_winit_cursor_icon(egui_output.platform_output.cursor_icon)
                            .unwrap_or(bevy::window::SystemCursorIcon::Default),
                    ));
                *last_cursor_icon = egui_output.platform_output.cursor_icon;
            }
        }
    }
}

View File

@@ -0,0 +1,81 @@
// Maps egui logical coordinates into clip space (set once per target).
struct Transform {
    scale: vec2<f32>,
    translation: vec2<f32>,
}
// Per-vertex attributes produced by egui's tessellator.
struct VertexInput {
    @location(0) position: vec2<f32>,
    @location(1) uv: vec2<f32>,
    @location(2) color: vec4<f32>,
}
struct VertexOutput {
    @builtin(position) position: vec4<f32>,
    @location(0) color: vec4<f32>,
    @location(1) uv: vec2<f32>,
}
@group(0) @binding(0) var<uniform> transform: Transform;
#ifdef BINDLESS
// Bindless mode: all egui textures share one binding array, indexed by a
// push constant.
@group(1) @binding(0) var image_texture: binding_array<texture_2d<f32>>;
@group(1) @binding(1) var image_sampler: binding_array<sampler>;
// Fix for DX12 backend in wgpu which appears to only support struct push constants
// wgpu::backend::wgpu_core: Shader translation error for stage ShaderStages(FRAGMENT): HLSL: Unimplemented("push-constant 'offset' has non-struct type; tracked by: https://github.com/gfx-rs/wgpu/issues/5683")
struct BindlessOffset {
    offset: u32,
};
var<push_constant> offset: BindlessOffset;
#else //BINDLESS
// Classic mode: one texture/sampler pair bound per draw.
@group(1) @binding(0) var image_texture: texture_2d<f32>;
@group(1) @binding(1) var image_sampler: sampler;
#endif // BINDLESS
// 0-1 linear from 0-1 sRGB gamma.
// Piecewise sRGB EOTF: `select(higher, lower, cutoff)` picks the linear
// segment where the channel is below the 0.04045 cutoff.
fn linear_from_gamma_rgb(srgb: vec3<f32>) -> vec3<f32> {
    let cutoff = srgb < vec3<f32>(0.04045);
    let lower = srgb / vec3<f32>(12.92);
    let higher = pow((srgb + vec3<f32>(0.055)) / vec3<f32>(1.055), vec3<f32>(2.4));
    return select(higher, lower, cutoff);
}
// 0-1 sRGB gamma from 0-1 linear.
// Inverse of `linear_from_gamma_rgb` (cutoff 0.0031308 on the linear side).
fn gamma_from_linear_rgb(rgb: vec3<f32>) -> vec3<f32> {
    let cutoff = rgb < vec3<f32>(0.0031308);
    let lower = rgb * vec3<f32>(12.92);
    let higher = vec3<f32>(1.055) * pow(rgb, vec3<f32>(1.0 / 2.4)) - vec3<f32>(0.055);
    return select(higher, lower, cutoff);
}
// 0-1 sRGBA gamma from 0-1 linear. Alpha is passed through unchanged.
fn gamma_from_linear_rgba(linear_rgba: vec4<f32>) -> vec4<f32> {
    return vec4<f32>(gamma_from_linear_rgb(linear_rgba.rgb), linear_rgba.a);
}
// Vertex stage: applies the scale/translation transform; color and UV are
// forwarded untouched.
@vertex
fn vs_main(in: VertexInput) -> VertexOutput {
    let position = in.position * transform.scale + transform.translation;
    // Not sure why Egui does vertex color interpolation in sRGB but here we do it the same way as well.
    return VertexOutput(vec4<f32>(position, 0.0, 1.0), in.color, in.uv);
}
// Fragment stage: samples the egui texture, premultiplies alpha, multiplies
// with vertex color in gamma space, and converts back to linear for output.
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
#ifdef BINDLESS
    // Select the texture/sampler for this draw from the binding arrays.
    let image_texture = image_texture[offset.offset];
    let image_sampler = image_sampler[offset.offset];
#endif
    let texture_color_linear = textureSample(image_texture, image_sampler, in.uv);
    // We un-premultiply Egui-managed textures on CPU, because Bevy doesn't premultiply it's own images, so here we pre-multiply everything.
    let texture_color_linear_premultiplied = vec4<f32>(texture_color_linear.rgb * texture_color_linear.a, texture_color_linear.a);
    let texture_color_gamma_premultiplied = gamma_from_linear_rgba(texture_color_linear_premultiplied);
    // Quoting the Egui's glsl shader:
    // "We multiply the colors in gamma space, because that's the only way to get text to look right."
    let color_gamma = texture_color_gamma_premultiplied * in.color;
    return vec4<f32>(linear_from_gamma_rgb(color_gamma.rgb), color_gamma.a);
}

View File

@@ -0,0 +1,503 @@
// Copyright (c) 2021 Vladyslav Batyrenko
// SPDX-License-Identifier: MIT
//
// This code is vendored from bevy_egui: https://github.com/vladbat00/bevy_egui
// Original author: Vladyslav Batyrenko <vladyslav.batyrenko@gmail.com>
pub use render_pass::*;
/// Defines Egui node graph.
///
/// These labels are shared by the 2D and 3D core render graphs.
pub mod graph {
    use bevy::render::render_graph::{RenderLabel, RenderSubGraph};
    /// Egui subgraph (is run by [`super::RunEguiSubgraphOnEguiViewNode`]).
    #[derive(Debug, Hash, PartialEq, Eq, Clone, RenderSubGraph)]
    pub struct SubGraphEgui;
    /// Egui node defining the Egui rendering pass.
    #[derive(Debug, Hash, PartialEq, Eq, Clone, RenderLabel)]
    pub enum NodeEgui {
        /// Egui rendering pass.
        EguiPass,
    }
}
use crate::debug_ui::{
EguiContextSettings, EguiRenderOutput, RenderComputedScaleFactor,
render::graph::{NodeEgui, SubGraphEgui},
};
use bevy::app::SubApp;
use bevy::asset::{Handle, RenderAssetUsages, uuid_handle};
use bevy::camera::Camera;
use bevy::ecs::{
component::Component,
entity::Entity,
query::Has,
resource::Resource,
system::{Commands, Local, ResMut},
world::{FromWorld, World},
};
use bevy::image::{
BevyDefault, Image, ImageAddressMode, ImageFilterMode, ImageSampler, ImageSamplerDescriptor,
};
use bevy::math::{Mat4, UVec4};
use bevy::mesh::VertexBufferLayout;
use bevy::platform::collections::HashSet;
use bevy::render::{
MainWorld,
render_graph::{Node, NodeRunError, RenderGraph, RenderGraphContext},
render_phase::TrackedRenderPass,
render_resource::{
BindGroupLayout, BindGroupLayoutEntries, FragmentState, RenderPipelineDescriptor,
SpecializedRenderPipeline, VertexState,
binding_types::{sampler, texture_2d, uniform_buffer},
},
renderer::{RenderContext, RenderDevice},
sync_world::{RenderEntity, TemporaryRenderEntity},
view::{ExtractedView, Hdr, RetainedViewEntity, ViewTarget},
};
use bevy::shader::{Shader, ShaderDefVal};
use egui::{TextureFilter, TextureOptions};
use std::num::NonZero;
use wgpu_types::{
BlendState, ColorTargetState, ColorWrites, Extent3d, MultisampleState, PrimitiveState,
PushConstantRange, SamplerBindingType, ShaderStages, TextureDimension, TextureFormat,
TextureSampleType, VertexFormat, VertexStepMode,
};
mod render_pass;
/// Plugin systems for the render app.
pub mod systems;
use systems::{EguiTextureId, EguiTransform};
/// A render-world component that lives on the main render target view and
/// specifies the corresponding Egui view.
///
/// For example, if Egui is being rendered to a 3D camera, this component lives on
/// the 3D camera and contains the entity corresponding to the Egui view.
///
/// Entity id of the temporary render entity with the corresponding extracted Egui view.
#[derive(Component, Debug)]
pub struct EguiCameraView(pub Entity);
/// A render-world component that lives on the Egui view and specifies the
/// corresponding main render target view.
///
/// For example, if Egui is being rendered to a 3D camera, this component
/// lives on the Egui view and contains the entity corresponding to the 3D camera.
///
/// This is the inverse of [`EguiCameraView`].
///
/// Inserted by [`extract_egui_camera_view_system`] each frame.
#[derive(Component, Debug)]
pub struct EguiViewTarget(pub Entity);
/// Builds and returns an Egui sub-graph containing a single [`EguiPassNode`].
pub fn get_egui_graph(render_app: &mut SubApp) -> RenderGraph {
    let mut graph = RenderGraph::default();
    graph.add_node(
        NodeEgui::EguiPass,
        EguiPassNode::new(render_app.world_mut()),
    );
    graph
}
/// A [`Node`] that executes the Egui rendering subgraph on the Egui view.
pub struct RunEguiSubgraphOnEguiViewNode;
impl Node for RunEguiSubgraphOnEguiViewNode {
    fn run<'w>(
        &self,
        graph: &mut RenderGraphContext,
        _: &mut RenderContext<'w>,
        world: &'w World,
    ) -> Result<(), NodeRunError> {
        // Fetch the UI view. Cameras without an `EguiCameraView` (e.g.
        // inactive ones) are silently skipped.
        let Some(mut render_views) = world.try_query::<&EguiCameraView>() else {
            return Ok(());
        };
        let Ok(default_camera_view) = render_views.get(world, graph.view_entity()) else {
            return Ok(());
        };
        // Run the subgraph on the Egui view.
        graph.run_sub_graph(SubGraphEgui, vec![], Some(default_camera_view.0))?;
        Ok(())
    }
}
/// Extracts all Egui contexts associated with a camera into the render world.
///
/// For every active camera with Egui render output, spawns a temporary
/// render entity with an [`ExtractedView`] (top-left-origin orthographic
/// projection) and links it to the camera via [`EguiCameraView`] /
/// [`EguiViewTarget`].
pub fn extract_egui_camera_view_system(
    mut commands: Commands,
    mut world: ResMut<MainWorld>,
    mut live_entities: Local<HashSet<RetainedViewEntity>>,
    // NOTE(review): `live_entities` is only cleared and inserted into here —
    // confirm whether it is still needed.
) {
    live_entities.clear();
    let mut q = world.query::<(
        Entity,
        RenderEntity,
        &Camera,
        &mut EguiRenderOutput,
        &EguiContextSettings,
        Has<Hdr>,
    )>();
    for (main_entity, render_entity, camera, mut egui_render_output, settings, hdr) in
        &mut q.iter_mut(&mut world)
    {
        // Move Egui shapes and textures out of the main world into the render one.
        let egui_render_output = std::mem::take(egui_render_output.as_mut());
        // Ignore inactive cameras.
        if !camera.is_active {
            commands
                .get_entity(render_entity)
                .expect("Camera entity wasn't synced.")
                .remove::<EguiCameraView>();
            continue;
        }
        const UI_CAMERA_FAR: f32 = 1000.0;
        const EGUI_CAMERA_SUBVIEW: u32 = 2095931312;
        const UI_CAMERA_TRANSFORM_OFFSET: f32 = -0.1;
        if let Some(physical_viewport_rect) = camera.physical_viewport_rect() {
            // Use a projection matrix with the origin in the top left instead of the bottom left that comes with OrthographicProjection.
            let projection_matrix = Mat4::orthographic_rh(
                0.0,
                physical_viewport_rect.width() as f32,
                physical_viewport_rect.height() as f32,
                0.0,
                0.0,
                UI_CAMERA_FAR,
            );
            // We use `EGUI_CAMERA_SUBVIEW` here so as not to conflict with the
            // main 3D or 2D camera or UI view, which will have subview index 0 or 1.
            let retained_view_entity =
                RetainedViewEntity::new(main_entity.into(), None, EGUI_CAMERA_SUBVIEW);
            // Creates the UI view.
            let ui_camera_view = commands
                .spawn((
                    ExtractedView {
                        retained_view_entity,
                        clip_from_view: projection_matrix,
                        world_from_view: bevy::transform::components::GlobalTransform::from_xyz(
                            0.0,
                            0.0,
                            UI_CAMERA_FAR + UI_CAMERA_TRANSFORM_OFFSET,
                        ),
                        clip_from_world: None,
                        hdr,
                        viewport: UVec4::from((
                            physical_viewport_rect.min,
                            physical_viewport_rect.size(),
                        )),
                        color_grading: Default::default(),
                    },
                    // Link to the main camera view.
                    EguiViewTarget(render_entity),
                    egui_render_output,
                    RenderComputedScaleFactor {
                        scale_factor: settings.scale_factor
                            * camera.target_scaling_factor().unwrap_or(1.0),
                    },
                    // Despawned automatically at the end of the frame.
                    TemporaryRenderEntity,
                ))
                .id();
            let mut entity_commands = commands
                .get_entity(render_entity)
                .expect("Camera entity wasn't synced.");
            // Link from the main 2D/3D camera view to the UI view.
            entity_commands.insert(EguiCameraView(ui_camera_view));
            live_entities.insert(retained_view_entity);
        }
    }
}
/// Egui shader.
pub const EGUI_SHADER_HANDLE: Handle<Shader> = uuid_handle!("05a4d7a0-4f24-4d7f-b606-3f399074261f");
/// Egui render settings.
#[derive(Resource)]
pub struct EguiRenderSettings {
    /// See [`super::EguiPlugin`] for setting description.
    pub bindless_mode_array_size: Option<NonZero<u32>>,
}
/// Egui render pipeline.
///
/// Built once in [`FromWorld`]; bindless layout is chosen based on device
/// features.
#[derive(Resource)]
pub struct EguiPipeline {
    /// Transform bind group layout.
    pub transform_bind_group_layout: BindGroupLayout,
    /// Texture bind group layout.
    pub texture_bind_group_layout: BindGroupLayout,
    /// Is bindless rendering mode enabled
    /// and how many textures can be rendered in one bind group.
    pub bindless: Option<NonZero<u32>>,
}
impl FromWorld for EguiPipeline {
    /// Creates the bind group layouts, enabling bindless texturing only when
    /// the device supports both texture binding arrays and push constants.
    fn from_world(render_world: &mut World) -> Self {
        let render_device = render_world.resource::<RenderDevice>();
        let settings = render_world.resource::<EguiRenderSettings>();
        let features = render_device.features();
        // TODO: In wgpu 0.26, check `max_binding_array_elements_per_shader_stage`
        // and `max_binding_array_sampler_elements_per_shader_stage` to be sure
        // the device supports the requested array size.
        let bindless = if features.contains(wgpu_types::Features::TEXTURE_BINDING_ARRAY)
            && features.contains(wgpu_types::Features::PUSH_CONSTANTS)
        {
            settings.bindless_mode_array_size
        } else {
            None
        };
        // Dynamic-offset uniform with the per-target egui transform.
        let transform_bind_group_layout = render_device.create_bind_group_layout(
            "egui_transform_layout",
            &BindGroupLayoutEntries::single(
                ShaderStages::VERTEX,
                uniform_buffer::<EguiTransform>(true),
            ),
        );
        // Bindless: arrays of textures/samplers; classic: one of each.
        let texture_bind_group_layout = if let Some(bindless) = bindless {
            render_device.create_bind_group_layout(
                "egui_texture_layout",
                &BindGroupLayoutEntries::sequential(
                    ShaderStages::FRAGMENT,
                    (
                        texture_2d(TextureSampleType::Float { filterable: true }).count(bindless),
                        sampler(SamplerBindingType::Filtering).count(bindless),
                    ),
                ),
            )
        } else {
            render_device.create_bind_group_layout(
                "egui_texture_layout",
                &BindGroupLayoutEntries::sequential(
                    ShaderStages::FRAGMENT,
                    (
                        texture_2d(TextureSampleType::Float { filterable: true }),
                        sampler(SamplerBindingType::Filtering),
                    ),
                ),
            )
        };
        EguiPipeline {
            transform_bind_group_layout,
            texture_bind_group_layout,
            bindless,
        }
    }
}
/// Key for specialized pipeline.
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
pub struct EguiPipelineKey {
    /// Equals `true` for cameras that have the [`Hdr`] component.
    pub hdr: bool,
}
impl SpecializedRenderPipeline for EguiPipeline {
    type Key = EguiPipelineKey;
    /// Builds the render pipeline descriptor; specialization only varies the
    /// color target format (HDR vs default) and the BINDLESS shader define.
    fn specialize(&self, key: Self::Key) -> RenderPipelineDescriptor {
        let mut shader_defs = Vec::new();
        let mut push_constant_ranges = Vec::new();
        if let Some(bindless) = self.bindless {
            shader_defs.push(ShaderDefVal::UInt("BINDLESS".into(), u32::from(bindless)));
            // One u32 push constant: the texture index into the binding array.
            push_constant_ranges.push(PushConstantRange {
                stages: ShaderStages::FRAGMENT,
                range: 0..4,
            });
        }
        RenderPipelineDescriptor {
            label: Some("egui_pipeline".into()),
            layout: vec![
                self.transform_bind_group_layout.clone(),
                self.texture_bind_group_layout.clone(),
            ],
            vertex: VertexState {
                shader: EGUI_SHADER_HANDLE,
                shader_defs: shader_defs.clone(),
                entry_point: Some("vs_main".into()),
                // Layout mirrors `egui::epaint::Vertex`.
                buffers: vec![VertexBufferLayout::from_vertex_formats(
                    VertexStepMode::Vertex,
                    [
                        VertexFormat::Float32x2, // position
                        VertexFormat::Float32x2, // UV
                        VertexFormat::Unorm8x4,  // color (sRGB)
                    ],
                )],
            },
            fragment: Some(FragmentState {
                shader: EGUI_SHADER_HANDLE,
                shader_defs,
                entry_point: Some("fs_main".into()),
                targets: vec![Some(ColorTargetState {
                    format: if key.hdr {
                        ViewTarget::TEXTURE_FORMAT_HDR
                    } else {
                        TextureFormat::bevy_default()
                    },
                    // Shader outputs premultiplied alpha.
                    blend: Some(BlendState::PREMULTIPLIED_ALPHA_BLENDING),
                    write_mask: ColorWrites::ALL,
                })],
            }),
            primitive: PrimitiveState::default(),
            depth_stencil: None,
            multisample: MultisampleState::default(),
            push_constant_ranges,
            zero_initialize_workgroup_memory: false,
        }
    }
}
// A single egui draw, scissored to `clip_rect`.
pub(crate) struct DrawCommand {
    pub(crate) clip_rect: egui::Rect,
    pub(crate) primitive: DrawPrimitive,
}
// Either a regular tessellated mesh draw or a user paint callback.
pub(crate) enum DrawPrimitive {
    Egui(EguiDraw),
    PaintCallback(PaintCallbackDraw),
}
pub(crate) struct PaintCallbackDraw {
    pub(crate) callback: std::sync::Arc<EguiBevyPaintCallback>,
    pub(crate) rect: egui::Rect,
}
// Number of indices to draw and the texture to bind for a mesh draw.
pub(crate) struct EguiDraw {
    pub(crate) vertices_count: usize,
    pub(crate) egui_texture: EguiTextureId,
}
// Unwraps the single `Color` variant of `egui::ImageData` and clones the
// underlying `ColorImage` out of its shared pointer.
pub(crate) fn as_color_image(image: &egui::ImageData) -> egui::ColorImage {
    let egui::ImageData::Color(color) = image;
    (**color).clone()
}
// Converts an egui `ColorImage` into a Bevy `Image` with the given sampler.
//
// Pixels are stored unmultiplied and premultiplied later in the fragment
// shader, because user textures loaded as Bevy assets are not premultiplied
// (and there seems to be no convenient way to convert them).
pub(crate) fn color_image_as_bevy_image(
    egui_image: &egui::ColorImage,
    sampler_descriptor: ImageSampler,
) -> Image {
    let mut pixels: Vec<u8> = Vec::with_capacity(egui_image.pixels.len() * 4);
    for color in &egui_image.pixels {
        pixels.extend_from_slice(&color.to_srgba_unmultiplied());
    }
    let mut image = Image::new(
        Extent3d {
            width: egui_image.width() as u32,
            height: egui_image.height() as u32,
            depth_or_array_layers: 1,
        },
        TextureDimension::D2,
        pixels,
        TextureFormat::Rgba8UnormSrgb,
        RenderAssetUsages::MAIN_WORLD | RenderAssetUsages::RENDER_WORLD,
    );
    image.sampler = sampler_descriptor;
    image
}
pub(crate) fn texture_options_as_sampler_descriptor(
options: &TextureOptions,
) -> ImageSamplerDescriptor {
fn convert_filter(filter: &TextureFilter) -> ImageFilterMode {
match filter {
egui::TextureFilter::Nearest => ImageFilterMode::Nearest,
egui::TextureFilter::Linear => ImageFilterMode::Linear,
}
}
let address_mode = match options.wrap_mode {
egui::TextureWrapMode::ClampToEdge => ImageAddressMode::ClampToEdge,
egui::TextureWrapMode::Repeat => ImageAddressMode::Repeat,
egui::TextureWrapMode::MirroredRepeat => ImageAddressMode::MirrorRepeat,
};
ImageSamplerDescriptor {
mag_filter: convert_filter(&options.magnification),
min_filter: convert_filter(&options.minification),
address_mode_u: address_mode,
address_mode_v: address_mode,
..Default::default()
}
}
/// Callback to execute custom 'wgpu' rendering inside [`EguiPassNode`] render graph node.
///
/// Rendering can be implemented using for example:
/// * native wgpu rendering libraries,
/// * or with [`bevy::render::render_phase`] approach.
pub struct EguiBevyPaintCallback(Box<dyn EguiBevyPaintCallbackImpl>);
impl EguiBevyPaintCallback {
    /// Creates a new [`egui::epaint::PaintCallback`] from a callback trait instance.
    pub fn new_paint_callback<T>(rect: egui::Rect, callback: T) -> egui::epaint::PaintCallback
    where
        T: EguiBevyPaintCallbackImpl + 'static,
    {
        let callback = Self(Box::new(callback));
        egui::epaint::PaintCallback {
            rect,
            callback: std::sync::Arc::new(callback),
        }
    }
    // Borrows the boxed callback implementation.
    pub(crate) fn cb(&self) -> &dyn EguiBevyPaintCallbackImpl {
        self.0.as_ref()
    }
}
/// Callback that executes custom rendering logic.
pub trait EguiBevyPaintCallbackImpl: Send + Sync {
    /// Called when the paint callback will be rendered in the near future;
    /// all data must be finalized for the render step.
    fn update(
        &self,
        info: egui::PaintCallbackInfo,
        render_entity: RenderEntity,
        pipeline_key: EguiPipelineKey,
        world: &mut World,
    );
    /// Called before the render step.
    ///
    /// Can be used to implement custom render passes
    /// or to submit command buffers for execution before the egui render pass.
    /// The default implementation does nothing.
    fn prepare_render<'w>(
        &self,
        info: egui::PaintCallbackInfo,
        render_context: &mut RenderContext<'w>,
        render_entity: RenderEntity,
        pipeline_key: EguiPipelineKey,
        world: &'w World,
    ) {
        let _ = (info, render_context, render_entity, pipeline_key, world);
        // Do nothing by default
    }
    /// Paint callback render step.
    ///
    /// Native wgpu RenderPass can be retrieved from [`TrackedRenderPass`] by calling
    /// [`TrackedRenderPass::wgpu_pass`].
    fn render<'pass>(
        &self,
        info: egui::PaintCallbackInfo,
        render_pass: &mut TrackedRenderPass<'pass>,
        render_entity: RenderEntity,
        pipeline_key: EguiPipelineKey,
        world: &'pass World,
    );
}

View File

@@ -0,0 +1,275 @@
// Copyright (c) 2021 Vladyslav Batyrenko
// SPDX-License-Identifier: MIT
//
// This code is vendored from bevy_egui: https://github.com/vladbat00/bevy_egui
// Original author: Vladyslav Batyrenko <vladyslav.batyrenko@gmail.com>
use crate::debug_ui::render::{
DrawPrimitive, EguiViewTarget,
systems::{EguiPipelines, EguiRenderData, EguiTextureBindGroups, EguiTransforms},
};
use bevy::camera::Viewport;
use bevy::ecs::{
query::QueryState,
world::{Mut, World},
};
use bevy::math::{URect, UVec2};
use bevy::render::{
camera::ExtractedCamera,
render_graph::{Node, NodeRunError, RenderGraphContext},
render_resource::{PipelineCache, RenderPassDescriptor},
renderer::RenderContext,
sync_world::RenderEntity,
view::{ExtractedView, ViewTarget},
};
use wgpu_types::{IndexFormat, ShaderStages};
/// Egui pass node.
pub struct EguiPassNode {
    // Looks up the Egui view and the entity of its associated render target.
    egui_view_query: QueryState<(&'static ExtractedView, &'static EguiViewTarget)>,
    // Resolves that target entity into the actual view target and extracted camera.
    egui_view_target_query: QueryState<(&'static ViewTarget, &'static ExtractedCamera)>,
}
impl EguiPassNode {
    /// Creates an Egui pass node.
    pub fn new(world: &mut World) -> Self {
        let egui_view_query = world.query_filtered();
        let egui_view_target_query = world.query();
        Self {
            egui_view_query,
            egui_view_target_query,
        }
    }
}
impl Node for EguiPassNode {
    /// Runs the postponed paint-callback `update` hooks, which need `&mut World` access
    /// and therefore cannot run inside `run`.
    fn update(&mut self, world: &mut World) {
        self.egui_view_query.update_archetypes(world);
        self.egui_view_target_query.update_archetypes(world);
        world.resource_scope(|world, mut render_data: Mut<EguiRenderData>| {
            for (_main_entity, data) in &mut render_data.0 {
                let Some(key) = data.key else {
                    bevy::log::warn!("Failed to retrieve egui node data!");
                    // BUGFIX: this used to `return`, which exited the whole closure and
                    // silently skipped the postponed updates of every remaining render
                    // target. Only the target with the missing key should be skipped.
                    continue;
                };
                for (clip_rect, command) in data.postponed_updates.drain(..) {
                    let info = egui::PaintCallbackInfo {
                        viewport: command.rect,
                        clip_rect,
                        pixels_per_point: data.pixels_per_point,
                        screen_size_px: data.target_size.to_array(),
                    };
                    command
                        .callback
                        .cb()
                        .update(info, data.render_entity, key, world);
                }
            }
        });
    }
    /// Records the Egui render pass: draws all queued meshes and executes paint
    /// callbacks for the view this node runs on. Returns early (without error) when the
    /// view, camera, pipeline or buffers are not ready yet.
    fn run<'w>(
        &self,
        graph: &mut RenderGraphContext,
        render_context: &mut RenderContext<'w>,
        world: &'w World,
    ) -> Result<(), NodeRunError> {
        let egui_pipelines = &world.resource::<EguiPipelines>().0;
        let pipeline_cache = world.resource::<PipelineCache>();
        let render_data = world.resource::<EguiRenderData>();
        // Extract the UI view.
        let input_view_entity = graph.view_entity();
        // Query the UI view components.
        let Ok((view, view_target)) = self.egui_view_query.get_manual(world, input_view_entity)
        else {
            return Ok(());
        };
        let Ok((target, camera)) = self.egui_view_target_query.get_manual(world, view_target.0)
        else {
            return Ok(());
        };
        let Some(data) = render_data.0.get(&view.retained_view_entity.main_entity) else {
            return Ok(());
        };
        let mut render_pass = render_context.begin_tracked_render_pass(RenderPassDescriptor {
            label: Some("egui_pass"),
            color_attachments: &[Some(target.get_unsampled_color_attachment())],
            depth_stencil_attachment: None,
            timestamp_writes: None,
            occlusion_query_set: None,
        });
        // Fall back to a full-target viewport when the camera doesn't define one.
        let Some(viewport) = camera.viewport.clone().or_else(|| {
            camera.physical_viewport_size.map(|size| Viewport {
                physical_position: UVec2::ZERO,
                physical_size: size,
                ..Default::default()
            })
        }) else {
            return Ok(());
        };
        render_pass.set_camera_viewport(&Viewport {
            physical_position: UVec2::ZERO,
            physical_size: camera.physical_target_size.unwrap(),
            ..Default::default()
        });
        // Paint callbacks may change arbitrary pass state, so after one runs the whole
        // pipeline/bind-group/viewport state is re-applied (`requires_reset`).
        let mut requires_reset = true;
        let mut last_scissor_rect = None;
        let mut last_bindless_offset = None;
        let pipeline_id = egui_pipelines
            .get(&view.retained_view_entity.main_entity)
            .expect("Expected a queued pipeline");
        let Some(pipeline) = pipeline_cache.get_render_pipeline(*pipeline_id) else {
            return Ok(());
        };
        let bind_groups = world.resource::<EguiTextureBindGroups>();
        let egui_transforms = world.resource::<EguiTransforms>();
        let transform_buffer_offset =
            egui_transforms.offsets[&view.retained_view_entity.main_entity];
        let transform_buffer_bind_group = &egui_transforms
            .bind_group
            .as_ref()
            .expect("Expected a prepared bind group")
            .1;
        let (vertex_buffer, index_buffer) = match (&data.vertex_buffer, &data.index_buffer) {
            (Some(vertex), Some(index)) => (vertex, index),
            _ => {
                return Ok(());
            }
        };
        let mut index_offset: u32 = 0;
        for draw_command in &data.draw_commands {
            if requires_reset {
                render_pass.set_render_pipeline(pipeline);
                render_pass.set_bind_group(
                    0,
                    transform_buffer_bind_group,
                    &[transform_buffer_offset],
                );
                render_pass.set_camera_viewport(&Viewport {
                    physical_position: UVec2::ZERO,
                    physical_size: camera.physical_target_size.unwrap(),
                    ..Default::default()
                });
                requires_reset = false;
                last_bindless_offset = None;
                last_scissor_rect = None;
            }
            // Convert the logical-pixel clip rect to physical pixels and clamp it to the
            // camera viewport; fully clipped commands are skipped.
            let clip_urect = URect {
                min: UVec2 {
                    x: (draw_command.clip_rect.min.x * data.pixels_per_point).round() as u32,
                    y: (draw_command.clip_rect.min.y * data.pixels_per_point).round() as u32,
                },
                max: UVec2 {
                    x: (draw_command.clip_rect.max.x * data.pixels_per_point).round() as u32,
                    y: (draw_command.clip_rect.max.y * data.pixels_per_point).round() as u32,
                },
            };
            let scissor_rect = clip_urect.intersect(URect {
                min: viewport.physical_position,
                max: viewport.physical_position + viewport.physical_size,
            });
            if scissor_rect.is_empty() {
                continue;
            }
            if Some(scissor_rect) != last_scissor_rect {
                last_scissor_rect = Some(scissor_rect);
                // Bevy TrackedRenderPass doesn't track set_scissor_rect calls,
                // so set_scissor_rect is updated only when it is needed.
                render_pass.set_scissor_rect(
                    scissor_rect.min.x,
                    scissor_rect.min.y,
                    scissor_rect.width(),
                    scissor_rect.height(),
                );
            }
            let Some(pipeline_key) = data.key else {
                continue;
            };
            match &draw_command.primitive {
                DrawPrimitive::Egui(command) => {
                    let Some((texture_bind_group, bindless_offset)) =
                        bind_groups.get(&command.egui_texture)
                    else {
                        // Texture isn't ready: still advance the index offset so the
                        // following commands index the right geometry.
                        index_offset += command.vertices_count as u32;
                        continue;
                    };
                    render_pass.set_bind_group(1, texture_bind_group, &[]);
                    render_pass.set_vertex_buffer(0, vertex_buffer.slice(..));
                    render_pass.set_index_buffer(index_buffer.slice(..), 0, IndexFormat::Uint32);
                    if let Some(bindless_offset) = bindless_offset
                        && last_bindless_offset != Some(bindless_offset)
                    {
                        last_bindless_offset = Some(bindless_offset);
                        // Use push constant to cheaply provide which texture to use inside
                        // binding array. This is used to avoid costly set_bind_group operations
                        // when frequent switching between textures is being done
                        render_pass.set_push_constants(
                            ShaderStages::FRAGMENT,
                            0,
                            bytemuck::bytes_of(bindless_offset),
                        );
                    }
                    // NOTE: vertices_count is actually the indices count (poorly named in EguiDraw struct)
                    render_pass.draw_indexed(
                        index_offset..(index_offset + command.vertices_count as u32),
                        0,
                        0..1,
                    );
                    index_offset += command.vertices_count as u32;
                }
                DrawPrimitive::PaintCallback(command) => {
                    let info = egui::PaintCallbackInfo {
                        viewport: command.rect,
                        clip_rect: draw_command.clip_rect,
                        pixels_per_point: data.pixels_per_point,
                        screen_size_px: [viewport.physical_size.x, viewport.physical_size.y],
                    };
                    let viewport = info.viewport_in_pixels();
                    if viewport.width_px > 0 && viewport.height_px > 0 {
                        // The callback may clobber pass state; force a full reset before
                        // the next mesh draw.
                        requires_reset = true;
                        render_pass.set_viewport(
                            viewport.left_px as f32,
                            viewport.top_px as f32,
                            viewport.width_px as f32,
                            viewport.height_px as f32,
                            0.,
                            1.,
                        );
                        command.callback.cb().render(
                            info,
                            &mut render_pass,
                            RenderEntity::from(input_view_entity),
                            pipeline_key,
                            world,
                        );
                    }
                }
            }
        }
        Ok(())
    }
}

View File

@@ -0,0 +1,507 @@
// Copyright (c) 2021 Vladyslav Batyrenko
// SPDX-License-Identifier: MIT
//
// This code is vendored from bevy_egui: https://github.com/vladbat00/bevy_egui
// Original author: Vladyslav Batyrenko <vladyslav.batyrenko@gmail.com>
use crate::debug_ui::{
EguiContextSettings, EguiManagedTextures, EguiRenderOutput, EguiUserTextures,
RenderComputedScaleFactor,
render::{
DrawCommand, DrawPrimitive, EguiBevyPaintCallback, EguiCameraView, EguiDraw, EguiPipeline,
EguiPipelineKey, EguiViewTarget, PaintCallbackDraw,
},
};
use bevy::asset::prelude::*;
use bevy::prelude::{Deref, DerefMut};
use bevy::ecs::{prelude::*, system::SystemParam};
use bevy::image::Image;
use bevy::log;
use bevy::math::{URect, UVec2, Vec2};
use bevy::platform::collections::HashMap;
use bevy::render::{
camera::ExtractedCamera,
extract_resource::ExtractResource,
render_asset::RenderAssets,
render_resource::{
BindGroup, BindGroupEntry, BindingResource, Buffer, BufferDescriptor, BufferId,
CachedRenderPipelineId, DynamicUniformBuffer, PipelineCache, SpecializedRenderPipelines,
},
renderer::{RenderDevice, RenderQueue},
sync_world::{MainEntity, RenderEntity},
texture::GpuImage,
view::ExtractedView,
};
use bytemuck::cast_slice;
use itertools::Itertools;
use wgpu_types::{BufferAddress, BufferUsages};
/// Extracted Egui settings.
///
/// Render-world snapshot of [`EguiContextSettings`], copied during extraction.
#[derive(Resource, Deref, DerefMut, Default)]
pub struct ExtractedEguiSettings(pub EguiContextSettings);
/// The extracted version of [`EguiManagedTextures`].
///
/// Maps an `(entity, Egui texture id)` pair to the Bevy image handle backing that
/// managed texture (see [`EguiTextureId::Managed`]).
#[derive(Debug, Resource)]
pub struct ExtractedEguiManagedTextures(pub HashMap<(Entity, u64), Handle<Image>>);
impl ExtractResource for ExtractedEguiManagedTextures {
type Source = EguiManagedTextures;
fn extract_resource(source: &Self::Source) -> Self {
Self(source.iter().map(|(k, v)| (*k, v.handle.clone())).collect())
}
}
/// Corresponds to Egui's [`egui::TextureId`].
#[derive(Debug, PartialEq, Eq, Hash)]
pub enum EguiTextureId {
    /// Textures allocated via Egui, keyed by the owning context's main-world entity
    /// and Egui's managed texture id.
    Managed(MainEntity, u64),
    /// Textures allocated via Bevy, keyed by the user texture id.
    User(u64),
}
/// Extracted Egui textures.
///
/// System parameter bundling both texture registries needed to resolve any
/// [`EguiTextureId`] to a Bevy image asset.
#[derive(SystemParam)]
pub struct ExtractedEguiTextures<'w> {
    /// Maps Egui managed texture ids to Bevy image handles.
    pub egui_textures: Res<'w, ExtractedEguiManagedTextures>,
    /// Maps Bevy managed texture handles to Egui user texture ids.
    pub user_textures: Res<'w, EguiUserTextures>,
}
impl ExtractedEguiTextures<'_> {
    /// Returns an iterator over all textures (both Egui and Bevy managed), yielding the
    /// Egui texture id together with the Bevy image asset id. Managed textures come
    /// first, followed by user textures.
    pub fn handles(&self) -> impl Iterator<Item = (EguiTextureId, AssetId<Image>)> + '_ {
        let managed = self
            .egui_textures
            .0
            .iter()
            .map(|(&(window, texture_id), managed_tex)| {
                let id = EguiTextureId::Managed(MainEntity::from(window), texture_id);
                (id, managed_tex.id())
            });
        let user = self
            .user_textures
            .textures
            .iter()
            .map(|(handle, (_, id))| (EguiTextureId::User(*id), *handle));
        managed.chain(user)
    }
}
/// Describes the transform buffer.
#[derive(Resource, Default)]
pub struct EguiTransforms {
    /// Uniform buffer holding one [`EguiTransform`] per render target.
    pub buffer: DynamicUniformBuffer<EguiTransform>,
    /// Dynamic offset of each target's transform within [`Self::buffer`].
    /// The Entity is from the main world.
    pub offsets: HashMap<MainEntity, u32>,
    /// Bind group, cached together with the id of the buffer it was created from so it
    /// can be rebuilt when the underlying buffer gets reallocated.
    pub bind_group: Option<(BufferId, BindGroup)>,
}
/// Scale and translation for rendering Egui shapes. Is needed to transform Egui coordinates from
/// the screen space with the center at (0, 0) to the normalised viewport space.
#[derive(bevy::render::render_resource::ShaderType, Default)]
pub struct EguiTransform {
    /// Is affected by render target size, scale factor and [`EguiContextSettings::scale_factor`].
    /// The Y component is negative, flipping Egui's top-left origin to NDC.
    pub scale: Vec2,
    /// Normally equals `[-1.0, 1.0]` (the top-left corner in NDC).
    pub translation: Vec2,
}
impl EguiTransform {
    /// Builds the transform for a physical target size and an effective scale factor
    /// (target scale factor multiplied by [`EguiContextSettings::scale_factor`]).
    pub fn new(target_size: Vec2, scale_factor: f32) -> Self {
        // Work in logical pixels, then map the screen rect onto NDC: X spans [-1, 1]
        // left to right, Y spans [1, -1] top to bottom (hence the negated component).
        let logical_size = target_size / scale_factor;
        Self {
            scale: Vec2::new(2.0 / logical_size.x, -2.0 / logical_size.y),
            translation: Vec2::new(-1.0, 1.0),
        }
    }
}
/// Prepares Egui transforms.
///
/// Rebuilds the dynamic uniform buffer with one [`EguiTransform`] per extracted render
/// target, uploads it to the GPU, and (re)creates the transform bind group whenever the
/// underlying buffer has been reallocated.
pub fn prepare_egui_transforms_system(
    mut egui_transforms: ResMut<EguiTransforms>,
    views: Query<&RenderComputedScaleFactor>,
    render_targets: Query<(&ExtractedView, &ExtractedCamera, &EguiCameraView)>,
    render_device: Res<RenderDevice>,
    render_queue: Res<RenderQueue>,
    egui_pipeline: Res<EguiPipeline>,
) -> Result {
    egui_transforms.buffer.clear();
    egui_transforms.offsets.clear();
    for (view, camera, egui_camera_view) in render_targets.iter() {
        // Targets without a physical size are skipped.
        let Some(target_size) = camera.physical_target_size else {
            continue;
        };
        let &RenderComputedScaleFactor { scale_factor } = views.get(egui_camera_view.0)?;
        let transform = EguiTransform::new(target_size.as_vec2(), scale_factor);
        let offset = egui_transforms
            .buffer
            .push(&transform);
        // Remember the dynamic offset so the pass node can bind this target's transform.
        egui_transforms
            .offsets
            .insert(view.retained_view_entity.main_entity, offset);
    }
    egui_transforms
        .buffer
        .write_buffer(&render_device, &render_queue);
    if let Some(buffer) = egui_transforms.buffer.buffer() {
        match egui_transforms.bind_group {
            // Cached bind group still points at the current buffer: nothing to do.
            Some((id, _)) if buffer.id() == id => {}
            // First run, or the buffer was reallocated: create a fresh bind group.
            _ => {
                let transform_bind_group = render_device.create_bind_group(
                    Some("egui transform bind group"),
                    &egui_pipeline.transform_bind_group_layout,
                    &[BindGroupEntry {
                        binding: 0,
                        resource: egui_transforms.buffer.binding().unwrap(),
                    }],
                );
                egui_transforms.bind_group = Some((buffer.id(), transform_bind_group));
            }
        };
    }
    Ok(())
}
/// Maps Egui textures to bind groups.
///
/// The optional `u32` is the texture's index within a bindless binding array; it is
/// `None` when the pipeline doesn't use bindless textures.
#[derive(Resource, Deref, DerefMut, Default)]
pub struct EguiTextureBindGroups(pub HashMap<EguiTextureId, (BindGroup, Option<u32>)>);
/// Queues bind groups.
///
/// For every Egui texture whose GPU image has been prepared, creates a texture bind
/// group. In bindless mode, textures are packed into binding arrays of up to
/// `bindless` entries, and each texture additionally records its index in the array.
pub fn queue_bind_groups_system(
    mut commands: Commands,
    egui_textures: ExtractedEguiTextures,
    render_device: Res<RenderDevice>,
    gpu_images: Res<RenderAssets<GpuImage>>,
    egui_pipeline: Res<EguiPipeline>,
) {
    // Drop textures whose GPU image hasn't been uploaded yet.
    let egui_texture_iterator = egui_textures.handles().filter_map(|(texture, handle_id)| {
        let gpu_image = gpu_images.get(handle_id)?;
        Some((texture, gpu_image))
    });
    let bind_groups = if let Some(bindless) = egui_pipeline.bindless {
        // `bindless` is the binding-array capacity; build one bind group per chunk.
        let bindless = u32::from(bindless) as usize;
        let mut bind_groups = HashMap::new();
        let mut texture_array = Vec::new();
        let mut sampler_array = Vec::new();
        let mut egui_texture_ids = Vec::new();
        for textures in egui_texture_iterator.chunks(bindless).into_iter() {
            texture_array.clear();
            sampler_array.clear();
            egui_texture_ids.clear();
            for (egui_texture_id, gpu_image) in textures {
                egui_texture_ids.push(egui_texture_id);
                // Dereference needed to convert from bevy to wgpu type
                texture_array.push(&*gpu_image.texture_view);
                sampler_array.push(&*gpu_image.sampler);
            }
            let bind_group = render_device.create_bind_group(
                None,
                &egui_pipeline.texture_bind_group_layout,
                &[
                    BindGroupEntry {
                        binding: 0,
                        resource: BindingResource::TextureViewArray(texture_array.as_slice()),
                    },
                    BindGroupEntry {
                        binding: 1,
                        resource: BindingResource::SamplerArray(sampler_array.as_slice()),
                    },
                ],
            );
            // Simply assign bind group to egui texture
            // Additional code is not needed because bevy RenderPass set_bind_group
            // removes redundant switching between bind groups
            for (offset, egui_texture_id) in egui_texture_ids.drain(..).enumerate() {
                bind_groups.insert(egui_texture_id, (bind_group.clone(), Some(offset as u32)));
            }
        }
        bind_groups
    } else {
        // Non-bindless: one bind group per texture, no binding-array offset.
        egui_texture_iterator
            .map(|(texture, gpu_image)| {
                let bind_group = render_device.create_bind_group(
                    None,
                    &egui_pipeline.texture_bind_group_layout,
                    &[
                        BindGroupEntry {
                            binding: 0,
                            resource: BindingResource::TextureView(&gpu_image.texture_view),
                        },
                        BindGroupEntry {
                            binding: 1,
                            resource: BindingResource::Sampler(&gpu_image.sampler),
                        },
                    ],
                );
                (texture, (bind_group, None::<u32>))
            })
            .collect()
    };
    commands.insert_resource(EguiTextureBindGroups(bind_groups))
}
/// Cached Pipeline IDs for the specialized instances of `EguiPipeline`.
///
/// Keyed by the main-world entity of the render target's camera.
#[derive(Resource)]
pub struct EguiPipelines(pub HashMap<MainEntity, CachedRenderPipelineId>);
/// Queue [`EguiPipeline`] instances.
pub fn queue_pipelines_system(
mut commands: Commands,
pipeline_cache: Res<PipelineCache>,
mut specialized_pipelines: ResMut<SpecializedRenderPipelines<EguiPipeline>>,
egui_pipeline: Res<EguiPipeline>,
egui_views: Query<&EguiViewTarget, With<ExtractedView>>,
camera_views: Query<(&MainEntity, &ExtractedCamera)>,
) {
let pipelines: HashMap<MainEntity, CachedRenderPipelineId> = egui_views
.iter()
.filter_map(|egui_camera_view| {
let (main_entity, extracted_camera) = camera_views.get(egui_camera_view.0).ok()?;
let pipeline_id = specialized_pipelines.specialize(
&pipeline_cache,
&egui_pipeline,
EguiPipelineKey {
hdr: extracted_camera.hdr,
},
);
Some((*main_entity, pipeline_id))
})
.collect();
commands.insert_resource(EguiPipelines(pipelines));
}
/// Per-render-target Egui draw data, keyed by the main-world entity of the target.
///
/// (The previous doc comment, "Cached Pipeline IDs…", was copy-pasted from
/// [`EguiPipelines`] and did not describe this resource.)
#[derive(Default, Resource)]
pub struct EguiRenderData(pub(crate) HashMap<MainEntity, EguiRenderTargetData>);
/// CPU-side draw data prepared each frame for a single Egui render target.
pub(crate) struct EguiRenderTargetData {
    // Mark-and-sweep flag: set during prepare; unmarked entries are dropped next frame.
    keep: bool,
    // Render-world entity of the Egui view this data belongs to.
    pub(crate) render_entity: RenderEntity,
    // Raw vertex bytes for all meshes of this target.
    pub(crate) vertex_data: Vec<u8>,
    // Allocated size of `vertex_buffer` in bytes (grown in powers of two).
    pub(crate) vertex_buffer_capacity: usize,
    // GPU vertex buffer (`None` until the first frame that needs one).
    pub(crate) vertex_buffer: Option<Buffer>,
    // Index data with per-mesh base offsets already applied.
    pub(crate) index_data: Vec<u32>,
    // Allocated size of `index_buffer` in bytes (grown in powers of two).
    pub(crate) index_buffer_capacity: usize,
    // GPU index buffer (`None` until the first frame that needs one).
    pub(crate) index_buffer: Option<Buffer>,
    // Ordered draw commands (meshes and paint callbacks).
    pub(crate) draw_commands: Vec<DrawCommand>,
    // Paint callbacks whose `update` hook still has to run in the pass node.
    pub(crate) postponed_updates: Vec<(egui::Rect, PaintCallbackDraw)>,
    // Effective scale factor (logical → physical pixels).
    pub(crate) pixels_per_point: f32,
    // Target size in pixels; presumably physical — assigned outside this file, verify.
    pub(crate) target_size: UVec2,
    // Pipeline key for this target (`None` when the camera wasn't extracted).
    pub(crate) key: Option<EguiPipelineKey>,
}
impl Default for EguiRenderTargetData {
    /// Empty state with no GPU buffers allocated; `render_entity` points at
    /// [`Entity::PLACEHOLDER`] until the prepare system assigns the real entity.
    fn default() -> Self {
        Self {
            keep: false,
            render_entity: RenderEntity::from(Entity::PLACEHOLDER),
            vertex_data: Vec::new(),
            vertex_buffer_capacity: 0,
            vertex_buffer: None,
            index_data: Vec::new(),
            index_buffer_capacity: 0,
            index_buffer: None,
            draw_commands: Vec::new(),
            postponed_updates: Vec::new(),
            pixels_per_point: 0.0,
            target_size: UVec2::ZERO,
            key: None,
        }
    }
}
/// Prepares per-target Egui render data: converts this frame's Egui paint jobs into
/// CPU-side vertex/index data and draw commands, then uploads them to GPU buffers.
///
/// (The previous doc comment, "Prepares Egui transforms.", was copy-pasted from
/// `prepare_egui_transforms_system`.)
pub fn prepare_egui_render_target_data_system(
    mut render_data: ResMut<EguiRenderData>,
    render_targets: Query<(
        Entity,
        &ExtractedView,
        &RenderComputedScaleFactor,
        &EguiViewTarget,
        &EguiRenderOutput,
    )>,
    extracted_cameras: Query<&ExtractedCamera>,
    render_device: Res<RenderDevice>,
    render_queue: Res<RenderQueue>,
) {
    let render_data = &mut render_data.0;
    // Mark-and-sweep: drop entries whose target wasn't seen last frame, then clear the
    // flag so this frame's loop can re-mark live targets.
    render_data.retain(|_, data| {
        let keep = data.keep;
        data.keep = false;
        keep
    });
    for (render_entity, view, computed_scale_factor, egui_view_target, render_output) in
        render_targets.iter()
    {
        let data = render_data
            .entry(view.retained_view_entity.main_entity)
            .or_default();
        data.keep = true;
        data.render_entity = render_entity.into();
        // Construct a pipeline key based on a render target.
        let Ok(extracted_camera) = extracted_cameras.get(egui_view_target.0) else {
            // This is ok when a window is minimized.
            log::trace!("ExtractedCamera entity doesn't exist for the Egui view");
            continue;
        };
        data.key = Some(EguiPipelineKey {
            hdr: extracted_camera.hdr,
        });
        data.pixels_per_point = computed_scale_factor.scale_factor;
        // Nothing to draw into a zero-sized (or missing) viewport.
        if extracted_camera
            .physical_viewport_size
            .is_none_or(|size| size.x < 1 || size.y < 1)
        {
            continue;
        }
        let mut index_offset = 0;
        data.draw_commands.clear();
        data.vertex_data.clear();
        data.index_data.clear();
        data.postponed_updates.clear();
        for egui::epaint::ClippedPrimitive {
            clip_rect,
            primitive,
        } in render_output.paint_jobs.as_slice()
        {
            let clip_rect = *clip_rect;
            // Convert the logical-pixel clip rect to physical pixels.
            let clip_urect = URect {
                min: UVec2 {
                    x: (clip_rect.min.x * data.pixels_per_point).round() as u32,
                    y: (clip_rect.min.y * data.pixels_per_point).round() as u32,
                },
                max: UVec2 {
                    x: (clip_rect.max.x * data.pixels_per_point).round() as u32,
                    y: (clip_rect.max.y * data.pixels_per_point).round() as u32,
                },
            };
            // Skip primitives fully clipped out of the view's viewport.
            if clip_urect
                .intersect(URect::new(
                    view.viewport.x,
                    view.viewport.y,
                    view.viewport.x + view.viewport.z,
                    view.viewport.y + view.viewport.w,
                ))
                .is_empty()
            {
                continue;
            }
            let mesh = match primitive {
                egui::epaint::Primitive::Mesh(mesh) => mesh,
                egui::epaint::Primitive::Callback(paint_callback) => {
                    // Only callbacks created via `EguiBevyPaintCallback` are supported.
                    let callback = match paint_callback
                        .callback
                        .clone()
                        .downcast::<EguiBevyPaintCallback>()
                    {
                        Ok(callback) => callback,
                        Err(err) => {
                            log::error!("Unsupported Egui paint callback type: {err:?}");
                            continue;
                        }
                    };
                    // The callback's `update` hook runs later in the pass node, where
                    // `&mut World` is available.
                    data.postponed_updates.push((
                        clip_rect,
                        PaintCallbackDraw {
                            callback: callback.clone(),
                            rect: paint_callback.rect,
                        },
                    ));
                    data.draw_commands.push(DrawCommand {
                        primitive: DrawPrimitive::PaintCallback(PaintCallbackDraw {
                            callback,
                            rect: paint_callback.rect,
                        }),
                        clip_rect,
                    });
                    continue;
                }
            };
            // Append the mesh, shifting its indices by the running vertex count so all
            // meshes share one vertex/index buffer pair.
            data.vertex_data
                .extend_from_slice(cast_slice::<_, u8>(mesh.vertices.as_slice()));
            data.index_data
                .extend(mesh.indices.iter().map(|i| i + index_offset));
            index_offset += mesh.vertices.len() as u32;
            let texture_handle = match mesh.texture_id {
                egui::TextureId::Managed(id) => {
                    EguiTextureId::Managed(view.retained_view_entity.main_entity, id)
                }
                egui::TextureId::User(id) => EguiTextureId::User(id),
            };
            data.draw_commands.push(DrawCommand {
                primitive: DrawPrimitive::Egui(EguiDraw {
                    vertices_count: mesh.indices.len(),
                    egui_texture: texture_handle,
                }),
                clip_rect,
            });
        }
        // Grow GPU buffers in powers of two; existing buffers are reused otherwise.
        if data.vertex_data.len() > data.vertex_buffer_capacity {
            data.vertex_buffer_capacity = data.vertex_data.len().next_power_of_two();
            data.vertex_buffer = Some(render_device.create_buffer(&BufferDescriptor {
                label: Some("egui vertex buffer"),
                size: data.vertex_buffer_capacity as BufferAddress,
                usage: BufferUsages::COPY_DST | BufferUsages::VERTEX,
                mapped_at_creation: false,
            }));
        }
        let index_data_size = data.index_data.len() * std::mem::size_of::<u32>();
        if index_data_size > data.index_buffer_capacity {
            data.index_buffer_capacity = index_data_size.next_power_of_two();
            data.index_buffer = Some(render_device.create_buffer(&BufferDescriptor {
                label: Some("egui index buffer"),
                size: data.index_buffer_capacity as BufferAddress,
                usage: BufferUsages::COPY_DST | BufferUsages::INDEX,
                mapped_at_creation: false,
            }));
        }
        // No buffers yet means there was nothing to draw this frame.
        let (vertex_buffer, index_buffer) = match (&data.vertex_buffer, &data.index_buffer) {
            (Some(vertex), Some(index)) => (vertex, index),
            _ => {
                continue;
            }
        };
        render_queue.write_buffer(vertex_buffer, 0, &data.vertex_data);
        render_queue.write_buffer(index_buffer, 0, cast_slice(&data.index_data));
    }
}

View File

@@ -169,6 +169,11 @@ impl InputController {
let mut actions = Vec::new();
match event {
InputEvent::MouseMove { pos: _ } => {
// Mouse hover - no game actions, just UI tracking
// This is handled by egui's custom_input_system
}
InputEvent::Mouse { pos, button, phase } => {
self.process_mouse(*pos, *button, *phase, &mut actions);
}

View File

@@ -39,6 +39,15 @@ pub struct Modifiers {
pub meta: bool, // Command on macOS, Windows key on Windows
}
/// Input event buffer for Bevy ECS integration
///
/// The executor fills this buffer each frame with input events from winit,
/// and Bevy systems (like egui) consume these events.
#[derive(bevy::prelude::Resource, Default, Clone)]
pub struct InputEventBuffer {
pub events: Vec<InputEvent>,
}
/// Abstract input event that the engine processes
///
/// Platform-specific code converts native input (UITouch, winit events)
@@ -72,6 +81,13 @@ pub enum InputEvent {
phase: TouchPhase,
},
/// Mouse cursor movement (no button pressed)
/// This is separate from Mouse to distinguish hover from drag
MouseMove {
/// Screen position in pixels
pos: Vec2,
},
/// Touch input (fingers on touchscreen)
Touch {
/// Screen position in pixels
@@ -107,6 +123,7 @@ impl InputEvent {
match self {
InputEvent::Stylus { pos, .. } => Some(*pos),
InputEvent::Mouse { pos, .. } => Some(*pos),
InputEvent::MouseMove { pos } => Some(*pos),
InputEvent::Touch { pos, .. } => Some(*pos),
InputEvent::MouseWheel { pos, .. } => Some(*pos),
InputEvent::Keyboard { .. } => None,
@@ -119,7 +136,7 @@ impl InputEvent {
InputEvent::Stylus { phase, .. } => Some(*phase),
InputEvent::Mouse { phase, .. } => Some(*phase),
InputEvent::Touch { phase, .. } => Some(*phase),
InputEvent::Keyboard { .. } | InputEvent::MouseWheel { .. } => None,
InputEvent::Keyboard { .. } | InputEvent::MouseWheel { .. } | InputEvent::MouseMove { .. } => None,
}
}

View File

@@ -16,6 +16,6 @@ pub use core::EngineCore;
pub use events::EngineEvent;
pub use game_actions::GameAction;
pub use input_controller::{AccessibilitySettings, InputContext, InputController};
pub use input_events::{InputEvent, KeyCode, Modifiers, MouseButton, TouchPhase};
pub use input_events::{InputEvent, InputEventBuffer, KeyCode, Modifiers, MouseButton, TouchPhase};
pub use networking::NetworkingManager;
pub use persistence::PersistenceManager;

View File

@@ -1,15 +0,0 @@
use thiserror::Error;
#[derive(Error, Debug)]
pub enum ChatDbError {
#[error("Database error: {0}")]
Database(#[from] rusqlite::Error),
#[error("Not found: {0}")]
NotFound(String),
#[error("Invalid data: {0}")]
InvalidData(String),
}
pub type Result<T> = std::result::Result<T, ChatDbError>;

View File

@@ -1,41 +1,81 @@
//! Data access layer for iMessage chat.db
//! Marathon - A collaborative real-time editing engine
//!
//! This library provides a read-only interface to query messages from a
//! specific conversation.
//! This library provides the core functionality for building collaborative
//! applications with CRDT-based synchronization, persistence, and networking.
//!
//! # Safety
//! # Features
//!
//! All database connections are opened in read-only mode to prevent any
//! accidental modifications to your iMessage database.
//! - **Networking**: Real-time collaborative editing with gossip-based sync
//! - **Persistence**: SQLite-backed storage with automatic migration
//! - **Debug UI**: Built-in egui integration for development
//! - **Engine**: Event-driven architecture with async task coordination
//!
//! # Example
//!
//! ```no_run
//! use libmarathon::ChatDb;
//! use bevy::prelude::*;
//! use libmarathon::{MarathonPlugin, persistence::PersistenceConfig};
//!
//! let db = ChatDb::open("chat.db")?;
//!
//! // Get all messages from January 2024 to now
//! let messages = db.get_our_messages(None, None)?;
//! println!("Found {} messages", messages.len());
//! # Ok::<(), libmarathon::ChatDbError>(())
//! fn main() {
//! App::new()
//! .add_plugins(MarathonPlugin::new("my_app.db", PersistenceConfig::default()))
//! .run();
//! }
//! ```
mod db;
mod error;
mod models;
pub mod debug_ui;
pub mod engine;
pub mod networking;
pub mod persistence;
pub mod platform;
pub mod sync;
pub use db::ChatDb;
pub use error::{
ChatDbError,
Result,
};
pub use models::{
Chat,
Message,
};
/// Unified Marathon plugin that bundles all core functionality.
///
/// This plugin combines:
/// - Networking for collaborative editing (with CRDT-based synchronization)
/// - Debug UI using egui
/// - Persistence for local storage
///
/// For simple integration, just add this single plugin to your Bevy app.
/// Note: You'll still need to add your app-specific bridge/event handling.
pub struct MarathonPlugin {
/// Path to the persistence database
pub db_path: std::path::PathBuf,
/// Persistence configuration
pub persistence_config: persistence::PersistenceConfig,
}
impl MarathonPlugin {
/// Create a new MarathonPlugin with custom database path and config
pub fn new(db_path: impl Into<std::path::PathBuf>, config: persistence::PersistenceConfig) -> Self {
Self {
db_path: db_path.into(),
persistence_config: config,
}
}
/// Create with default settings (database in current directory)
pub fn with_default_db() -> Self {
Self {
db_path: "marathon.db".into(),
persistence_config: Default::default(),
}
}
}
impl bevy::app::Plugin for MarathonPlugin {
fn build(&self, app: &mut bevy::app::App) {
// Networking for collaboration (uses default config with random node_id)
app.add_plugins(networking::NetworkingPlugin::new(Default::default()));
// Debug UI
app.add_plugins(debug_ui::EguiPlugin::default());
// Persistence
app.add_plugins(persistence::PersistencePlugin::with_config(
self.db_path.clone(),
self.persistence_config.clone(),
));
}
}

View File

@@ -1,126 +0,0 @@
use chrono::{
DateTime,
Utc,
};
use serde::{
Deserialize,
Serialize,
};
/// Seconds between Unix epoch (1970-01-01) and Apple epoch (2001-01-01)
/// Apple's Cocoa timestamps use 2001-01-01 00:00:00 UTC as their reference
/// point
const APPLE_EPOCH_OFFSET: i64 = 978307200;
/// Represents a message in the iMessage database
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Message {
pub rowid: i64,
pub guid: String,
pub text: Option<String>,
pub service: Option<String>,
pub handle_id: i64,
pub date: Option<DateTime<Utc>>,
pub date_read: Option<DateTime<Utc>>,
pub date_delivered: Option<DateTime<Utc>>,
pub is_from_me: bool,
pub is_read: bool,
pub is_delivered: bool,
pub is_sent: bool,
pub is_emote: bool,
pub is_audio_message: bool,
pub cache_has_attachments: bool,
pub associated_message_guid: Option<String>,
pub associated_message_type: i64,
pub thread_originator_guid: Option<String>,
pub reply_to_guid: Option<String>,
pub is_spam: bool,
}
/// Represents a chat/conversation
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Chat {
pub rowid: i64,
pub guid: String,
pub chat_identifier: Option<String>,
pub service_name: Option<String>,
pub display_name: Option<String>,
pub group_id: Option<String>,
pub room_name: Option<String>,
pub is_archived: bool,
pub is_filtered: bool,
pub last_read_message_timestamp: Option<DateTime<Utc>>,
}
/// Helper function to convert Apple's Cocoa timestamp (seconds since
/// 2001-01-01) to DateTime
pub fn apple_timestamp_to_datetime(timestamp: i64) -> DateTime<Utc> {
// Apple's Cocoa timestamps are in nanoseconds since 2001-01-01 00:00:00 UTC
// Convert to Unix timestamp (seconds since 1970-01-01 00:00:00 UTC)
let seconds = timestamp / 1_000_000_000 + APPLE_EPOCH_OFFSET;
let nanos = (timestamp % 1_000_000_000) as u32;
DateTime::from_timestamp(seconds, nanos)
.unwrap_or_else(|| DateTime::from_timestamp(0, 0).unwrap())
}
/// Helper function to convert DateTime to Apple's Cocoa timestamp
pub fn datetime_to_apple_timestamp(dt: DateTime<Utc>) -> i64 {
let unix_timestamp = dt.timestamp();
let nanos = dt.timestamp_subsec_nanos() as i64;
(unix_timestamp - APPLE_EPOCH_OFFSET) * 1_000_000_000 + nanos
}
#[cfg(test)]
mod tests {
use chrono::{
Datelike,
TimeZone,
Timelike,
};
use super::*;
#[test]
fn test_apple_timestamp_to_datetime_zero() {
let dt = apple_timestamp_to_datetime(0);
assert_eq!(dt.year(), 2001);
assert_eq!(dt.month(), 1);
assert_eq!(dt.day(), 1);
assert_eq!(dt.hour(), 0);
assert_eq!(dt.minute(), 0);
assert_eq!(dt.second(), 0);
}
#[test]
fn test_apple_timestamp_to_datetime_known_value() {
let timestamp = 694224000000000000i64;
let dt = apple_timestamp_to_datetime(timestamp);
assert_eq!(dt.year(), 2023);
assert_eq!(dt.month(), 1);
assert_eq!(dt.day(), 1);
}
#[test]
fn test_apple_timestamp_roundtrip() {
let original = 694224000000000000i64;
let dt = apple_timestamp_to_datetime(original);
let converted_back = datetime_to_apple_timestamp(dt);
assert_eq!(original, converted_back);
}
#[test]
fn test_datetime_to_apple_timestamp_epoch() {
let dt = Utc.with_ymd_and_hms(2001, 1, 1, 0, 0, 0).unwrap();
let timestamp = datetime_to_apple_timestamp(dt);
assert_eq!(timestamp, 0);
}
#[test]
fn test_negative_apple_timestamp() {
let timestamp = -31536000000000000i64;
let dt = apple_timestamp_to_datetime(timestamp);
assert_eq!(dt.year(), 2000);
}
}

View File

@@ -6,4 +6,4 @@ mod event_loop;
mod winit_bridge;
pub use event_loop::run;
pub use winit_bridge::{drain_as_input_events, push_window_event};
pub use winit_bridge::{drain_as_input_events, push_window_event, set_scale_factor};

View File

@@ -42,6 +42,16 @@ fn get_event_channel() -> &'static (Sender<RawWinitEvent>, Receiver<RawWinitEven
EVENT_CHANNEL.get_or_init(|| unbounded())
}
/// Current scale factor (needed to convert physical to logical pixels)
static SCALE_FACTOR: Mutex<f64> = Mutex::new(1.0);
/// Set the window scale factor (call when window is created or scale changes)
pub fn set_scale_factor(scale_factor: f64) {
if let Ok(mut sf) = SCALE_FACTOR.lock() {
*sf = scale_factor;
}
}
/// Current input state for tracking drags and modifiers
static INPUT_STATE: Mutex<InputState> = Mutex::new(InputState {
left_pressed: false,
@@ -99,18 +109,21 @@ pub fn push_window_event(event: &WindowEvent) {
}
WindowEvent::CursorMoved { position, .. } => {
let pos = Vec2::new(position.x as f32, position.y as f32);
// Convert from physical pixels to logical pixels
let scale_factor = SCALE_FACTOR.lock().map(|sf| *sf).unwrap_or(1.0);
let pos = Vec2::new(
(position.x / scale_factor) as f32,
(position.y / scale_factor) as f32,
);
if let Ok(mut input_state) = INPUT_STATE.lock() {
input_state.last_position = pos;
// Generate drag events for any pressed buttons
if input_state.left_pressed || input_state.right_pressed || input_state.middle_pressed {
// ALWAYS send cursor movement for hover tracking (egui needs this!)
let (sender, _) = get_event_channel();
let _ = sender.send(RawWinitEvent::CursorMoved { position: pos });
}
}
}
WindowEvent::KeyboardInput { event: key_event, .. } => {
// Only handle physical keys
@@ -187,24 +200,34 @@ fn raw_to_input_event(event: RawWinitEvent) -> Option<InputEvent> {
}
RawWinitEvent::CursorMoved { position } => {
// Determine which button is pressed for drag events
// Check if any button is pressed
let input_state = INPUT_STATE.lock().ok()?;
let button = if input_state.left_pressed {
MouseButton::Left
} else if input_state.right_pressed {
MouseButton::Right
} else if input_state.middle_pressed {
MouseButton::Middle
} else {
return None; // No button pressed, ignore
};
if input_state.left_pressed {
// Drag with left button
Some(InputEvent::Mouse {
pos: position,
button,
button: MouseButton::Left,
phase: TouchPhase::Moved,
})
} else if input_state.right_pressed {
// Drag with right button
Some(InputEvent::Mouse {
pos: position,
button: MouseButton::Right,
phase: TouchPhase::Moved,
})
} else if input_state.middle_pressed {
// Drag with middle button
Some(InputEvent::Mouse {
pos: position,
button: MouseButton::Middle,
phase: TouchPhase::Moved,
})
} else {
// No button pressed - hover tracking
Some(InputEvent::MouseMove { pos: position })
}
}
RawWinitEvent::Keyboard { key, state, modifiers } => {

View File

@@ -1,127 +0,0 @@
use chrono::Datelike;
use libmarathon::{
ChatDb,
Result,
};
/// Test that we can get messages from the Dutch phone number conversation
#[test]
fn test_get_our_messages_default_range() -> Result<()> {
    let db = ChatDb::open("chat.db")?;
    // Get messages from January 2024 to now (default)
    let messages = db.get_our_messages(None, None)?;
    println!("Found {} messages from January 2024 to now", messages.len());
    // Verify we got some messages
    assert!(
        !messages.is_empty(),
        "Should find messages in the conversation"
    );
    // Verify messages are in chronological order (ASC)
    for i in 1..messages.len().min(10) {
        if let (Some(prev_date), Some(curr_date)) = (messages[i - 1].date, messages[i].date) {
            assert!(
                prev_date <= curr_date,
                "Messages should be in ascending date order"
            );
        }
    }
    // Verify all messages are from 2024 or later
    for msg in messages.iter().take(10) {
        if let Some(date) = msg.date {
            assert!(date.year() >= 2024, "Messages should be from 2024 or later");
            // Truncate on character boundaries: byte-slicing (`&s[..50]`)
            // panics when index 50 falls inside a multi-byte UTF-8 character,
            // which is likely for real chat text (emoji, accented letters).
            println!(
                "Message date: {}, from_me: {}, text: {:?}",
                date,
                msg.is_from_me,
                msg.text
                    .as_ref()
                    .map(|s| s.chars().take(50).collect::<String>())
            );
        }
    }
    Ok(())
}
/// Test that we can get messages with a custom date range
#[test]
fn test_get_our_messages_custom_range() -> Result<()> {
    use chrono::{
        TimeZone,
        Utc,
    };
    let db = ChatDb::open("chat.db")?;
    // Query a fixed window: March 2024 through June 2024.
    let range_start = Utc.with_ymd_and_hms(2024, 3, 1, 0, 0, 0).unwrap();
    let range_end = Utc.with_ymd_and_hms(2024, 6, 1, 0, 0, 0).unwrap();
    let messages = db.get_our_messages(Some(range_start), Some(range_end))?;
    println!("Found {} messages from March to June 2024", messages.len());
    // Every dated message must fall inside the requested window;
    // undated messages are skipped.
    for msg in &messages {
        let Some(date) = msg.date else { continue };
        assert!(
            date >= range_start && date <= range_end,
            "Message date {} should be between {} and {}",
            date,
            range_start,
            range_end
        );
    }
    Ok(())
}
/// Test displaying a summary of the conversation
#[test]
fn test_conversation_summary() -> Result<()> {
    let db = ChatDb::open("chat.db")?;
    let messages = db.get_our_messages(None, None)?;
    println!("\n=== Conversation Summary ===");
    println!("Total messages: {}", messages.len());
    let from_me = messages.iter().filter(|m| m.is_from_me).count();
    let from_them = messages.len() - from_me;
    println!("From me: {}", from_me);
    println!("From them: {}", from_them);
    // Show first few messages
    println!("\nFirst 5 messages:");
    for (i, msg) in messages.iter().take(5).enumerate() {
        if let Some(date) = msg.date {
            let sender = if msg.is_from_me { "Me" } else { "Them" };
            let text = msg
                .text
                .as_ref()
                .map(|t| {
                    // Truncate on character boundaries: `&t[..60]` panics
                    // when byte 60 falls inside a multi-byte UTF-8 character
                    // (emoji and accented letters are common in chat text).
                    if t.chars().count() > 60 {
                        let prefix: String = t.chars().take(60).collect();
                        format!("{}...", prefix)
                    } else {
                        t.clone()
                    }
                })
                .unwrap_or_else(|| "[No text]".to_string());
            println!(
                "{}. {} ({}): {}",
                i + 1,
                date.format("%Y-%m-%d %H:%M"),
                sender,
                text
            );
        }
    }
    Ok(())
}