initial working demo sans networking

Signed-off-by: Sienna Meridian Satterwhite <sienna@r3t.io>
This commit is contained in:
2025-12-11 22:10:06 +00:00
parent 1504807afe
commit 28c56defe7
40 changed files with 1552 additions and 3145 deletions

1377
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +1,5 @@
[workspace]
members = ["crates/lib", "crates/sync-macros", "crates/app"]
resolver = "2"
[workspace.package]

50
crates/app/Cargo.toml Normal file
View File

@@ -0,0 +1,50 @@
[package]
name = "app"
version = "0.1.0"
edition = "2021"

[features]
# Desktop (macOS) is the default build target.
default = ["desktop"]
desktop = [] # macOS only
ios = []
headless = []

[dependencies]
lib = { path = "../lib" }
bevy = { version = "0.17", default-features = false, features = [
    "bevy_winit",
    "bevy_render",
    "bevy_core_pipeline",
    "bevy_pbr",
    "bevy_ui",
    "bevy_text",
    "png",
] }
bevy_egui = "0.38"
uuid = { version = "1.0", features = ["v4", "serde"] }
anyhow = "1.0"
tokio = { version = "1", features = ["full"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
serde = { version = "1.0", features = ["derive"] }
iroh = { version = "0.95", features = ["discovery-local-network"] }
iroh-gossip = "0.95"
futures-lite = "2.0"
bincode = "1.3"
bytes = "1.0"
crossbeam-channel = "0.5.15"

[target.'cfg(target_os = "ios")'.dependencies]
objc = "0.2"

[dev-dependencies]
# Crates from [dependencies] are already visible to tests and benches, so
# only test-exclusive crates belong here. (iroh, iroh-gossip, futures-lite,
# bincode and bytes were previously duplicated with identical version specs.)
tempfile = "3"

[lib]
name = "app"
crate-type = ["staticlib", "cdylib", "lib"]

27
crates/app/src/camera.rs Normal file
View File

@@ -0,0 +1,27 @@
//! Camera configuration
//!
//! This module handles the 3D camera setup for the cube demo.
use bevy::prelude::*;
/// Plugin that installs the demo's single 3D camera.
pub struct CameraPlugin;

impl Plugin for CameraPlugin {
    fn build(&self, app: &mut App) {
        app.add_systems(Startup, setup_camera);
    }
}

/// Spawn the 3D camera.
///
/// The camera sits at (4, 3, 6) and looks at the cube's initial position
/// (0, 0.5, 0), giving a clear view of the cube, the ground plane, and any
/// movement.
fn setup_camera(mut commands: Commands) {
    info!("Setting up camera");
    // Point of interest: where the cube spawns.
    let focus = Vec3::new(0.0, 0.5, 0.0);
    let camera_transform = Transform::from_xyz(4.0, 3.0, 6.0).looking_at(focus, Vec3::Y);
    commands.spawn((Camera3d::default(), camera_transform));
}

65
crates/app/src/cube.rs Normal file
View File

@@ -0,0 +1,65 @@
//! Cube entity management
use bevy::prelude::*;
use lib::{
networking::{NetworkedEntity, NetworkedTransform, Synced},
persistence::Persisted,
};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// Marker component for the replicated cube
///
/// Zero-sized tag used in queries to find the one synced cube.
/// NOTE(review): the Serialize/Deserialize derives are presumably required
/// by the CRDT sync layer in `lib::networking` — confirm against that crate.
#[derive(Component, Reflect, Debug, Clone, Copy, Default, Serialize, Deserialize)]
#[reflect(Component)]
pub struct CubeMarker;
pub struct CubePlugin;
impl Plugin for CubePlugin {
fn build(&self, app: &mut App) {
app.register_type::<CubeMarker>()
.add_systems(Startup, spawn_cube);
}
}
/// Spawn the synced cube on startup
fn spawn_cube(
mut commands: Commands,
mut meshes: ResMut<Assets<Mesh>>,
mut materials: ResMut<Assets<StandardMaterial>>,
node_clock: Option<Res<lib::networking::NodeVectorClock>>,
) {
// Wait until NodeVectorClock is available (after networking plugin initializes)
let Some(clock) = node_clock else {
warn!("NodeVectorClock not ready, deferring cube spawn");
return;
};
let entity_id = Uuid::new_v4();
let node_id = clock.node_id;
info!("Spawning cube with network ID: {}", entity_id);
commands.spawn((
CubeMarker,
// Bevy 3D components
Mesh3d(meshes.add(Cuboid::new(1.0, 1.0, 1.0))),
MeshMaterial3d(materials.add(StandardMaterial {
base_color: Color::srgb(0.8, 0.3, 0.6),
perceptual_roughness: 0.7,
metallic: 0.3,
..default()
})),
Transform::from_xyz(0.0, 0.5, 0.0),
GlobalTransform::default(),
// Networking
NetworkedEntity::with_id(entity_id, node_id),
NetworkedTransform,
// Persistence
Persisted::with_id(entity_id),
// Sync marker
Synced,
));
info!("Cube spawned successfully");
}

View File

@@ -0,0 +1,89 @@
//! Debug UI overlay using egui
use bevy::prelude::*;
use bevy_egui::{egui, EguiContexts, EguiPrimaryContextPass};
use lib::networking::{GossipBridge, NodeVectorClock};
/// Plugin that renders the egui debug overlay.
pub struct DebugUiPlugin;
impl Plugin for DebugUiPlugin {
    fn build(&self, app: &mut App) {
        // EguiPrimaryContextPass is bevy_egui's dedicated schedule for UI systems.
        app.add_systems(EguiPrimaryContextPass, render_debug_ui);
    }
}
/// Draw the debug overlay: network status, cube state, and control hints.
///
/// All data sources are optional — the panel shows placeholder text for
/// anything that has not been initialized yet.
fn render_debug_ui(
    mut contexts: EguiContexts,
    node_clock: Option<Res<NodeVectorClock>>,
    gossip_bridge: Option<Res<GossipBridge>>,
    cube_query: Query<(&Transform, &lib::networking::NetworkedEntity), With<crate::cube::CubeMarker>>,
) {
    // No egui context yet (e.g. window not ready) — nothing to draw.
    let Ok(ctx) = contexts.ctx_mut() else {
        return;
    };
    let panel = egui::Window::new("Debug Info")
        .default_pos([10.0, 10.0])
        .default_width(300.0)
        .resizable(true);
    panel.show(ctx, |ui| {
        ui.heading("Network Status");
        ui.separator();
        // Node information (short 8-char UUID prefixes keep the panel compact).
        if let Some(clock) = &node_clock {
            let id_text = clock.node_id.to_string();
            ui.label(format!("Node ID: {}", &id_text[..8]));
            // This node's own entry in the vector clock, 0 if absent.
            let own_tick = clock.clock.clocks.get(&clock.node_id).copied().unwrap_or(0);
            ui.label(format!("Clock: {}", own_tick));
            ui.label(format!("Known nodes: {}", clock.clock.clocks.len()));
        } else {
            ui.label("Node: Not initialized");
        }
        ui.add_space(5.0);
        // Gossip bridge status
        if let Some(bridge) = &gossip_bridge {
            let bridge_id = bridge.node_id().to_string();
            ui.label(format!("Bridge Node: {}", &bridge_id[..8]));
            ui.label("Status: Connected");
        } else {
            ui.label("Gossip: Not ready");
        }
        ui.add_space(10.0);
        ui.heading("Cube State");
        ui.separator();
        // Cube information (first matching entity, if any).
        if let Some((transform, networked)) = cube_query.iter().next() {
            let pos = transform.translation;
            ui.label(format!("Position: ({:.2}, {:.2}, {:.2})", pos.x, pos.y, pos.z));
            let (axis, angle) = transform.rotation.to_axis_angle();
            let angle_deg: f32 = angle.to_degrees();
            ui.label(format!("Rotation: {:.2}° around ({:.2}, {:.2}, {:.2})",
                angle_deg, axis.x, axis.y, axis.z));
            let scale = transform.scale;
            ui.label(format!("Scale: ({:.2}, {:.2}, {:.2})",
                scale.x, scale.y, scale.z));
            ui.add_space(5.0);
            ui.label(format!("Network ID: {}", &networked.network_id.to_string()[..8]));
            ui.label(format!("Owner: {}", &networked.owner_node_id.to_string()[..8]));
        } else {
            ui.label("Cube: Not spawned yet");
        }
        ui.add_space(10.0);
        ui.heading("Controls");
        ui.separator();
        ui.label("Left drag: Move cube (XY)");
        ui.label("Right drag: Rotate cube");
        ui.label("Scroll: Move cube (Z)");
    });
}

View File

@@ -0,0 +1,79 @@
//! Mouse input handling for macOS
use bevy::prelude::*;
use bevy::input::mouse::{MouseMotion, MouseWheel};
/// Plugin that registers the desktop mouse-control system.
pub struct MouseInputPlugin;
impl Plugin for MouseInputPlugin {
    fn build(&self, app: &mut App) {
        app.add_systems(Update, handle_mouse_input);
    }
}
/// Mouse interaction state
///
/// NOTE(review): derived as `Resource` but the type is only used via a
/// `Local` in `handle_mouse_input`, so the `Resource` derive appears
/// unnecessary — confirm before removing.
#[derive(Resource, Default)]
struct MouseState {
    /// Whether the left mouse button is currently pressed
    left_pressed: bool,
    /// Whether the right mouse button is currently pressed
    right_pressed: bool,
}
/// Handle mouse input to move and rotate the cube
fn handle_mouse_input(
mouse_buttons: Res<ButtonInput<MouseButton>>,
mut mouse_motion: EventReader<MouseMotion>,
mut mouse_wheel: EventReader<MouseWheel>,
mut mouse_state: Local<Option<MouseState>>,
mut cube_query: Query<&mut Transform, With<crate::cube::CubeMarker>>,
) {
// Initialize mouse state if needed
if mouse_state.is_none() {
*mouse_state = Some(MouseState::default());
}
let state = mouse_state.as_mut().unwrap();
// Update button states
state.left_pressed = mouse_buttons.pressed(MouseButton::Left);
state.right_pressed = mouse_buttons.pressed(MouseButton::Right);
// Get total mouse delta this frame
let mut total_delta = Vec2::ZERO;
for motion in mouse_motion.read() {
total_delta += motion.delta;
}
// Process mouse motion
if total_delta != Vec2::ZERO {
for mut transform in cube_query.iter_mut() {
if state.left_pressed {
// Left drag: Move cube in XY plane
// Scale factor for sensitivity
let sensitivity = 0.01;
transform.translation.x += total_delta.x * sensitivity;
transform.translation.y -= total_delta.y * sensitivity; // Invert Y
} else if state.right_pressed {
// Right drag: Rotate cube
let sensitivity = 0.01;
let rotation_x = Quat::from_rotation_y(total_delta.x * sensitivity);
let rotation_y = Quat::from_rotation_x(-total_delta.y * sensitivity);
transform.rotation = rotation_x * transform.rotation * rotation_y;
}
}
}
// Process mouse wheel for Z-axis movement
let mut total_scroll = 0.0;
for wheel in mouse_wheel.read() {
total_scroll += wheel.y;
}
if total_scroll != 0.0 {
for mut transform in cube_query.iter_mut() {
// Scroll: Move in Z axis
let sensitivity = 0.1;
transform.translation.z += total_scroll * sensitivity;
}
}
}

12
crates/app/src/lib.rs Normal file
View File

@@ -0,0 +1,12 @@
//! Replicated cube demo app
//!
//! This app demonstrates real-time CRDT synchronization between iPad and Mac
//! with Apple Pencil input controlling a 3D cube.
// NOTE(review): `main.rs` re-declares these modules (plus a cfg-gated
// `input` module) instead of importing from this library crate, so the
// binary and library compile the modules independently — confirm whether
// that duplication is intentional.
pub mod camera;
pub mod cube;
pub mod debug_ui;
pub mod rendering;
pub mod setup;
// Re-exported so integration tests can query for the cube marker.
pub use cube::CubeMarker;

101
crates/app/src/main.rs Normal file
View File

@@ -0,0 +1,101 @@
//! Replicated cube demo - macOS and iPad
//!
//! This demonstrates real-time CRDT synchronization with Apple Pencil input.
use bevy::prelude::*;
use bevy_egui::EguiPlugin;
use lib::{
networking::{NetworkingConfig, NetworkingPlugin},
persistence::{PersistenceConfig, PersistencePlugin},
};
use std::path::PathBuf;
use uuid::Uuid;
mod camera;
mod cube;
mod debug_ui;
mod rendering;
mod setup;
#[cfg(not(target_os = "ios"))]
mod input {
pub mod mouse;
pub use mouse::MouseInputPlugin;
}
#[cfg(target_os = "ios")]
mod input {
pub mod pencil;
pub use pencil::PencilInputPlugin;
}
use camera::*;
use cube::*;
use debug_ui::*;
use input::*;
use rendering::*;
use setup::*;
/// Application entry point: configures logging, then builds and runs the
/// Bevy app with networking, persistence, rendering, input and debug UI.
fn main() {
    // Route all logging through tracing-subscriber; Bevy's own LogPlugin is
    // disabled below so the two loggers don't both register.
    tracing_subscriber::fmt()
        .with_env_filter(
            tracing_subscriber::EnvFilter::from_default_env()
                .add_directive("wgpu=error".parse().unwrap())
                .add_directive("naga=warn".parse().unwrap()),
        )
        .init();
    // Create node ID (in production, load from config or generate once)
    let node_id = Uuid::new_v4();
    info!("Starting app with node ID: {}", node_id);
    // Database path
    let db_path = PathBuf::from("cube_demo.db");
    // Create Bevy app
    let mut app = App::new();
    app.add_plugins(
        DefaultPlugins
            .set(WindowPlugin {
                primary_window: Some(Window {
                    title: format!("Replicated Cube Demo - Node {}", &node_id.to_string()[..8]),
                    resolution: (1280, 720).into(),
                    ..default()
                }),
                ..default()
            })
            .disable::<bevy::log::LogPlugin>(), // Using tracing-subscriber instead
    )
    .add_plugins(EguiPlugin::default())
    // Networking (bridge will be set up in startup)
    .add_plugins(NetworkingPlugin::new(NetworkingConfig {
        node_id,
        sync_interval_secs: 1.0,
        prune_interval_secs: 60.0,
        tombstone_gc_interval_secs: 300.0,
    }))
    // Persistence
    .add_plugins(PersistencePlugin::with_config(
        db_path,
        PersistenceConfig {
            flush_interval_secs: 2,
            checkpoint_interval_secs: 30,
            battery_adaptive: true,
            ..Default::default()
        },
    ))
    // Camera
    .add_plugins(CameraPlugin)
    // Rendering
    .add_plugins(RenderingPlugin)
    // Cube management
    .add_plugins(CubePlugin)
    // Debug UI
    .add_plugins(DebugUiPlugin);
    // FIX: the input plugin is platform-specific. Unconditionally adding
    // MouseInputPlugin broke the iOS build, where `mod input` exports
    // PencilInputPlugin instead — cfg-gate the choice.
    #[cfg(not(target_os = "ios"))]
    app.add_plugins(MouseInputPlugin);
    #[cfg(target_os = "ios")]
    app.add_plugins(PencilInputPlugin);
    // Gossip networking setup
    app.add_systems(Startup, setup_gossip_networking)
        .add_systems(Update, poll_gossip_bridge)
        .run();
}

View File

@@ -0,0 +1,53 @@
//! Lighting and ground plane setup
use bevy::prelude::*;
/// Plugin that sets up scene lighting and the ground plane at startup.
pub struct RenderingPlugin;

impl Plugin for RenderingPlugin {
    fn build(&self, app: &mut App) {
        app.add_systems(Startup, setup_lighting_and_ground);
    }
}

/// Spawn the scene's lighting and ground plane.
///
/// Adds a shadow-casting directional "sun", white ambient fill light, and a
/// 20x20 green plane at the origin. The camera is configured separately in
/// the camera module.
fn setup_lighting_and_ground(
    mut commands: Commands,
    mut meshes: ResMut<Assets<Mesh>>,
    mut materials: ResMut<Assets<StandardMaterial>>,
) {
    info!("Setting up lighting and ground plane");
    // Key light: a sun aimed at the origin, with shadows enabled.
    let sun = DirectionalLight {
        illuminance: 10000.0,
        shadows_enabled: true,
        ..default()
    };
    let sun_transform = Transform::from_xyz(4.0, 8.0, 4.0).looking_at(Vec3::ZERO, Vec3::Y);
    commands.spawn((sun, sun_transform));
    // Fill light so shadowed faces are not pitch black.
    commands.insert_resource(AmbientLight {
        color: Color::WHITE,
        brightness: 150.0,
        affects_lightmapped_meshes: false,
    });
    // Matte green ground plane centered at the origin.
    let ground_mesh = meshes.add(Plane3d::default().mesh().size(20.0, 20.0));
    let ground_material = materials.add(StandardMaterial {
        base_color: Color::srgb(0.3, 0.5, 0.3),
        perceptual_roughness: 0.9,
        ..default()
    });
    commands.spawn((
        Mesh3d(ground_mesh),
        MeshMaterial3d(ground_material),
        Transform::from_xyz(0.0, 0.0, 0.0),
    ));
    info!("Lighting and ground setup complete");
}

279
crates/app/src/setup.rs Normal file
View File

@@ -0,0 +1,279 @@
//! Gossip networking setup with dedicated tokio runtime
//!
//! This module manages iroh-gossip networking with a tokio runtime running as a sidecar to Bevy.
//! The tokio runtime runs in a dedicated background thread, separate from Bevy's ECS loop.
//!
//! # Architecture
//!
//! ```text
//! ┌─────────────────────────────────────┐
//! │ Bevy Main Thread │
//! │ ┌────────────────────────────────┐ │
//! │ │ setup_gossip_networking() │ │ Startup System
//! │ │ - Creates channel │ │
//! │ │ - Spawns background thread │ │
//! │ └────────────────────────────────┘ │
//! │ ┌────────────────────────────────┐ │
//! │ │ poll_gossip_bridge() │ │ Update System
//! │ │ - Receives GossipBridge │ │ (runs every frame)
//! │ │ - Inserts as resource │ │
//! │ └────────────────────────────────┘ │
//! └─────────────────────────────────────┘
//! ↕ (crossbeam channel)
//! ┌─────────────────────────────────────┐
//! │ Background Thread (macOS only) │
//! │ ┌────────────────────────────────┐ │
//! │ │ Tokio Runtime │ │
//! │ │ ┌────────────────────────────┐ │ │
//! │ │ │ init_gossip() │ │ │
//! │ │ │ - Creates iroh endpoint │ │ │
//! │ │ │ - Sets up mDNS discovery │ │ │
//! │ │ │ - Subscribes to topic │ │ │
//! │ │ │ - Creates GossipBridge │ │ │
//! │ │ └────────────────────────────┘ │ │
//! │ │ ┌────────────────────────────┐ │ │
//! │ │ │ spawn_bridge_tasks() │ │ │
//! │ │ │ - Task 1: Forward outgoing │ │ │
//! │ │ │ - Task 2: Forward incoming │ │ │
//! │ │ └────────────────────────────┘ │ │
//! │ └────────────────────────────────┘ │
//! └─────────────────────────────────────┘
//! ```
//!
//! # Communication Pattern
//!
//! 1. **Bevy → Tokio**: GossipBridge's internal queue (try_recv_outgoing)
//! 2. **Tokio → Bevy**: GossipBridge's internal queue (push_incoming)
//! 3. **Thread handoff**: crossbeam_channel (one-time GossipBridge transfer)
use anyhow::Result;
use bevy::prelude::*;
use lib::networking::GossipBridge;
use uuid::Uuid;
/// Channel for receiving the GossipBridge from the background thread
///
/// This resource exists temporarily during startup. Once the GossipBridge
/// is received and inserted, this channel resource is removed.
#[cfg(not(target_os = "ios"))]
#[derive(Resource)]
// Newtype over the crossbeam receiver; field 0 is polled each frame by
// `poll_gossip_bridge` until the bridge arrives.
pub struct GossipBridgeChannel(crossbeam_channel::Receiver<GossipBridge>);
/// Set up gossip networking with iroh
///
/// This is a Bevy startup system that initializes the gossip networking stack.
/// On macOS, it spawns a dedicated thread with a tokio runtime. On iOS, it logs
/// a warning (iOS networking not yet implemented).
///
/// # Platform Support
///
/// - **macOS**: Full support with mDNS discovery
/// - **iOS**: Not yet implemented
pub fn setup_gossip_networking(mut commands: Commands) {
info!("Setting up gossip networking...");
// Spawn dedicated thread with Tokio runtime for gossip initialization
#[cfg(not(target_os = "ios"))]
{
let (sender, receiver) = crossbeam_channel::unbounded();
commands.insert_resource(GossipBridgeChannel(receiver));
std::thread::spawn(move || {
let rt = tokio::runtime::Runtime::new().unwrap();
rt.block_on(async move {
match init_gossip().await {
Ok(bridge) => {
info!("Gossip bridge initialized successfully");
if let Err(e) = sender.send(bridge) {
error!("Failed to send bridge to main thread: {}", e);
}
}
Err(e) => {
error!("Failed to initialize gossip: {}", e);
}
}
});
});
}
#[cfg(target_os = "ios")]
{
warn!("iOS networking not yet implemented - gossip networking disabled");
}
}
/// Poll the channel for the GossipBridge and insert it when ready
///
/// This is a Bevy update system that runs every frame. It checks the channel
/// for the GossipBridge created in the background thread. Once received, it
/// inserts the bridge as a resource and removes the channel.
///
/// # Platform Support
///
/// - **macOS**: Polls the channel and inserts GossipBridge
/// - **iOS**: No-op (networking not implemented)
pub fn poll_gossip_bridge(
    mut commands: Commands,
    #[cfg(not(target_os = "ios"))] channel: Option<Res<GossipBridgeChannel>>,
) {
    #[cfg(not(target_os = "ios"))]
    {
        // Guard-clause style: bail out until the channel exists and the
        // background thread has delivered the bridge.
        let Some(channel) = channel else { return };
        let Ok(bridge) = channel.0.try_recv() else { return };
        info!("Inserting GossipBridge resource into Bevy world");
        commands.insert_resource(bridge);
        // One-shot handoff complete — the channel is no longer needed.
        commands.remove_resource::<GossipBridgeChannel>();
    }
}
/// Initialize iroh-gossip networking stack
///
/// This async function runs in the background tokio runtime and:
/// 1. Creates an iroh endpoint with mDNS discovery
/// 2. Spawns the gossip protocol
/// 3. Sets up the router to accept gossip connections
/// 4. Subscribes to a shared topic (ID: [42; 32])
/// 5. Waits for join with a 2-second timeout
/// 6. Creates and configures the GossipBridge
/// 7. Spawns forwarding tasks to bridge messages
///
/// # Returns
///
/// - `Ok(GossipBridge)`: Successfully initialized networking
/// - `Err(anyhow::Error)`: Initialization failed
///
/// # Platform Support
///
/// This function is only compiled on non-iOS platforms.
#[cfg(not(target_os = "ios"))]
async fn init_gossip() -> Result<GossipBridge> {
    use iroh::discovery::mdns::MdnsDiscovery;
    use iroh::protocol::Router;
    use iroh::Endpoint;
    use iroh_gossip::net::Gossip;
    use iroh_gossip::proto::TopicId;
    info!("Creating endpoint with mDNS discovery...");
    let endpoint = Endpoint::builder()
        .discovery(MdnsDiscovery::builder())
        .bind()
        .await?;
    let endpoint_id = endpoint.addr().id;
    info!("Endpoint created: {}", endpoint_id);
    // Convert endpoint ID to UUID
    // NOTE(review): only the first 16 bytes of the endpoint id are kept, so
    // two endpoints could in principle collide on the derived UUID — confirm
    // this truncation is acceptable for node identity.
    let id_bytes = endpoint_id.as_bytes();
    let mut uuid_bytes = [0u8; 16];
    uuid_bytes.copy_from_slice(&id_bytes[..16]);
    let node_id = Uuid::from_bytes(uuid_bytes);
    info!("Spawning gossip protocol...");
    let gossip = Gossip::builder().spawn(endpoint.clone());
    info!("Setting up router...");
    let router = Router::builder(endpoint.clone())
        .accept(iroh_gossip::ALPN, gossip.clone())
        .spawn();
    // Subscribe to shared topic — the same fixed topic id is used by every
    // instance of the demo so all nodes meet on one swarm.
    let topic_id = TopicId::from_bytes([42; 32]);
    info!("Subscribing to topic...");
    let subscribe_handle = gossip.subscribe(topic_id, vec![]).await?;
    let (sender, mut receiver) = subscribe_handle.split();
    // Wait for join (with timeout since we might be the first node)
    info!("Waiting for gossip join...");
    match tokio::time::timeout(std::time::Duration::from_secs(2), receiver.joined()).await {
        Ok(Ok(())) => info!("Joined gossip swarm"),
        Ok(Err(e)) => warn!("Join error: {} (proceeding anyway)", e),
        Err(_) => info!("Join timeout (first node in swarm)"),
    }
    // Create bridge
    let bridge = GossipBridge::new(node_id);
    info!("GossipBridge created with node ID: {}", node_id);
    // Spawn forwarding tasks - pass endpoint, router, gossip to keep them alive
    spawn_bridge_tasks(sender, receiver, bridge.clone(), endpoint, router, gossip);
    Ok(bridge)
}
/// Spawn tokio tasks to forward messages between iroh-gossip and GossipBridge
///
/// This function spawns two concurrent tokio tasks that run for the lifetime of the application:
///
/// 1. **Outgoing Task**: Polls GossipBridge for outgoing messages and broadcasts them via gossip
/// 2. **Incoming Task**: Receives messages from gossip and pushes them into GossipBridge
///
/// # Lifetime Management
///
/// The iroh resources (endpoint, router, gossip) are moved into the first task to keep them
/// alive for the application lifetime. Without this, they would be dropped immediately and
/// the gossip connection would close.
///
/// # Platform Support
///
/// This function is only compiled on non-iOS platforms.
#[cfg(not(target_os = "ios"))]
fn spawn_bridge_tasks(
sender: iroh_gossip::api::GossipSender,
mut receiver: iroh_gossip::api::GossipReceiver,
bridge: GossipBridge,
_endpoint: iroh::Endpoint,
_router: iroh::protocol::Router,
_gossip: iroh_gossip::net::Gossip,
) {
use bytes::Bytes;
use futures_lite::StreamExt;
use lib::networking::VersionedMessage;
use std::time::Duration;
let node_id = bridge.node_id();
// Task 1: Forward outgoing messages from GossipBridge → iroh-gossip
// Keep endpoint, router, gossip alive by moving them into this task
let bridge_out = bridge.clone();
tokio::spawn(async move {
let _endpoint = _endpoint; // Keep alive for app lifetime
let _router = _router; // Keep alive for app lifetime
let _gossip = _gossip; // Keep alive for app lifetime
loop {
if let Some(msg) = bridge_out.try_recv_outgoing() {
if let Ok(bytes) = bincode::serialize(&msg) {
if let Err(e) = sender.broadcast(Bytes::from(bytes)).await {
error!("[Node {}] Broadcast failed: {}", node_id, e);
}
}
}
tokio::time::sleep(Duration::from_millis(10)).await;
}
});
// Task 2: Forward incoming messages from iroh-gossip → GossipBridge
let bridge_in = bridge.clone();
tokio::spawn(async move {
loop {
match tokio::time::timeout(Duration::from_millis(100), receiver.next()).await {
Ok(Some(Ok(event))) => {
if let iroh_gossip::api::Event::Received(msg) = event {
if let Ok(versioned_msg) =
bincode::deserialize::<VersionedMessage>(&msg.content)
{
if let Err(e) = bridge_in.push_incoming(versioned_msg) {
error!("[Node {}] Push incoming failed: {}", node_id, e);
}
}
}
}
Ok(Some(Err(e))) => error!("[Node {}] Receiver error: {}", node_id, e),
Ok(None) => break,
Err(_) => {} // Timeout
}
}
});
}

View File

@@ -0,0 +1,471 @@
//! Headless integration tests for cube synchronization
//!
//! These tests validate end-to-end CRDT synchronization of the cube entity
//! using multiple headless Bevy apps with real iroh-gossip networking.
use std::{
path::PathBuf,
time::{Duration, Instant},
};
use anyhow::Result;
use app::CubeMarker;
use bevy::{
app::{App, ScheduleRunnerPlugin},
ecs::world::World,
prelude::*,
MinimalPlugins,
};
use bytes::Bytes;
use futures_lite::StreamExt;
use iroh::{protocol::Router, Endpoint};
use iroh_gossip::{
api::{GossipReceiver, GossipSender},
net::Gossip,
proto::TopicId,
};
use lib::{
networking::{
GossipBridge, NetworkedEntity, NetworkedTransform, NetworkingConfig, NetworkingPlugin,
Synced, VersionedMessage,
},
persistence::{Persisted, PersistenceConfig, PersistencePlugin},
};
use tempfile::TempDir;
use uuid::Uuid;
// ============================================================================
// Test Utilities
// ============================================================================
mod test_utils {
use super::*;
/// RAII test fixture: owns a temporary directory and exposes a database
/// path inside it. Dropping the context removes the directory.
pub struct TestContext {
    _temp_dir: TempDir,
    db_path: PathBuf,
}
impl TestContext {
    /// Create a fresh temporary directory holding `test.db`.
    pub fn new() -> Self {
        let temp_dir = TempDir::new().expect("Failed to create temp directory");
        Self {
            db_path: temp_dir.path().join("test.db"),
            _temp_dir: temp_dir,
        }
    }
    /// Owned copy of the database path inside the temp directory.
    pub fn db_path(&self) -> PathBuf {
        self.db_path.clone()
    }
}
/// Build a headless Bevy app wired with networking and persistence for tests.
pub fn create_test_app(node_id: Uuid, db_path: PathBuf, bridge: GossipBridge) -> App {
    let mut app = App::new();
    // Headless schedule runner ticking at ~60 Hz.
    let runner = ScheduleRunnerPlugin::run_loop(Duration::from_secs_f64(1.0 / 60.0));
    app.add_plugins(MinimalPlugins.set(runner));
    app.insert_resource(bridge);
    app.add_plugins(NetworkingPlugin::new(NetworkingConfig {
        node_id,
        sync_interval_secs: 0.5, // Fast for testing
        prune_interval_secs: 10.0,
        tombstone_gc_interval_secs: 30.0,
    }));
    app.add_plugins(PersistencePlugin::with_config(
        db_path,
        PersistenceConfig {
            flush_interval_secs: 1,
            checkpoint_interval_secs: 5,
            battery_adaptive: false,
            ..Default::default()
        },
    ));
    // Register cube component types for reflection
    app.register_type::<CubeMarker>();
    app
}
/// Number of entities currently carrying a `CubeMarker` component.
pub fn count_cubes(world: &mut World) -> usize {
    let mut cubes = world.query::<&CubeMarker>();
    cubes.iter(world).count()
}
/// Number of networked entities whose `network_id` equals `network_id`.
pub fn count_entities_with_id(world: &mut World, network_id: Uuid) -> usize {
    let mut networked = world.query::<&NetworkedEntity>();
    networked
        .iter(world)
        .filter(|candidate| candidate.network_id == network_id)
        .count()
}
/// Tick both apps until `check_fn` returns true or `timeout` elapses.
///
/// Updates each app once per iteration, evaluates the condition against
/// both worlds, and sleeps briefly between iterations to avoid spinning.
/// Returns `Err` if the condition never holds within the timeout.
pub async fn wait_for_sync<F>(
    app1: &mut App,
    app2: &mut App,
    timeout: Duration,
    check_fn: F,
) -> Result<()>
where
    F: Fn(&mut World, &mut World) -> bool,
{
    let started = Instant::now();
    let mut ticks = 0;
    while started.elapsed() < timeout {
        // Advance both simulations one frame.
        app1.update();
        app2.update();
        ticks += 1;
        // Periodic progress output for long waits.
        if ticks % 50 == 0 {
            println!(
                "Waiting for sync... tick {} ({:.1}s elapsed)",
                ticks,
                started.elapsed().as_secs_f32()
            );
        }
        if check_fn(app1.world_mut(), app2.world_mut()) {
            println!(
                "Sync completed after {} ticks ({:.3}s)",
                ticks,
                started.elapsed().as_secs_f32()
            );
            return Ok(());
        }
        tokio::time::sleep(Duration::from_millis(16)).await;
    }
    println!("Sync timeout after {} ticks", ticks);
    anyhow::bail!("Sync timeout after {:?}. Condition not met.", timeout)
}
/// Initialize a single iroh-gossip node
///
/// Binds an endpoint with mDNS discovery, spawns the gossip protocol and
/// router, optionally dials the given bootstrap peers, subscribes to
/// `topic_id`, and wires a `GossipBridge` to the subscription.
///
/// The returned endpoint, gossip handle and router must be kept alive by
/// the caller or the connection closes.
async fn init_gossip_node(
    topic_id: TopicId,
    bootstrap_addrs: Vec<iroh::EndpointAddr>,
) -> Result<(Endpoint, Gossip, Router, GossipBridge)> {
    println!(" Creating endpoint with mDNS discovery...");
    let endpoint = Endpoint::builder()
        .discovery(iroh::discovery::mdns::MdnsDiscovery::builder())
        .bind()
        .await?;
    let endpoint_id = endpoint.addr().id;
    println!(" Endpoint created: {}", endpoint_id);
    // Convert 32-byte endpoint ID to 16-byte UUID
    let id_bytes = endpoint_id.as_bytes();
    let mut uuid_bytes = [0u8; 16];
    uuid_bytes.copy_from_slice(&id_bytes[..16]);
    let node_id = Uuid::from_bytes(uuid_bytes);
    println!(" Spawning gossip protocol...");
    let gossip = Gossip::builder().spawn(endpoint.clone());
    println!(" Setting up router...");
    let router = Router::builder(endpoint.clone())
        .accept(iroh_gossip::ALPN, gossip.clone())
        .spawn();
    let bootstrap_count = bootstrap_addrs.len();
    let has_bootstrap_peers = !bootstrap_addrs.is_empty();
    let bootstrap_ids: Vec<_> = bootstrap_addrs.iter().map(|a| a.id).collect();
    if has_bootstrap_peers {
        // Pre-seed discovery with known peer addresses and dial them so the
        // gossip subscription below can join immediately.
        let static_provider = iroh::discovery::static_provider::StaticProvider::default();
        for addr in &bootstrap_addrs {
            static_provider.add_endpoint_info(addr.clone());
        }
        endpoint.discovery().add(static_provider);
        println!(
            " Added {} bootstrap peers to static discovery",
            bootstrap_count
        );
        println!(" Connecting to bootstrap peers...");
        for addr in &bootstrap_addrs {
            // Failed dials are logged but non-fatal; mDNS may connect later.
            match endpoint.connect(addr.clone(), iroh_gossip::ALPN).await {
                Ok(_conn) => println!(" ✓ Connected to bootstrap peer: {}", addr.id),
                Err(e) => {
                    println!(" ✗ Failed to connect to bootstrap peer {}: {}", addr.id, e)
                }
            }
        }
    }
    println!(
        " Subscribing to topic with {} bootstrap peers...",
        bootstrap_count
    );
    let subscribe_handle = gossip.subscribe(topic_id, bootstrap_ids).await?;
    println!(" Splitting sender/receiver...");
    let (sender, mut receiver) = subscribe_handle.split();
    if has_bootstrap_peers {
        // Bounded wait: the first node in a swarm never "joins" anyone.
        println!(" Waiting for join to complete (with timeout)...");
        match tokio::time::timeout(Duration::from_secs(3), receiver.joined()).await {
            Ok(Ok(())) => println!(" Join completed!"),
            Ok(Err(e)) => println!(" Join error: {}", e),
            Err(_) => {
                println!(" Join timeout - proceeding anyway (mDNS may still connect later)")
            }
        }
    } else {
        println!(" No bootstrap peers - skipping join wait (first node in swarm)");
    }
    let bridge = GossipBridge::new(node_id);
    println!(" Spawning bridge tasks...");
    spawn_gossip_bridge_tasks(sender, receiver, bridge.clone());
    println!(" Node initialization complete");
    Ok((endpoint, gossip, router, bridge))
}
/// Setup a pair of iroh-gossip nodes connected to the same topic
///
/// Node 1 starts with no bootstrap peers; node 2 bootstraps off node 1's
/// address. Returns both endpoints, routers and bridges — the caller must
/// keep the endpoints and routers alive for the duration of the test.
pub async fn setup_gossip_pair() -> Result<(
    Endpoint,
    Endpoint,
    Router,
    Router,
    GossipBridge,
    GossipBridge,
)> {
    // Fixed topic shared by both nodes.
    let topic_id = TopicId::from_bytes([42; 32]);
    println!("Using topic ID: {:?}", topic_id);
    println!("Initializing node 1...");
    let (ep1, _gossip1, router1, bridge1) = init_gossip_node(topic_id, vec![]).await?;
    println!("Node 1 initialized with ID: {}", ep1.addr().id);
    let node1_addr = ep1.addr().clone();
    println!("Node 1 full address: {:?}", node1_addr);
    println!("Initializing node 2 with bootstrap peer: {}", node1_addr.id);
    let (ep2, _gossip2, router2, bridge2) =
        init_gossip_node(topic_id, vec![node1_addr]).await?;
    println!("Node 2 initialized with ID: {}", ep2.addr().id);
    // Give mDNS/gossip a moment to finish peer discovery before the test
    // starts exchanging messages.
    println!("Waiting for mDNS/gossip peer discovery...");
    tokio::time::sleep(Duration::from_secs(2)).await;
    println!("Peer discovery wait complete");
    Ok((ep1, ep2, router1, router2, bridge1, bridge2))
}
/// Spawn background tasks to forward messages between iroh-gossip and GossipBridge
///
/// Starts two detached tokio tasks that run until the gossip stream ends:
/// one forwards the bridge's outgoing queue to the gossip sender, the other
/// feeds received gossip events into the bridge's incoming queue. Verbose
/// `println!` logging is intentional — these run inside integration tests.
fn spawn_gossip_bridge_tasks(
    sender: GossipSender,
    mut receiver: GossipReceiver,
    bridge: GossipBridge,
) {
    let node_id = bridge.node_id();
    // Task 1: Forward from bridge.outgoing → gossip sender
    let bridge_out = bridge.clone();
    tokio::spawn(async move {
        let mut msg_count = 0;
        loop {
            // Poll-and-sleep loop: check for one outgoing message, then yield.
            if let Some(versioned_msg) = bridge_out.try_recv_outgoing() {
                msg_count += 1;
                println!(
                    "[Node {}] Sending message #{} via gossip",
                    node_id, msg_count
                );
                match bincode::serialize(&versioned_msg) {
                    Ok(bytes) => {
                        if let Err(e) = sender.broadcast(Bytes::from(bytes)).await {
                            eprintln!("[Node {}] Failed to broadcast message: {}", node_id, e);
                        } else {
                            println!(
                                "[Node {}] Message #{} broadcasted successfully",
                                node_id, msg_count
                            );
                        }
                    }
                    Err(e) => eprintln!(
                        "[Node {}] Failed to serialize message for broadcast: {}",
                        node_id, e
                    ),
                }
            }
            tokio::time::sleep(Duration::from_millis(10)).await;
        }
    });
    // Task 2: Forward from gossip receiver → bridge.incoming
    let bridge_in = bridge.clone();
    tokio::spawn(async move {
        let mut msg_count = 0;
        println!("[Node {}] Gossip receiver task started", node_id);
        loop {
            // The 100 ms timeout keeps the loop responsive while idle.
            match tokio::time::timeout(Duration::from_millis(100), receiver.next()).await {
                Ok(Some(Ok(event))) => {
                    println!(
                        "[Node {}] Received gossip event: {:?}",
                        node_id,
                        std::mem::discriminant(&event)
                    );
                    // Only payload-bearing events matter; membership events
                    // (joins etc.) are logged above and otherwise ignored.
                    if let iroh_gossip::api::Event::Received(msg) = event {
                        msg_count += 1;
                        println!(
                            "[Node {}] Received message #{} from gossip",
                            node_id, msg_count
                        );
                        match bincode::deserialize::<VersionedMessage>(&msg.content) {
                            Ok(versioned_msg) => {
                                if let Err(e) = bridge_in.push_incoming(versioned_msg) {
                                    eprintln!(
                                        "[Node {}] Failed to push to bridge incoming: {}",
                                        node_id, e
                                    );
                                } else {
                                    println!(
                                        "[Node {}] Message #{} pushed to bridge incoming",
                                        node_id, msg_count
                                    );
                                }
                            }
                            Err(e) => eprintln!(
                                "[Node {}] Failed to deserialize gossip message: {}",
                                node_id, e
                            ),
                        }
                    }
                }
                Ok(Some(Err(e))) => {
                    eprintln!("[Node {}] Gossip receiver error: {}", node_id, e)
                }
                Ok(None) => {
                    // Stream ended — subscription closed, stop the task.
                    println!("[Node {}] Gossip stream ended", node_id);
                    break;
                }
                Err(_) => {
                    // Timeout, no message available
                }
            }
        }
    });
}
}
// ============================================================================
// Integration Tests
// ============================================================================
/// Test: Basic cube spawn and sync (Node A spawns → Node B receives)
///
/// End-to-end check over real iroh-gossip networking: two headless Bevy
/// apps share a topic; a cube spawned on node 1 must appear on node 2 with
/// the same network ID and Transform.
#[tokio::test(flavor = "multi_thread")]
async fn test_cube_spawn_and_sync() -> Result<()> {
    use test_utils::*;
    println!("=== Starting test_cube_spawn_and_sync ===");
    // Setup contexts
    println!("Creating test contexts...");
    let ctx1 = TestContext::new();
    let ctx2 = TestContext::new();
    // Setup gossip networking
    println!("Setting up gossip pair...");
    let (ep1, ep2, router1, router2, bridge1, bridge2) = setup_gossip_pair().await?;
    let node1_id = bridge1.node_id();
    let node2_id = bridge2.node_id();
    // Create headless apps
    println!("Creating Bevy apps...");
    let mut app1 = create_test_app(node1_id, ctx1.db_path(), bridge1);
    let mut app2 = create_test_app(node2_id, ctx2.db_path(), bridge2);
    println!("Apps created successfully");
    println!("Node 1 ID: {}", node1_id);
    println!("Node 2 ID: {}", node2_id);
    // Node 1 spawns cube (directly into the world, mirroring what the app's
    // spawn_cube system would create).
    let entity_id = Uuid::new_v4();
    println!("Spawning cube {} on node 1", entity_id);
    let spawned_entity = app1
        .world_mut()
        .spawn((
            CubeMarker,
            Transform::from_xyz(1.0, 2.0, 3.0),
            GlobalTransform::default(),
            NetworkedEntity::with_id(entity_id, node1_id),
            NetworkedTransform,
            Persisted::with_id(entity_id),
            Synced,
        ))
        .id();
    // IMPORTANT: Trigger change detection for persistence
    // Bevy only marks components as "changed" when mutated, not on spawn
    {
        let world = app1.world_mut();
        if let Ok(mut entity_mut) = world.get_entity_mut(spawned_entity) {
            if let Some(mut persisted) = entity_mut.get_mut::<Persisted>() {
                // Dereferencing the mutable borrow triggers change detection
                let _ = &mut *persisted;
            }
        }
    }
    println!("Cube spawned, triggered persistence");
    println!("Cube spawned, starting sync wait...");
    // Wait for cube to sync to node 2 (condition checks node 2's world only).
    wait_for_sync(&mut app1, &mut app2, Duration::from_secs(10), |_, w2| {
        count_entities_with_id(w2, entity_id) > 0
    })
    .await?;
    println!("Cube synced to node 2!");
    // Verify cube exists on node 2 with correct Transform
    let cube_transform = app2
        .world_mut()
        .query_filtered::<&Transform, With<CubeMarker>>()
        .single(app2.world())
        .expect("Cube should exist on node 2");
    assert!(
        (cube_transform.translation.x - 1.0).abs() < 0.01,
        "Transform.x should be 1.0"
    );
    assert!(
        (cube_transform.translation.y - 2.0).abs() < 0.01,
        "Transform.y should be 2.0"
    );
    assert!(
        (cube_transform.translation.z - 3.0).abs() < 0.01,
        "Transform.z should be 3.0"
    );
    println!("Transform verified!");
    // Cleanup: shut the routers down first, then drop the endpoints.
    router1.shutdown().await?;
    router2.shutdown().await?;
    drop(ep1);
    drop(ep2);
    println!("=== Test completed successfully ===");
    Ok(())
}

View File

@@ -1 +0,0 @@
/target

View File

@@ -1,43 +0,0 @@
[package]
name = "client"
version = "0.1.0"
edition.workspace = true
[[bin]]
name = "client"
path = "src/main.rs"
[dependencies]
# Bevy
bevy = { version = "0.17", default-features = false, features = [
"bevy_winit",
"bevy_render",
"bevy_core_pipeline",
"bevy_sprite",
"bevy_ui",
"bevy_text",
"png",
"x11",
] }
# Iroh - P2P networking and gossip
iroh = { workspace = true }
iroh-gossip = { workspace = true }
# Async runtime
tokio = { version = "1", features = ["full"] }
# Serialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
# Error handling
thiserror = "2.0"
anyhow = "1.0"
# Logging
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
# Local dependencies
lib = { path = "../lib" }

View File

@@ -1,14 +0,0 @@
/// Returns the sum of `left` and `right`.
///
/// Uses the default `u64` addition semantics (panics on overflow in debug
/// builds, wraps in release builds).
pub fn add(left: u64, right: u64) -> u64 {
    left + right
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn it_works() {
        assert_eq!(add(2, 2), 4);
    }
}

View File

@@ -1,24 +0,0 @@
use bevy::prelude::*;
use tracing::info;
/// Client entry point: installs an env-filtered tracing subscriber
/// (verbosity controlled via the standard env filter, e.g. RUST_LOG),
/// then hands control to the Bevy event loop.
fn main() {
    tracing_subscriber::fmt()
        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
        .init();
    // Start Bevy app; `run` blocks for the lifetime of the app
    App::new()
        .add_plugins(DefaultPlugins)
        .add_systems(Startup, setup)
        .add_systems(Update, sync_system)
        .run();
}
/// Startup system: spawns the 2D camera and logs that the client is up.
fn setup(mut commands: Commands) {
    commands.spawn(Camera2d);
    info!("Client started");
}
/// Per-frame sync system — currently an empty stub.
fn sync_system() {
    // TODO: Implement gossip sync for client
}

View File

@@ -1,58 +0,0 @@
[package]
name = "server"
version = "0.1.0"
edition.workspace = true
[[bin]]
name = "server"
path = "src/main.rs"
[dependencies]
# Bevy (headless)
bevy = { version = "0.17", default-features = false, features = [
"bevy_state",
] }
# Iroh - P2P networking and gossip
iroh = { workspace = true }
iroh-gossip = { workspace = true }
# Async runtime
tokio = { version = "1", features = ["full"] }
tokio-stream = "0.1"
futures-lite = "2.5"
# Database
rusqlite = { version = "0.37.0", features = ["bundled", "column_decltype", "load_extension"] }
# Serialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
toml = "0.9"
# Logging
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
# Error handling
thiserror = "2.0"
anyhow = "1.0"
# Date/time
chrono = { version = "0.4", features = ["serde"] }
# Random number generation
rand = "0.8"
# ML/AI - Candle for inference (using newer versions with better compatibility)
candle-core = "0.8"
candle-nn = "0.8"
candle-transformers = "0.8"
tokenizers = "0.20"
hf-hub = "0.3"
# Synchronization
parking_lot = { workspace = true }
# Local dependencies
lib = { path = "../lib" }

View File

@@ -1 +0,0 @@
// Asset loading and management will go here

View File

@@ -1,15 +0,0 @@
use std::sync::Arc;
use bevy::prelude::*;
use parking_lot::Mutex;
use rusqlite::Connection;
use crate::config::Config;
/// Bevy resource wrapping application configuration
#[derive(Resource)]
pub struct AppConfig(pub Config);
/// Bevy resource wrapping database connection
#[derive(Resource)]
pub struct Database(pub Arc<Mutex<Connection>>);

View File

@@ -1,95 +0,0 @@
use std::sync::Arc;
use bevy::prelude::*;
use iroh::{
Endpoint,
protocol::Router,
};
use iroh_gossip::{
api::{
GossipReceiver,
GossipSender,
},
net::Gossip,
proto::TopicId,
};
use parking_lot::Mutex;
use serde::{
Deserialize,
Serialize,
};
/// Message envelope for gossip sync
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SyncMessage {
    /// The actual message from iMessage
    pub message: lib::Message,
    /// Timestamp when this was published to gossip
    /// (unix seconds — TODO confirm units at the publish site)
    pub sync_timestamp: i64,
    /// ID of the node that published this
    pub publisher_node_id: String,
}
/// Bevy resource wrapping the gossip handle
#[derive(Resource, Clone)]
pub struct IrohGossipHandle {
    pub gossip: Gossip,
}
/// Bevy resource wrapping the gossip sender.
/// Mutex-guarded so systems can publish from behind a shared resource ref.
#[derive(Resource)]
pub struct IrohGossipSender {
    pub sender: Arc<Mutex<GossipSender>>,
}
/// Bevy resource wrapping the gossip receiver
#[derive(Resource)]
pub struct IrohGossipReceiver {
    pub receiver: Arc<Mutex<GossipReceiver>>,
}
/// Bevy resource with Iroh router
#[derive(Resource)]
pub struct IrohRouter {
    pub router: Router,
}
/// Bevy resource with Iroh endpoint
#[derive(Resource, Clone)]
pub struct IrohEndpoint {
    pub endpoint: Endpoint,
    /// Human-readable node id string for this endpoint
    pub node_id: String,
}
/// Bevy resource for gossip topic ID
#[derive(Resource)]
pub struct GossipTopic(pub TopicId);
/// Bevy resource for tracking gossip initialization task.
/// Holds the async task that yields the fully-initialized network stack
/// (`None` on failure); drained by the `poll_gossip_init` system.
#[derive(Resource)]
pub struct GossipInitTask(
    pub bevy::tasks::Task<Option<(Endpoint, Gossip, Router, GossipSender, GossipReceiver)>>,
);
/// Bevy message: a new message that needs to be published to gossip
#[derive(Message, Clone, Debug)]
pub struct PublishMessageEvent {
    pub message: lib::Message,
}
/// Bevy message: a message received from gossip that needs to be saved to
/// SQLite
#[derive(Message, Clone, Debug)]
pub struct GossipMessageReceived {
    pub sync_message: SyncMessage,
}
/// Helper to serialize a sync message
pub fn serialize_sync_message(msg: &SyncMessage) -> anyhow::Result<Vec<u8>> {
Ok(serde_json::to_vec(msg)?)
}
/// Helper to deserialize a sync message
pub fn deserialize_sync_message(data: &[u8]) -> anyhow::Result<SyncMessage> {
Ok(serde_json::from_slice(data)?)
}

View File

@@ -1,5 +0,0 @@
pub mod database;
pub mod gossip;
pub use database::*;
pub use gossip::*;

View File

@@ -1,89 +0,0 @@
use std::{
fs,
path::Path,
};
use anyhow::{
Context,
Result,
};
use serde::{
Deserialize,
Serialize,
};
/// Top-level application configuration, loaded from `config.toml`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    pub database: DatabaseConfig,
    pub services: ServicesConfig,
    pub models: ModelsConfig,
    pub tailscale: TailscaleConfig,
    pub grpc: GrpcConfig,
}
/// Database file locations.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DatabaseConfig {
    /// Path to the application's own SQLite database
    pub path: String,
    /// Path to the iMessage chat.db that is polled (opened read-only)
    pub chat_db_path: String,
}
/// Background-service tuning knobs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServicesConfig {
    /// How often the chat.db poller wakes up, in milliseconds
    pub poll_interval_ms: u64,
    /// Fraction of messages sampled into the training set
    /// (presumably 0.0–1.0 — TODO confirm at the sampling site)
    pub training_set_sample_rate: f64,
}
/// Model identifiers used for inference (defaults are Hugging Face repo ids).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelsConfig {
    pub embedding_model: String,
    pub emotion_model: String,
}
/// Embedded-Tailscale settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TailscaleConfig {
    pub hostname: String,
    pub state_dir: String,
}
/// gRPC server settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GrpcConfig {
    pub port: u16,
}
impl Config {
pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Self> {
let content = fs::read_to_string(path.as_ref())
.context(format!("Failed to read config file: {:?}", path.as_ref()))?;
let config: Config = toml::from_str(&content).context("Failed to parse config file")?;
Ok(config)
}
pub fn default_config() -> Self {
Self {
database: DatabaseConfig {
path: "./us.db".to_string(),
chat_db_path: "./crates/lib/chat.db".to_string(),
},
services: ServicesConfig {
poll_interval_ms: 1000,
training_set_sample_rate: 0.05,
},
models: ModelsConfig {
embedding_model: "Qwen/Qwen3-Embedding-0.6B".to_string(),
emotion_model: "SamLowe/roberta-base-go_emotions".to_string(),
},
tailscale: TailscaleConfig {
hostname: "lonni-daemon".to_string(),
state_dir: "./tailscale-state".to_string(),
},
grpc: GrpcConfig { port: 50051 },
}
}
pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
let content = toml::to_string_pretty(self).context("Failed to serialize config")?;
fs::write(path.as_ref(), content)
.context(format!("Failed to write config file: {:?}", path.as_ref()))?;
Ok(())
}
}

View File

@@ -1,5 +0,0 @@
pub mod operations;
pub mod schema;
pub use operations::*;
pub use schema::*;

View File

@@ -1,339 +0,0 @@
use chrono::{
TimeZone,
Utc,
};
use rusqlite::{
Connection,
OptionalExtension,
Result,
Row,
params,
};
use crate::{
db::schema::{
deserialize_embedding,
serialize_embedding,
},
models::*,
};
/// Insert a new message into the database
///
/// Duplicate `chat_db_rowid`s are silently ignored via
/// `ON CONFLICT(chat_db_rowid) DO NOTHING`.
/// NOTE(review): when the row already exists, nothing is inserted but this
/// still returns `last_insert_rowid()` — i.e. the id of some *earlier*
/// insert on this connection. Callers that need the real id should use
/// `get_message_id_by_chat_rowid` — confirm call sites tolerate this.
pub fn insert_message(conn: &Connection, msg: &lib::Message) -> Result<i64> {
    let timestamp = msg.date.map(|dt| dt.timestamp());
    let created_at = Utc::now().timestamp();
    conn.execute(
        "INSERT INTO messages (chat_db_rowid, text, timestamp, is_from_me, created_at)
         VALUES (?1, ?2, ?3, ?4, ?5)
         ON CONFLICT(chat_db_rowid) DO NOTHING",
        params![msg.rowid, msg.text, timestamp, msg.is_from_me, created_at],
    )?;
    Ok(conn.last_insert_rowid())
}
/// Get message ID by chat.db rowid; `None` if the rowid was never imported.
pub fn get_message_id_by_chat_rowid(conn: &Connection, chat_db_rowid: i64) -> Result<Option<i64>> {
    conn.query_row(
        "SELECT id FROM messages WHERE chat_db_rowid = ?1",
        params![chat_db_rowid],
        |row| row.get(0),
    )
    .optional()
}
/// Get message by ID (errors with `QueryReturnedNoRows` if absent).
pub fn get_message(conn: &Connection, id: i64) -> Result<Message> {
    conn.query_row(
        "SELECT id, chat_db_rowid, text, timestamp, is_from_me, created_at FROM messages WHERE id = ?1",
        params![id],
        map_message_row,
    )
}
/// Map a SELECTed `messages` row (column order as in the queries above)
/// into a `Message`.
fn map_message_row(row: &Row) -> Result<Message> {
    let timestamp: Option<i64> = row.get(3)?;
    let created_at: i64 = row.get(5)?;
    Ok(Message {
        id: row.get(0)?,
        chat_db_rowid: row.get(1)?,
        text: row.get(2)?,
        // timestamp_opt(..).unwrap() panics on out-of-range seconds; values
        // written by this module come from Utc::now()/chat.db and are
        // assumed in range — TODO confirm for imported data
        timestamp: timestamp.map(|ts| Utc.timestamp_opt(ts, 0).unwrap()),
        is_from_me: row.get(4)?,
        created_at: Utc.timestamp_opt(created_at, 0).unwrap(),
    })
}
/// Insert message embedding
///
/// The vector is stored as a little-endian f32 BLOB (see `serialize_embedding`).
pub fn insert_message_embedding(
    conn: &Connection,
    message_id: i64,
    embedding: &[f32],
    model_name: &str,
) -> Result<i64> {
    let embedding_bytes = serialize_embedding(embedding);
    let created_at = Utc::now().timestamp();
    conn.execute(
        "INSERT INTO message_embeddings (message_id, embedding, model_name, created_at)
         VALUES (?1, ?2, ?3, ?4)",
        params![message_id, embedding_bytes, model_name, created_at],
    )?;
    Ok(conn.last_insert_rowid())
}
/// Get message embedding; `None` when the message has no stored embedding.
pub fn get_message_embedding(
    conn: &Connection,
    message_id: i64,
) -> Result<Option<MessageEmbedding>> {
    conn.query_row(
        "SELECT id, message_id, embedding, model_name, created_at
         FROM message_embeddings WHERE message_id = ?1",
        params![message_id],
        |row| {
            let embedding_bytes: Vec<u8> = row.get(2)?;
            let created_at: i64 = row.get(4)?;
            Ok(MessageEmbedding {
                id: row.get(0)?,
                message_id: row.get(1)?,
                // Decode the BLOB back into f32s
                embedding: deserialize_embedding(&embedding_bytes),
                model_name: row.get(3)?,
                created_at: Utc.timestamp_opt(created_at, 0).unwrap(),
            })
        },
    )
    .optional()
}
/// Insert or get word embedding
///
/// Duplicate words are ignored via `ON CONFLICT(word) DO NOTHING`.
/// NOTE(review): as with `insert_message`, the returned rowid is stale when
/// the word already existed — confirm callers tolerate this.
pub fn insert_word_embedding(
    conn: &Connection,
    word: &str,
    embedding: &[f32],
    model_name: &str,
) -> Result<i64> {
    let embedding_bytes = serialize_embedding(embedding);
    let created_at = Utc::now().timestamp();
    conn.execute(
        "INSERT INTO word_embeddings (word, embedding, model_name, created_at)
         VALUES (?1, ?2, ?3, ?4)
         ON CONFLICT(word) DO NOTHING",
        params![word, embedding_bytes, model_name, created_at],
    )?;
    Ok(conn.last_insert_rowid())
}
/// Get word embedding; `None` when the word has not been embedded yet.
pub fn get_word_embedding(conn: &Connection, word: &str) -> Result<Option<WordEmbedding>> {
    conn.query_row(
        "SELECT id, word, embedding, model_name, created_at
         FROM word_embeddings WHERE word = ?1",
        params![word],
        |row| {
            let embedding_bytes: Vec<u8> = row.get(2)?;
            let created_at: i64 = row.get(4)?;
            Ok(WordEmbedding {
                id: row.get(0)?,
                word: row.get(1)?,
                embedding: deserialize_embedding(&embedding_bytes),
                model_name: row.get(3)?,
                created_at: Utc.timestamp_opt(created_at, 0).unwrap(),
            })
        },
    )
    .optional()
}
/// Insert emotion classification
pub fn insert_emotion(
    conn: &Connection,
    message_id: i64,
    emotion: &str,
    confidence: f64,
    model_version: &str,
) -> Result<i64> {
    // created_at == updated_at on first insert
    let now = Utc::now().timestamp();
    conn.execute(
        "INSERT INTO emotions (message_id, emotion, confidence, model_version, created_at, updated_at)
         VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
        params![message_id, emotion, confidence, model_version, now, now],
    )?;
    Ok(conn.last_insert_rowid())
}
/// Update emotion classification (bumps `updated_at`; `model_version` is kept).
/// NOTE(review): succeeds silently when no row matches `message_id` —
/// confirm that is intended.
pub fn update_emotion(
    conn: &Connection,
    message_id: i64,
    emotion: &str,
    confidence: f64,
) -> Result<()> {
    let updated_at = Utc::now().timestamp();
    conn.execute(
        "UPDATE emotions SET emotion = ?1, confidence = ?2, updated_at = ?3
         WHERE message_id = ?4",
        params![emotion, confidence, updated_at, message_id],
    )?;
    Ok(())
}
/// Get emotion by message ID; `None` when the message is unclassified.
pub fn get_emotion_by_message_id(conn: &Connection, message_id: i64) -> Result<Option<Emotion>> {
    conn.query_row(
        "SELECT id, message_id, emotion, confidence, model_version, created_at, updated_at
         FROM emotions WHERE message_id = ?1",
        params![message_id],
        map_emotion_row,
    )
    .optional()
}
/// Get emotion by ID
pub fn get_emotion_by_id(conn: &Connection, id: i64) -> Result<Option<Emotion>> {
    conn.query_row(
        "SELECT id, message_id, emotion, confidence, model_version, created_at, updated_at
         FROM emotions WHERE id = ?1",
        params![id],
        map_emotion_row,
    )
    .optional()
}
/// List all emotions with optional filters
pub fn list_emotions(
conn: &Connection,
emotion_filter: Option<&str>,
min_confidence: Option<f64>,
limit: Option<i32>,
offset: Option<i32>,
) -> Result<Vec<Emotion>> {
let mut query = String::from(
"SELECT id, message_id, emotion, confidence, model_version, created_at, updated_at
FROM emotions WHERE 1=1",
);
if emotion_filter.is_some() {
query.push_str(" AND emotion = ?1");
}
if min_confidence.is_some() {
query.push_str(" AND confidence >= ?2");
}
query.push_str(" ORDER BY created_at DESC");
if limit.is_some() {
query.push_str(" LIMIT ?3");
}
if offset.is_some() {
query.push_str(" OFFSET ?4");
}
let mut stmt = conn.prepare(&query)?;
let emotions = stmt
.query_map(
params![
emotion_filter.unwrap_or(""),
min_confidence.unwrap_or(0.0),
limit.unwrap_or(1000),
offset.unwrap_or(0),
],
map_emotion_row,
)?
.collect::<Result<Vec<_>>>()?;
Ok(emotions)
}
/// Delete emotion by ID (no-op when the id does not exist).
pub fn delete_emotion(conn: &Connection, id: i64) -> Result<()> {
    conn.execute("DELETE FROM emotions WHERE id = ?1", params![id])?;
    Ok(())
}
/// Count total emotions
pub fn count_emotions(conn: &Connection) -> Result<i32> {
    conn.query_row("SELECT COUNT(*) FROM emotions", [], |row| row.get(0))
}
/// Map a SELECTed `emotions` row (column order as in the queries above)
/// into an `Emotion`.
fn map_emotion_row(row: &Row) -> Result<Emotion> {
    let created_at: i64 = row.get(5)?;
    let updated_at: i64 = row.get(6)?;
    Ok(Emotion {
        id: row.get(0)?,
        message_id: row.get(1)?,
        emotion: row.get(2)?,
        confidence: row.get(3)?,
        model_version: row.get(4)?,
        // timestamp_opt(..).unwrap(): these columns are written via
        // Utc::now(), so the stored seconds are assumed valid
        created_at: Utc.timestamp_opt(created_at, 0).unwrap(),
        updated_at: Utc.timestamp_opt(updated_at, 0).unwrap(),
    })
}
/// Insert emotion training sample
///
/// `message_id` is optional so samples that are not tied to a stored
/// message can also be recorded.
pub fn insert_training_sample(
    conn: &Connection,
    message_id: Option<i64>,
    text: &str,
    expected_emotion: &str,
) -> Result<i64> {
    let now = Utc::now().timestamp();
    conn.execute(
        "INSERT INTO emotions_training_set (message_id, text, expected_emotion, created_at, updated_at)
         VALUES (?1, ?2, ?3, ?4, ?5)",
        params![message_id, text, expected_emotion, now, now],
    )?;
    Ok(conn.last_insert_rowid())
}
/// Get state value from daemon_state table (generic key/value store).
pub fn get_state(conn: &Connection, key: &str) -> Result<Option<String>> {
    conn.query_row(
        "SELECT value FROM daemon_state WHERE key = ?1",
        params![key],
        |row| row.get(0),
    )
    .optional()
}
/// Set state value in daemon_state table (upsert: insert or overwrite).
pub fn set_state(conn: &Connection, key: &str, value: &str) -> Result<()> {
    let updated_at = Utc::now().timestamp();
    conn.execute(
        "INSERT INTO daemon_state (key, value, updated_at)
         VALUES (?1, ?2, ?3)
         ON CONFLICT(key) DO UPDATE SET value = ?2, updated_at = ?3",
        params![key, value, updated_at],
    )?;
    Ok(())
}
/// Get last processed chat.db rowid from database or return 0
/// (also falls back to 0 when the stored value fails to parse as i64).
pub fn get_last_processed_rowid(conn: &Connection) -> Result<i64> {
    Ok(get_state(conn, "last_processed_rowid")?
        .and_then(|s| s.parse().ok())
        .unwrap_or(0))
}
/// Save last processed chat.db rowid to database
pub fn save_last_processed_rowid(conn: &Connection, rowid: i64) -> Result<()> {
    set_state(conn, "last_processed_rowid", &rowid.to_string())
}

View File

@@ -1,210 +0,0 @@
use rusqlite::{
Connection,
Result,
};
use tracing::info;
/// Create all tables and indexes (idempotent), and best-effort load the
/// sqlite-vec extension.
///
/// # Errors
/// Fails only on DDL errors; a missing or unloadable vector extension is
/// logged and ignored.
pub fn initialize_database(conn: &Connection) -> Result<()> {
    info!("Initializing database schema");
    // Load sqlite-vec extension (macOS only)
    let vec_path = "./extensions/vec0.dylib";
    // Try to load the vector extension (non-fatal if it fails for now)
    // SAFETY: enabling extension loading and loading a dylib executes
    // arbitrary native code; we only ever point this at the bundled
    // vec0.dylib path above.
    match unsafe { conn.load_extension_enable() } {
        | Ok(_) => {
            // SAFETY: see note above — controlled local path only.
            match unsafe { conn.load_extension(vec_path, None::<&str>) } {
                | Ok(_) => info!("Loaded sqlite-vec extension"),
                | Err(e) => info!(
                    "Could not load sqlite-vec extension: {}. Vector operations will not be available.",
                    e
                ),
            }
            // SAFETY: re-disabling extension loading is always sound; the
            // result is deliberately ignored (best-effort hardening).
            let _ = unsafe { conn.load_extension_disable() };
        },
        | Err(e) => info!("Extension loading not enabled: {}", e),
    }
    // Create messages table
    conn.execute(
        "CREATE TABLE IF NOT EXISTS messages (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            chat_db_rowid INTEGER UNIQUE NOT NULL,
            text TEXT,
            timestamp INTEGER,
            is_from_me BOOLEAN NOT NULL,
            created_at INTEGER NOT NULL
        )",
        [],
    )?;
    // Create index on chat_db_rowid for fast lookups
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_messages_chat_db_rowid ON messages(chat_db_rowid)",
        [],
    )?;
    // Create message_embeddings table
    conn.execute(
        "CREATE TABLE IF NOT EXISTS message_embeddings (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            message_id INTEGER NOT NULL,
            embedding BLOB NOT NULL,
            model_name TEXT NOT NULL,
            created_at INTEGER NOT NULL,
            FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE CASCADE
        )",
        [],
    )?;
    // Create index on message_id
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_message_embeddings_message_id ON message_embeddings(message_id)",
        [],
    )?;
    // Create word_embeddings table
    conn.execute(
        "CREATE TABLE IF NOT EXISTS word_embeddings (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            word TEXT UNIQUE NOT NULL,
            embedding BLOB NOT NULL,
            model_name TEXT NOT NULL,
            created_at INTEGER NOT NULL
        )",
        [],
    )?;
    // Create index on word
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_word_embeddings_word ON word_embeddings(word)",
        [],
    )?;
    // Create emotions table
    conn.execute(
        "CREATE TABLE IF NOT EXISTS emotions (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            message_id INTEGER NOT NULL,
            emotion TEXT NOT NULL,
            confidence REAL NOT NULL,
            model_version TEXT NOT NULL,
            created_at INTEGER NOT NULL,
            updated_at INTEGER NOT NULL,
            FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE CASCADE
        )",
        [],
    )?;
    // Create indexes for emotions
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_emotions_message_id ON emotions(message_id)",
        [],
    )?;
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_emotions_emotion ON emotions(emotion)",
        [],
    )?;
    // Create emotions_training_set table
    conn.execute(
        "CREATE TABLE IF NOT EXISTS emotions_training_set (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            message_id INTEGER,
            text TEXT NOT NULL,
            expected_emotion TEXT NOT NULL,
            actual_emotion TEXT,
            confidence REAL,
            is_validated BOOLEAN NOT NULL DEFAULT 0,
            notes TEXT,
            created_at INTEGER NOT NULL,
            updated_at INTEGER NOT NULL,
            FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE SET NULL
        )",
        [],
    )?;
    // Create index on emotions_training_set
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_emotions_training_set_message_id ON emotions_training_set(message_id)",
        [],
    )?;
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_emotions_training_set_validated ON emotions_training_set(is_validated)",
        [],
    )?;
    // Create state table for daemon state persistence
    conn.execute(
        "CREATE TABLE IF NOT EXISTS daemon_state (
            key TEXT PRIMARY KEY,
            value TEXT NOT NULL,
            updated_at INTEGER NOT NULL
        )",
        [],
    )?;
    // Create models table for storing ML model files
    conn.execute(
        "CREATE TABLE IF NOT EXISTS models (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT UNIQUE NOT NULL,
            model_type TEXT NOT NULL,
            version TEXT NOT NULL,
            file_data BLOB NOT NULL,
            metadata TEXT,
            created_at INTEGER NOT NULL,
            updated_at INTEGER NOT NULL
        )",
        [],
    )?;
    // Create index on model name and type
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_models_name ON models(name)",
        [],
    )?;
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_models_type ON models(model_type)",
        [],
    )?;
    info!("Database schema initialized successfully");
    Ok(())
}
/// Serialize an f32 embedding vector into little-endian bytes for BLOB storage.
///
/// The output is exactly `4 * embedding.len()` bytes; `deserialize_embedding`
/// is the inverse.
pub fn serialize_embedding(embedding: &[f32]) -> Vec<u8> {
    // Preallocate the exact size: flat_map's size_hint lower bound is too
    // weak for collect() to reserve 4 bytes per float up front.
    let mut bytes = Vec::with_capacity(embedding.len() * 4);
    for value in embedding {
        bytes.extend_from_slice(&value.to_le_bytes());
    }
    bytes
}
/// Decode a little-endian byte BLOB back into an f32 vector.
///
/// Inverse of `serialize_embedding`. Trailing bytes that do not form a
/// complete 4-byte group are ignored (`chunks_exact` semantics), matching
/// the original behavior.
pub fn deserialize_embedding(bytes: &[u8]) -> Vec<f32> {
    let mut values = Vec::with_capacity(bytes.len() / 4);
    for chunk in bytes.chunks_exact(4) {
        // chunks_exact guarantees each chunk is exactly 4 bytes, so this
        // conversion cannot fail.
        let array: [u8; 4] = chunk.try_into().expect("chunks_exact yields 4-byte chunks");
        values.push(f32::from_le_bytes(array));
    }
    values
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Round-trip: serialize then deserialize must reproduce the input
    /// within float tolerance (exact in practice, since the little-endian
    /// byte round-trip is lossless).
    #[test]
    fn test_embedding_serialization() {
        let original = vec![1.0f32, 2.5, -3.7, 0.0, 100.5];
        let serialized = serialize_embedding(&original);
        let deserialized = deserialize_embedding(&serialized);
        assert_eq!(original.len(), deserialized.len());
        for (a, b) in original.iter().zip(deserialized.iter()) {
            assert!((a - b).abs() < 1e-6);
        }
    }
}

View File

@@ -1 +0,0 @@
// Entity builders and spawners will go here

View File

@@ -1,49 +0,0 @@
use anyhow::Result;
use iroh::{
Endpoint,
protocol::Router,
};
use iroh_gossip::{
api::{
GossipReceiver,
GossipSender,
},
net::Gossip,
proto::TopicId,
};
/// Initialize Iroh endpoint and gossip for the given topic
///
/// Returns the endpoint, the gossip protocol handle, the router (must be
/// kept alive for incoming connections to be accepted), and the split
/// sender/receiver for `topic_id`.
///
/// NOTE(review): the topic is subscribed with an empty bootstrap list and
/// `receiver.joined()` is awaited before returning — confirm iroh-gossip
/// resolves the join promptly with zero peers, otherwise this future can
/// stall here.
pub async fn init_iroh_gossip(
    topic_id: TopicId,
) -> Result<(Endpoint, Gossip, Router, GossipSender, GossipReceiver)> {
    println!("Initializing Iroh endpoint...");
    // Create the Iroh endpoint
    let endpoint = Endpoint::bind().await?;
    println!("Endpoint created");
    // Build the gossip protocol
    println!("Building gossip protocol...");
    let gossip = Gossip::builder().spawn(endpoint.clone());
    // Setup the router to handle incoming connections
    println!("Setting up router...");
    let router = Router::builder(endpoint.clone())
        .accept(iroh_gossip::ALPN, gossip.clone())
        .spawn();
    // Subscribe to the topic (no bootstrap peers for now)
    println!("Subscribing to topic: {:?}", topic_id);
    let bootstrap_peers = vec![];
    let subscribe_handle = gossip.subscribe(topic_id, bootstrap_peers).await?;
    // Split into sender and receiver
    let (sender, mut receiver) = subscribe_handle.split();
    // Wait for join to complete
    println!("Waiting for gossip join...");
    receiver.joined().await?;
    println!("Gossip initialized successfully");
    Ok((endpoint, gossip, router, sender, receiver))
}

View File

@@ -1,99 +0,0 @@
mod assets;
mod components;
mod config;
mod db;
mod entities;
mod iroh_sync;
mod models;
mod services;
mod systems;
use std::{
path::Path,
sync::Arc,
};
use anyhow::{
Context,
Result,
};
use bevy::prelude::*;
// Import components and systems
use components::*;
use config::Config;
use iroh_gossip::proto::TopicId;
// Re-export init function
pub use iroh_sync::init_iroh_gossip;
use parking_lot::Mutex;
use rusqlite::Connection;
use systems::*;
/// Server entry point: load config and open the database, then run a
/// headless Bevy app whose Update systems poll chat.db, publish new
/// messages to gossip, and persist messages received from peers.
fn main() {
    println!("Starting server");
    // Load configuration and initialize database
    let (config, us_db) = match initialize_app() {
        | Ok(data) => data,
        | Err(e) => {
            eprintln!("Failed to initialize app: {}", e);
            return;
        },
    };
    // Create a topic ID for gossip (use a fixed topic for now).
    // The 32-byte topic is the literal "us-sync-v1" followed by zero padding.
    let mut topic_bytes = [0u8; 32];
    topic_bytes[..10].copy_from_slice(b"us-sync-v1");
    let topic_id = TopicId::from_bytes(topic_bytes);
    // Start Bevy app (headless); `run` drives the schedule until exit
    App::new()
        .add_plugins(MinimalPlugins)
        .add_message::<PublishMessageEvent>()
        .add_message::<GossipMessageReceived>()
        .insert_resource(AppConfig(config))
        .insert_resource(Database(us_db))
        .insert_resource(GossipTopic(topic_id))
        .add_systems(Startup, (setup_database, setup_gossip))
        .add_systems(
            Update,
            (
                poll_gossip_init,
                poll_chat_db,
                detect_new_messages,
                publish_to_gossip,
                receive_from_gossip,
                save_gossip_messages,
            ),
        )
        .run();
}
/// Load (or create) the TOML configuration and open the SQLite database.
///
/// When `config.toml` is missing, defaults are generated and saved first.
/// Returns the parsed config together with the schema-initialized
/// connection, wrapped for sharing across Bevy systems.
fn initialize_app() -> Result<(Config, Arc<Mutex<Connection>>)> {
    let config_path = "config.toml";
    let config;
    if Path::new(config_path).exists() {
        println!("Loading config from config.toml");
        config = Config::from_file(config_path)?;
    } else {
        println!("No config.toml found, using default configuration");
        let defaults = Config::default_config();
        defaults
            .save(config_path)
            .context("Failed to save default config")?;
        println!("Saved default configuration to config.toml");
        config = defaults;
    }
    println!("Configuration loaded");
    println!(" Database: {}", config.database.path);
    println!(" Chat DB: {}", config.database.chat_db_path);
    // Initialize database
    println!("Initializing database at {}", config.database.path);
    let conn = Connection::open(&config.database.path).context("Failed to open database")?;
    db::initialize_database(&conn).context("Failed to initialize database schema")?;
    Ok((config, Arc::new(Mutex::new(conn))))
}

View File

@@ -1,66 +0,0 @@
use chrono::{
DateTime,
Utc,
};
use serde::{
Deserialize,
Serialize,
};
/// Represents a message stored in our database
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Message {
    /// Local primary key
    pub id: i64,
    /// rowid of the originating row in the iMessage chat.db
    pub chat_db_rowid: i64,
    /// Message body; may be absent — presumably attachment-only/empty
    /// messages, TODO confirm against chat.db import
    pub text: Option<String>,
    /// When the message was sent, if known
    pub timestamp: Option<DateTime<Utc>>,
    pub is_from_me: bool,
    /// When the row was inserted locally
    pub created_at: DateTime<Utc>,
}
/// Represents a message embedding (full message vector)
#[derive(Debug, Clone)]
pub struct MessageEmbedding {
    pub id: i64,
    /// FK into `messages`
    pub message_id: i64,
    pub embedding: Vec<f32>,
    /// Model that produced this vector
    pub model_name: String,
    pub created_at: DateTime<Utc>,
}
/// Represents a word embedding
#[derive(Debug, Clone)]
pub struct WordEmbedding {
    pub id: i64,
    /// Unique word this vector belongs to
    pub word: String,
    pub embedding: Vec<f32>,
    pub model_name: String,
    pub created_at: DateTime<Utc>,
}
/// Represents an emotion classification for a message
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Emotion {
    pub id: i64,
    /// FK into `messages`
    pub message_id: i64,
    pub emotion: String,
    pub confidence: f64,
    /// Version of the classifier that produced this label
    pub model_version: String,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
/// Represents an emotion training sample
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmotionTrainingSample {
    pub id: i64,
    /// Optional FK; samples can exist without a backing message
    pub message_id: Option<i64>,
    pub text: String,
    /// Ground-truth label
    pub expected_emotion: String,
    /// Label the model actually produced, once evaluated
    pub actual_emotion: Option<String>,
    pub confidence: Option<f64>,
    /// Whether a human has validated this sample
    pub is_validated: bool,
    pub notes: Option<String>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}

View File

@@ -1,72 +0,0 @@
syntax = "proto3";
package emotions;
// Emotion classification for a message
message Emotion {
int64 id = 1;
int64 message_id = 2;
string emotion = 3;
double confidence = 4;
string model_version = 5;
int64 created_at = 6;
int64 updated_at = 7;
}
// Request to get a single emotion by message ID
message GetEmotionRequest {
int64 message_id = 1;
}
// Request to get multiple emotions with optional filters
message GetEmotionsRequest {
repeated int64 message_ids = 1;
optional string emotion_filter = 2;
optional double min_confidence = 3;
optional int32 limit = 4;
optional int32 offset = 5;
}
// Response containing multiple emotions
message EmotionsResponse {
repeated Emotion emotions = 1;
int32 total_count = 2;
}
// Request to update an emotion (for corrections/fine-tuning)
message UpdateEmotionRequest {
int64 message_id = 1;
string emotion = 2;
double confidence = 3;
optional string notes = 4;
}
// Request to delete an emotion
message DeleteEmotionRequest {
int64 id = 1;
}
// Generic response for mutations
message EmotionResponse {
bool success = 1;
string message = 2;
optional Emotion emotion = 3;
}
// Empty message for list all
message Empty {}
// The emotion service with full CRUD operations
service EmotionService {
// Read operations
rpc GetEmotion(GetEmotionRequest) returns (Emotion);
rpc GetEmotions(GetEmotionsRequest) returns (EmotionsResponse);
rpc ListAllEmotions(Empty) returns (EmotionsResponse);
// Update operations (for classification corrections and fine-tuning)
rpc UpdateEmotion(UpdateEmotionRequest) returns (EmotionResponse);
rpc BatchUpdateEmotions(stream UpdateEmotionRequest) returns (EmotionResponse);
// Delete operation
rpc DeleteEmotion(DeleteEmotionRequest) returns (EmotionResponse);
}

View File

@@ -1,143 +0,0 @@
use std::{
path::Path,
sync::Arc,
time::Duration,
};
use anyhow::{
Context,
Result,
};
use chrono::Utc;
use rusqlite::Connection;
use tokio::{
sync::{
Mutex,
mpsc,
},
time,
};
use tracing::{
debug,
error,
info,
warn,
};
use crate::db;
/// Polls the iMessage chat.db on a fixed interval, inserts newly-seen
/// messages into our database, and forwards them to the processing pipeline.
pub struct ChatPollerService {
    // Path to the (read-only) chat.db being watched
    chat_db_path: String,
    // Shared application database
    us_db: Arc<Mutex<Connection>>,
    // Downstream channel for newly discovered messages
    tx: mpsc::Sender<lib::Message>,
    // Tick period between polls
    poll_interval: Duration,
}
impl ChatPollerService {
    /// Build a poller; `poll_interval_ms` is converted to a `Duration` here.
    pub fn new(
        chat_db_path: String,
        us_db: Arc<Mutex<Connection>>,
        tx: mpsc::Sender<lib::Message>,
        poll_interval_ms: u64,
    ) -> Self {
        Self {
            chat_db_path,
            us_db,
            tx,
            poll_interval: Duration::from_millis(poll_interval_ms),
        }
    }

    /// Run the poll loop.
    ///
    /// # Errors
    /// Only the initial read of the persisted rowid can fail this future;
    /// per-tick poll errors are logged and retried on the next tick, so the
    /// loop otherwise never exits (the trailing `Ok` is unreachable).
    pub async fn run(&self) -> Result<()> {
        info!("Starting chat poller service");
        info!(
            "Polling {} every {:?}",
            self.chat_db_path, self.poll_interval
        );
        // Get last processed rowid from database
        let us_db = self.us_db.lock().await;
        let mut last_rowid =
            db::get_last_processed_rowid(&us_db).context("Failed to get last processed rowid")?;
        // Release the lock before entering the loop
        drop(us_db);
        info!("Starting from rowid: {}", last_rowid);
        let mut interval = time::interval(self.poll_interval);
        loop {
            interval.tick().await;
            match self.poll_messages(last_rowid).await {
                | Ok(new_messages) => {
                    if !new_messages.is_empty() {
                        info!("Found {} new messages", new_messages.len());
                        for msg in new_messages {
                            // Update last_rowid (messages may arrive unordered)
                            if msg.rowid > last_rowid {
                                last_rowid = msg.rowid;
                            }
                            // Send message to processing pipeline; a send
                            // failure is logged but does not stop the loop
                            if let Err(e) = self.tx.send(msg).await {
                                error!("Failed to send message to processing pipeline: {}", e);
                            }
                        }
                        // Save state to database so a restart resumes here
                        let us_db = self.us_db.lock().await;
                        if let Err(e) = db::save_last_processed_rowid(&us_db, last_rowid) {
                            warn!("Failed to save last processed rowid: {}", e);
                        }
                        drop(us_db);
                    } else {
                        debug!("No new messages");
                    }
                },
                | Err(e) => {
                    error!("Error polling messages: {}", e);
                },
            }
        }
    }

    /// Fetch messages newer than `last_rowid` from chat.db, insert them into
    /// our database (duplicates are ignored by `insert_message`), and return
    /// them. Currently only scans the last 7 days of chat.db history.
    async fn poll_messages(&self, last_rowid: i64) -> Result<Vec<lib::Message>> {
        // Check if chat.db exists
        if !Path::new(&self.chat_db_path).exists() {
            return Err(anyhow::anyhow!(
                "chat.db not found at {}",
                self.chat_db_path
            ));
        }
        // Open chat.db (read-only)
        let chat_db = lib::ChatDb::open(&self.chat_db_path).context("Failed to open chat.db")?;
        // Get messages with rowid > last_rowid
        // We'll use the existing get_our_messages but need to filter by rowid
        // For now, let's get recent messages and filter in-memory
        let start_date = Some(Utc::now() - chrono::Duration::days(7));
        let end_date = Some(Utc::now());
        let messages = chat_db
            .get_our_messages(start_date, end_date)
            .context("Failed to get messages from chat.db")?;
        // Filter messages with rowid > last_rowid and ensure they're not duplicates
        let new_messages: Vec<lib::Message> = messages
            .into_iter()
            .filter(|msg| msg.rowid > last_rowid)
            .collect();
        // Insert new messages into our database
        let us_db = self.us_db.lock().await;
        for msg in &new_messages {
            if let Err(e) = db::insert_message(&us_db, msg) {
                warn!("Failed to insert message {}: {}", msg.rowid, e);
            }
        }
        Ok(new_messages)
    }
}

View File

@@ -1,119 +0,0 @@
use std::sync::Arc;
use anyhow::Result;
use rusqlite::Connection;
use tokio::sync::{
Mutex,
mpsc,
};
use tracing::{
error,
info,
warn,
};
use crate::db;
/// Service responsible for generating embeddings for messages and words
///
/// Consumes messages from an mpsc channel and writes embeddings back to the
/// shared application database.
pub struct EmbeddingService {
    /// Shared handle to the application database (not chat.db).
    us_db: Arc<Mutex<Connection>>,
    /// Channel of newly polled messages to embed.
    rx: mpsc::Receiver<lib::Message>,
    /// Identifier of the embedding model (model loading is still a TODO).
    model_name: String,
}
impl EmbeddingService {
    /// Create a new embedding service.
    ///
    /// * `us_db` - shared handle to the application database
    /// * `rx` - channel of messages to embed
    /// * `model_name` - identifier of the embedding model (loading is a TODO)
    pub fn new(
        us_db: Arc<Mutex<Connection>>,
        rx: mpsc::Receiver<lib::Message>,
        model_name: String,
    ) -> Self {
        Self {
            us_db,
            rx,
            model_name,
        }
    }

    /// Service loop: consume messages from the channel until all senders are
    /// dropped, generating and persisting embeddings for each message.
    ///
    /// Per-message failures are logged and skipped so that one bad message
    /// cannot stall the pipeline.
    pub async fn run(mut self) -> Result<()> {
        info!("Starting embedding service with model: {}", self.model_name);
        // TODO: Load the embedding model here
        // For now, we'll create a placeholder implementation
        info!("Loading embedding model...");
        // let model = load_embedding_model(&self.model_name)?;
        info!("Embedding model loaded (placeholder)");
        while let Some(msg) = self.rx.recv().await {
            if let Err(e) = self.process_message(&msg).await {
                error!("Error processing message {}: {}", msg.rowid, e);
            }
        }
        Ok(())
    }

    /// Generate and store embeddings for one message: a whole-message
    /// embedding plus one embedding per unique lowercase word.
    ///
    /// Skips messages that are unknown to our database, already embedded, or
    /// have no text (idempotent re-processing). The DB lock is released while
    /// computing the message embedding so other services can make progress.
    async fn process_message(&self, msg: &lib::Message) -> Result<()> {
        // Map the chat.db rowid to our own message id.
        let us_db = self.us_db.lock().await;
        let message_id = match db::get_message_id_by_chat_rowid(&us_db, msg.rowid)? {
            Some(id) => id,
            None => {
                warn!("Message {} not found in database, skipping", msg.rowid);
                return Ok(());
            }
        };
        // Already embedded — nothing to do.
        if db::get_message_embedding(&us_db, message_id)?.is_some() {
            return Ok(());
        }
        // Skip if message has no text.
        let text = match &msg.text {
            Some(t) if !t.is_empty() => t,
            _ => return Ok(()),
        };
        // Release the lock while running (future) model inference.
        drop(us_db);
        // Generate embedding for the full message.
        // TODO: Replace with actual model inference
        let message_embedding = self.generate_embedding(text)?;
        // Store the message embedding, then fill in any missing word embeddings.
        let us_db = self.us_db.lock().await;
        db::insert_message_embedding(&us_db, message_id, &message_embedding, &self.model_name)?;
        let words = self.tokenize(text);
        for word in words {
            // Word embeddings are shared across messages; only compute new ones.
            if db::get_word_embedding(&us_db, &word)?.is_none() {
                let word_embedding = self.generate_embedding(&word)?;
                db::insert_word_embedding(&us_db, &word, &word_embedding, &self.model_name)?;
            }
        }
        drop(us_db);
        info!("Generated embeddings for message {}", msg.rowid);
        Ok(())
    }

    /// Compute an embedding vector for `_text`.
    ///
    /// Placeholder: returns an all-zero 1024-dimensional vector until real
    /// model inference (Candle) is wired in; the input is intentionally
    /// unused for now, hence the underscore.
    fn generate_embedding(&self, _text: &str) -> Result<Vec<f32>> {
        // TODO: Replace with actual model inference using Candle
        let embedding = vec![0.0f32; 1024];
        Ok(embedding)
    }

    /// Lowercase word tokenization: split on whitespace and ASCII
    /// punctuation, dropping empty fragments.
    ///
    /// TODO: Replace with proper tokenizer
    fn tokenize(&self, text: &str) -> Vec<String> {
        text.split(|c: char| c.is_whitespace() || c.is_ascii_punctuation())
            .filter(|s| !s.is_empty())
            .map(|s| s.to_lowercase())
            .collect()
    }
}

View File

@@ -1,134 +0,0 @@
use std::sync::Arc;
use anyhow::Result;
use rusqlite::Connection;
use tokio::sync::{
Mutex,
mpsc,
};
use tracing::{
error,
info,
warn,
};
use crate::db;
/// Service responsible for classifying emotions in messages
///
/// Consumes messages from an mpsc channel, classifies each one, and writes
/// the result back to the shared application database.
pub struct EmotionService {
    /// Shared handle to the application database.
    us_db: Arc<Mutex<Connection>>,
    /// Channel of newly polled messages to classify.
    rx: mpsc::Receiver<lib::Message>,
    /// Identifier of the classification model (loading is still a TODO).
    model_version: String,
    /// Probability in [0, 1] that a classified message is also stored as a
    /// training sample.
    training_sample_rate: f64,
}
impl EmotionService {
    /// Create a new emotion classification service.
    ///
    /// `training_sample_rate` is the probability (0.0..=1.0) that a
    /// classified message is additionally added to the training set.
    pub fn new(
        us_db: Arc<Mutex<Connection>>,
        rx: mpsc::Receiver<lib::Message>,
        model_version: String,
        training_sample_rate: f64,
    ) -> Self {
        Self {
            us_db,
            rx,
            model_version,
            training_sample_rate,
        }
    }

    /// Service loop: consume messages from the channel until all senders are
    /// dropped, classifying and persisting an emotion for each message.
    ///
    /// Per-message failures are logged and skipped so that one bad message
    /// cannot stall the pipeline.
    pub async fn run(mut self) -> Result<()> {
        info!(
            "Starting emotion classification service with model: {}",
            self.model_version
        );
        info!(
            "Training sample rate: {:.2}%",
            self.training_sample_rate * 100.0
        );
        // TODO: Load the RoBERTa emotion classification model here
        info!("Loading RoBERTa-base-go_emotions model...");
        // let model = load_emotion_model(&self.model_version)?;
        info!("Emotion model loaded (placeholder)");
        while let Some(msg) = self.rx.recv().await {
            if let Err(e) = self.process_message(&msg).await {
                error!("Error processing message {}: {}", msg.rowid, e);
            }
        }
        Ok(())
    }

    /// Classify one message's emotion and persist the result.
    ///
    /// Skips messages that are unknown to our database, already classified,
    /// or have no text (idempotent re-processing). A random fraction
    /// (`training_sample_rate`) of classified messages is also stored as
    /// training data.
    async fn process_message(&self, msg: &lib::Message) -> Result<()> {
        // Map the chat.db rowid to our own message id.
        let us_db = self.us_db.lock().await;
        let message_id = match db::get_message_id_by_chat_rowid(&us_db, msg.rowid)? {
            Some(id) => id,
            None => {
                warn!("Message {} not found in database, skipping", msg.rowid);
                return Ok(());
            }
        };
        // Already classified — nothing to do.
        if db::get_emotion_by_message_id(&us_db, message_id)?.is_some() {
            return Ok(());
        }
        // Skip if message has no text.
        let text = match &msg.text {
            Some(t) if !t.is_empty() => t,
            _ => return Ok(()),
        };
        // Release the lock while running (future) model inference.
        drop(us_db);
        // TODO: Replace with actual model inference
        let (emotion, confidence) = self.classify_emotion(text)?;
        // Store the classification result.
        let us_db = self.us_db.lock().await;
        db::insert_emotion(
            &us_db,
            message_id,
            &emotion,
            confidence,
            &self.model_version,
        )?;
        // Randomly add to training set based on the configured sample rate.
        if rand::random::<f64>() < self.training_sample_rate {
            db::insert_training_sample(&us_db, Some(message_id), text, &emotion)?;
            info!(
                "Added message {} to training set (emotion: {})",
                msg.rowid, emotion
            );
        }
        drop(us_db);
        info!(
            "Classified message {} as {} (confidence: {:.2})",
            msg.rowid, emotion, confidence
        );
        Ok(())
    }

    /// Classify `_text` into one of the 28 GoEmotions labels.
    ///
    /// Placeholder: always returns ("neutral", 0.85) until real
    /// RoBERTa-base-go_emotions inference via Candle is wired in; the input
    /// is intentionally unused for now, hence the underscore.
    fn classify_emotion(&self, _text: &str) -> Result<(String, f64)> {
        // TODO: Replace with actual RoBERTa-base-go_emotions inference using Candle
        // The model outputs probabilities for 28 emotions:
        // admiration, amusement, anger, annoyance, approval, caring, confusion,
        // curiosity, desire, disappointment, disapproval, disgust, embarrassment,
        // excitement, fear, gratitude, grief, joy, love, nervousness, optimism,
        // pride, realization, relief, remorse, sadness, surprise, neutral
        let emotion = "neutral".to_string();
        let confidence = 0.85;
        Ok((emotion, confidence))
    }
}

View File

@@ -1,232 +0,0 @@
use crate::db;
use anyhow::Result;
use rusqlite::Connection;
use std::sync::Arc;
use tokio::sync::Mutex;
use tonic::{Request, Response, Status};
use tracing::{error, info};
// Include the generated protobuf code.
// `tonic::include_proto!` splices in the code tonic-build generated for the
// `emotions` package at compile time.
pub mod emotions {
    tonic::include_proto!("emotions");
}
use emotions::emotion_service_server::{EmotionService as EmotionServiceTrait, EmotionServiceServer};
use emotions::*;
/// gRPC server exposing emotion CRUD operations backed by the shared DB.
pub struct GrpcServer {
    /// Shared application database connection.
    us_db: Arc<Mutex<Connection>>,
    /// Listen address; parsed into a socket address in `run`.
    address: String,
}
impl GrpcServer {
    /// Build a server that will serve emotion data from `us_db` on `address`.
    pub fn new(us_db: Arc<Mutex<Connection>>, address: String) -> Self {
        Self { us_db, address }
    }

    /// Parse the configured address, then serve gRPC requests until the
    /// transport shuts down.
    pub async fn run(self) -> Result<()> {
        let addr = self.address.parse()?;
        info!("Starting gRPC server on {}", self.address);
        // Each request handler shares the same database handle.
        let handler = EmotionServiceImpl {
            us_db: self.us_db.clone(),
        };
        let router = tonic::transport::Server::builder()
            .add_service(EmotionServiceServer::new(handler));
        router.serve(addr).await?;
        Ok(())
    }
}
/// Handler type implementing the generated `EmotionService` gRPC trait.
struct EmotionServiceImpl {
    /// Shared application database connection.
    us_db: Arc<Mutex<Connection>>,
}
#[tonic::async_trait]
impl EmotionServiceTrait for EmotionServiceImpl {
    /// Fetch the stored emotion classification for a single message.
    async fn get_emotion(
        &self,
        request: Request<GetEmotionRequest>,
    ) -> Result<Response<Emotion>, Status> {
        let req = request.into_inner();
        let conn = self.us_db.lock().await;
        match db::get_emotion_by_message_id(&conn, req.message_id) {
            Ok(Some(emotion)) => Ok(Response::new(emotion_to_proto(emotion))),
            Ok(None) => Err(Status::not_found(format!(
                "Emotion not found for message_id: {}",
                req.message_id
            ))),
            Err(e) => {
                error!("Database error: {}", e);
                Err(Status::internal("Database error"))
            }
        }
    }

    /// List emotions with optional label filter, confidence floor, and
    /// pagination.
    async fn get_emotions(
        &self,
        request: Request<GetEmotionsRequest>,
    ) -> Result<Response<EmotionsResponse>, Status> {
        let req = request.into_inner();
        let conn = self.us_db.lock().await;
        let emotion_filter = req.emotion_filter.as_deref();
        let min_confidence = req.min_confidence;
        // NOTE(review): `as i32` silently truncates out-of-range values;
        // confirm the proto field widths and switch to `try_into` if limits
        // can exceed i32::MAX.
        let limit = req.limit.map(|l| l as i32);
        let offset = req.offset.map(|o| o as i32);
        match db::list_emotions(&conn, emotion_filter, min_confidence, limit, offset) {
            Ok(emotions) => {
                // A failed count is reported as 0 rather than failing the
                // whole request.
                let total_count = db::count_emotions(&conn).unwrap_or(0);
                Ok(Response::new(EmotionsResponse {
                    emotions: emotions.into_iter().map(emotion_to_proto).collect(),
                    total_count,
                }))
            }
            Err(e) => {
                error!("Database error: {}", e);
                Err(Status::internal("Database error"))
            }
        }
    }

    /// List every stored emotion without filters or pagination.
    async fn list_all_emotions(
        &self,
        _request: Request<Empty>,
    ) -> Result<Response<EmotionsResponse>, Status> {
        let conn = self.us_db.lock().await;
        match db::list_emotions(&conn, None, None, None, None) {
            Ok(emotions) => {
                let total_count = emotions.len() as i32;
                Ok(Response::new(EmotionsResponse {
                    emotions: emotions.into_iter().map(emotion_to_proto).collect(),
                    total_count,
                }))
            }
            Err(e) => {
                error!("Database error: {}", e);
                Err(Status::internal("Database error"))
            }
        }
    }

    /// Update (re-label) the emotion for one message, returning the updated
    /// row when it can be re-read.
    async fn update_emotion(
        &self,
        request: Request<UpdateEmotionRequest>,
    ) -> Result<Response<EmotionResponse>, Status> {
        let req = request.into_inner();
        let conn = self.us_db.lock().await;
        match db::update_emotion(&conn, req.message_id, &req.emotion, req.confidence) {
            Ok(_) => {
                // The presence of `notes` opts this message into the training
                // set; the note text itself is not persisted (the previous
                // code bound `notes` without using it).
                // TODO confirm whether the note body should be stored too.
                if req.notes.is_some() {
                    if let Ok(Some(msg)) = db::get_message(&conn, req.message_id) {
                        if let Some(text) = msg.text {
                            let _ = db::insert_training_sample(
                                &conn,
                                Some(req.message_id),
                                &text,
                                &req.emotion,
                            );
                        }
                    }
                }
                // Fetch the updated emotion so the caller sees the final row;
                // a read failure still reports success with no payload.
                match db::get_emotion_by_message_id(&conn, req.message_id) {
                    Ok(Some(emotion)) => Ok(Response::new(EmotionResponse {
                        success: true,
                        message: "Emotion updated successfully".to_string(),
                        emotion: Some(emotion_to_proto(emotion)),
                    })),
                    _ => Ok(Response::new(EmotionResponse {
                        success: true,
                        message: "Emotion updated successfully".to_string(),
                        emotion: None,
                    })),
                }
            }
            Err(e) => {
                error!("Database error: {}", e);
                Err(Status::internal("Database error"))
            }
        }
    }

    /// Apply a client-streamed batch of emotion updates.
    ///
    /// Per-item failures are logged and skipped; the response reports how
    /// many updates succeeded.
    async fn batch_update_emotions(
        &self,
        request: Request<tonic::Streaming<UpdateEmotionRequest>>,
    ) -> Result<Response<EmotionResponse>, Status> {
        let mut stream = request.into_inner();
        let mut count = 0;
        while let Some(req) = stream.message().await? {
            // Lock per item so other handlers can interleave with the stream.
            let conn = self.us_db.lock().await;
            match db::update_emotion(&conn, req.message_id, &req.emotion, req.confidence) {
                Ok(_) => {
                    count += 1;
                    // Same `notes`-as-flag behavior as `update_emotion`.
                    if req.notes.is_some() {
                        if let Ok(Some(msg)) = db::get_message(&conn, req.message_id) {
                            if let Some(text) = msg.text {
                                let _ = db::insert_training_sample(
                                    &conn,
                                    Some(req.message_id),
                                    &text,
                                    &req.emotion,
                                );
                            }
                        }
                    }
                }
                Err(e) => {
                    error!("Failed to update emotion for message {}: {}", req.message_id, e);
                }
            }
            drop(conn);
        }
        Ok(Response::new(EmotionResponse {
            success: true,
            message: format!("Updated {} emotions", count),
            emotion: None,
        }))
    }

    /// Delete an emotion row by its primary key id.
    async fn delete_emotion(
        &self,
        request: Request<DeleteEmotionRequest>,
    ) -> Result<Response<EmotionResponse>, Status> {
        let req = request.into_inner();
        let conn = self.us_db.lock().await;
        match db::delete_emotion(&conn, req.id) {
            Ok(_) => Ok(Response::new(EmotionResponse {
                success: true,
                message: format!("Emotion {} deleted successfully", req.id),
                emotion: None,
            })),
            Err(e) => {
                error!("Database error: {}", e);
                Err(Status::internal("Database error"))
            }
        }
    }
}
/// Convert a database emotion record into its protobuf representation.
fn emotion_to_proto(record: crate::models::Emotion) -> Emotion {
    // Timestamps travel over the wire as Unix epoch seconds.
    let created = record.created_at.timestamp();
    let updated = record.updated_at.timestamp();
    Emotion {
        id: record.id,
        message_id: record.message_id,
        emotion: record.emotion,
        confidence: record.confidence,
        model_version: record.model_version,
        created_at: created,
        updated_at: updated,
    }
}

View File

@@ -1,7 +0,0 @@
// Background services that process iMessage data.
pub mod chat_poller; // tails chat.db for new messages
pub mod embedding_service; // generates message/word embeddings
pub mod emotion_service; // classifies message emotions
// Re-export the service types at the module root for convenient imports.
pub use chat_poller::ChatPollerService;
pub use embedding_service::EmbeddingService;
pub use emotion_service::EmotionService;

View File

@@ -1,114 +0,0 @@
use bevy::prelude::*;
use lib::sync::{Syncable, SyncMessage};
use crate::components::*;
/// Bevy plugin for transparent CRDT sync via gossip.
///
/// The sync systems (`publish_sync_ops` / `receive_sync_ops`) are generic
/// over the synced resource type, so they cannot be registered here without
/// a concrete `T` — passing the bare generic function names to `add_systems`
/// does not compile. They must be registered per resource type (the
/// `SyncedResourceExt::add_synced_resource::<T>` entry point in this file is
/// the natural place for that).
pub struct SyncPlugin;

impl Plugin for SyncPlugin {
    fn build(&self, _app: &mut App) {
        // Intentionally empty: per-type sync systems need a concrete `T`
        // before they can be added to a schedule (see doc comment above).
    }
}
/// Marker trait for Bevy resources that can be synced over gossip.
///
/// Combines `Syncable` (CRDT operation plumbing from `lib::sync`) with the
/// thread-safety bounds Bevy requires of resources.
pub trait SyncedResource: Resource + Syncable + Clone + Send + Sync + 'static {}
/// Queue of sync operations waiting to be published to gossip.
#[derive(Resource)]
pub struct SyncOpQueue<T: Syncable> {
    /// Operations accumulated since the last publish pass.
    pub ops: Vec<T::Operation>,
}

// Manual impl instead of `#[derive(Default)]`: the derive would add a
// spurious `T: Default` bound, but an empty queue needs no default `T`.
impl<T: Syncable> Default for SyncOpQueue<T> {
    fn default() -> Self {
        Self { ops: Vec::new() }
    }
}

impl<T: Syncable> SyncOpQueue<T> {
    /// Enqueue an operation for the next publish pass.
    pub fn push(&mut self, op: T::Operation) {
        self.ops.push(op);
    }
}
/// System: drain queued sync operations and publish them to gossip.
///
/// No-op until the gossip sender resource exists and at least one operation
/// has been queued. Actual broadcasting is still a TODO.
fn publish_sync_ops<T: SyncedResource>(
    mut queue: ResMut<SyncOpQueue<T>>,
    resource: Res<T>,
    sender: Option<Res<IrohGossipSender>>,
) {
    // Gossip not initialized, or nothing to do — skip this frame.
    let Some(sender) = sender else {
        return;
    };
    if queue.ops.is_empty() {
        return;
    }
    // Hold the channel lock for the whole drain so operations go out in
    // order; unused until the real broadcast is wired in.
    let _sender_guard = sender.sender.lock();
    for op in queue.ops.drain(..) {
        let sync_msg = resource.create_sync_message(op);
        match sync_msg.to_bytes() {
            Ok(bytes) => {
                println!("Publishing sync operation: {} bytes", bytes.len());
                // TODO: Actually send via gossip
                // _sender_guard.broadcast(bytes)?;
            }
            Err(e) => {
                eprintln!("Failed to serialize sync operation: {}", e);
            }
        }
    }
}
/// System to receive and apply sync operations from gossip
fn receive_sync_ops<T: SyncedResource>(
    mut resource: ResMut<T>,
    receiver: Option<Res<IrohGossipReceiver>>,
) {
    // Skip the frame entirely while gossip is still uninitialized.
    let Some(_receiver) = receiver else {
        return;
    };
    // TODO: Poll receiver for messages
    // For each message:
    // 1. Deserialize SyncMessage<T::Operation>
    // 2. Apply to resource with resource.apply_sync_op(&op)
}
/// Helper to register a synced resource
pub trait SyncedResourceExt {
    /// Register `T` for gossip sync on this `App`, setting up its per-type
    /// sync state.
    fn add_synced_resource<T: SyncedResource>(&mut self) -> &mut Self;
}
impl SyncedResourceExt for App {
fn add_synced_resource<T: SyncedResource>(&mut self) -> &mut Self {
self.init_resource::<SyncOpQueue<T>>();
self
}
}
/// Example synced resource
#[cfg(test)]
mod tests {
    use super::*;
    use lib::sync::synced;
    // NOTE(review): `#[synced]` presumably generates the `Syncable`
    // implementation (with `#[sync(skip)]` excluding `node_id` from sync) —
    // confirm against `lib::sync::synced`.
    #[synced]
    pub struct TestConfig {
        pub value: i32,
        #[sync(skip)]
        node_id: String,
    }
    impl Resource for TestConfig {}
    impl SyncedResource for TestConfig {}
    #[test]
    fn test_sync_plugin() {
        // Smoke test: building an app with the plugin and a synced resource
        // must not panic. Assertions on queue/publish behavior are a TODO.
        let mut app = App::new();
        app.add_plugins(MinimalPlugins);
        app.add_plugins(SyncPlugin);
        app.add_synced_resource::<TestConfig>();
        // TODO: Test that operations are queued and published
    }
}

View File

@@ -1,9 +0,0 @@
use bevy::prelude::*;
use crate::components::*;
/// System: Poll chat.db for new messages using Bevy's task system
///
/// Stub: parameters are underscored because nothing reads them yet.
pub fn poll_chat_db(_config: Res<AppConfig>, _db: Res<Database>) {
    // TODO: Use Bevy's AsyncComputeTaskPool to poll chat.db
    // This will replace the tokio::spawn chat poller
}

View File

@@ -1,117 +0,0 @@
use std::sync::Arc;
use bevy::prelude::*;
use parking_lot::Mutex;
use crate::components::*;
/// System: Poll the gossip init task and insert resources when complete.
///
/// Runs every frame but does a single non-blocking poll of the background
/// initialization task. On success, inserts the Iroh endpoint/gossip/router
/// and the sender/receiver channel resources. The `GossipInitTask` resource
/// is removed as soon as the task completes — even on failure — so a
/// finished task is never polled again (re-polling a completed task was a
/// latent bug when init yielded `None`).
pub fn poll_gossip_init(mut commands: Commands, init_task: Option<ResMut<GossipInitTask>>) {
    // Nothing to do until the init task resource has been spawned.
    let Some(mut task) = init_task else {
        return;
    };
    // Check if the task is finished (non-blocking, single poll).
    let Some(result) =
        bevy::tasks::block_on(bevy::tasks::futures_lite::future::poll_once(&mut task.0))
    else {
        return;
    };
    // The task has completed; drop the resource so we never poll it again.
    commands.remove_resource::<GossipInitTask>();
    // `None` means initialization failed; only insert resources on success.
    let Some((endpoint, gossip, router, sender, receiver)) = result else {
        return;
    };
    println!("Inserting gossip resources");
    commands.insert_resource(IrohEndpoint {
        endpoint,
        node_id: "TODO".to_string(), // TODO: Figure out how to get node_id in iroh 0.95
    });
    commands.insert_resource(IrohGossipHandle { gossip });
    commands.insert_resource(IrohRouter { router });
    commands.insert_resource(IrohGossipSender {
        sender: Arc::new(Mutex::new(sender)),
    });
    commands.insert_resource(IrohGossipReceiver {
        receiver: Arc::new(Mutex::new(receiver)),
    });
}
/// System: Detect new messages in SQLite that need to be published to gossip
///
/// Stub: parameters are underscored until the query is implemented.
/// `_last_synced` is per-system `Local` state intended to track the highest
/// rowid already published — presumably reset on restart; confirm whether it
/// needs to be persisted.
pub fn detect_new_messages(
    _db: Res<Database>,
    _last_synced: Local<i64>,
    _publish_events: MessageWriter<PublishMessageEvent>,
) {
    // TODO: Query SQLite for messages with rowid > last_synced
    // When we detect new messages, we'll send PublishMessageEvent
}
/// System: Publish messages to gossip when `PublishMessageEvent` fires.
///
/// No-op until both the gossip sender and the Iroh endpoint resources exist.
/// Serialization happens here, but the actual broadcast is still a TODO:
/// sending from a Bevy system needs a channel or a dedicated async task.
pub fn publish_to_gossip(
    mut events: MessageReader<PublishMessageEvent>,
    sender: Option<Res<IrohGossipSender>>,
    endpoint: Option<Res<IrohEndpoint>>,
) {
    // Gossip not initialized yet — skip. `_sender` is unused until the real
    // broadcast is implemented, but its presence gates the system (the old
    // code unwrapped it into an unused binding).
    let (Some(_sender), Some(endpoint)) = (sender, endpoint) else {
        return;
    };
    for event in events.read() {
        println!("Publishing message {} to gossip", event.message.rowid);
        // Wrap the row in a sync envelope stamped with our node id.
        let sync_message = SyncMessage {
            message: event.message.clone(),
            sync_timestamp: chrono::Utc::now().timestamp(),
            publisher_node_id: endpoint.node_id.clone(),
        };
        match serialize_sync_message(&sync_message) {
            Ok(bytes) => {
                // TODO: Publish to gossip once an async send path exists.
                println!("Would publish {} bytes to gossip", bytes.len());
            }
            Err(e) => {
                eprintln!("Failed to serialize sync message: {}", e);
            }
        }
    }
}
/// System: Receive messages from gossip
pub fn receive_from_gossip(
    mut _gossip_events: MessageWriter<GossipMessageReceived>,
    receiver: Option<Res<IrohGossipReceiver>>,
) {
    // Skip the frame entirely while gossip is still uninitialized.
    let Some(_receiver) = receiver else {
        return;
    };
    // TODO: Implement proper async message reception
    // This will require spawning a long-running task that listens for gossip
    // events and sends them as Bevy messages. For now, this is a
    // placeholder.
}
/// System: Save received gossip messages to SQLite
pub fn save_gossip_messages(mut events: MessageReader<GossipMessageReceived>, _db: Res<Database>) {
    events.read().for_each(|event| {
        let sync = &event.sync_message;
        println!(
            "Received message {} from gossip (published by {})",
            sync.message.rowid, sync.publisher_node_id
        );
        // TODO: Save to SQLite if we don't already have it
    });
}

View File

@@ -1,7 +0,0 @@
// ECS systems grouped by concern.
pub mod database; // chat.db polling systems
pub mod gossip; // iroh-gossip publish/receive systems
pub mod setup; // startup/initialization systems
// Re-export everything so callers can `use crate::systems::*`.
pub use database::*;
pub use gossip::*;
pub use setup::*;

View File

@@ -1,24 +0,0 @@
use bevy::{
prelude::*,
tasks::AsyncComputeTaskPool,
};
use crate::components::*;
/// Startup system: Initialize database
///
/// The `Database` resource is constructed elsewhere; this system only logs
/// that it is available at startup.
pub fn setup_database(_db: Res<Database>) {
    println!("Database resource initialized");
}
/// Startup system: Initialize Iroh gossip.
///
/// Currently a stub that only logs the configured topic; real initialization
/// is deferred (see TODO). The unused `commands` parameter and the dead
/// `topic_id` binding from the original were removed to silence warnings.
pub fn setup_gossip(_commands: Commands, topic: Res<GossipTopic>) {
    println!("Setting up Iroh gossip for topic: {:?}", topic.0);
    // TODO: Initialize gossip properly
    // For now, skip async initialization due to Sync requirements in Bevy tasks
    // We'll need to use a different initialization strategy
    println!("Gossip initialization skipped (TODO: implement proper async init)");
}