chore: initial commit

This commit is contained in:
2025-11-15 23:42:12 +00:00
commit 3c456abadc
47 changed files with 14645 additions and 0 deletions

1
crates/client/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/target

43
crates/client/Cargo.toml Normal file
View File

@@ -0,0 +1,43 @@
[package]
name = "client"
version = "0.1.0"
edition.workspace = true
[[bin]]
name = "client"
path = "src/main.rs"
[dependencies]
# Bevy
bevy = { version = "0.17", default-features = false, features = [
"bevy_winit",
"bevy_render",
"bevy_core_pipeline",
"bevy_sprite",
"bevy_ui",
"bevy_text",
"png",
"x11",
] }
# Iroh - P2P networking and gossip
iroh = { workspace = true }
iroh-gossip = { workspace = true }
# Async runtime
tokio = { version = "1", features = ["full"] }
# Serialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
# Error handling
thiserror = "2.0"
anyhow = "1.0"
# Logging
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
# Local dependencies
lib = { path = "../lib" }

14
crates/client/src/lib.rs Normal file
View File

@@ -0,0 +1,14 @@
/// Returns the sum of `left` and `right`.
///
/// Uses standard `u64` addition semantics (panics on overflow in debug
/// builds, wraps in release builds).
pub fn add(left: u64, right: u64) -> u64 {
    [left, right].into_iter().sum()
}
#[cfg(test)]
mod tests {
    use super::*;

    /// `add` should behave like ordinary integer addition.
    #[test]
    fn it_works() {
        assert_eq!(add(2, 2), 4);
    }
}

24
crates/client/src/main.rs Normal file
View File

@@ -0,0 +1,24 @@
use bevy::prelude::*;
use tracing::info;
/// Entry point: installs a `tracing` subscriber driven by the `RUST_LOG`
/// environment variable, then hands control to the Bevy event loop.
fn main() {
    // Log filtering comes from the environment (RUST_LOG).
    let filter = tracing_subscriber::EnvFilter::from_default_env();
    tracing_subscriber::fmt().with_env_filter(filter).init();

    // Build and run the Bevy app.
    let mut app = App::new();
    app.add_plugins(DefaultPlugins);
    app.add_systems(Startup, setup);
    app.add_systems(Update, sync_system);
    app.run();
}
/// Startup system: spawns the 2D camera needed to render anything and logs
/// that the client came up.
fn setup(mut commands: Commands) {
    commands.spawn(Camera2d);
    info!("Client started");
}

/// Update-schedule placeholder for the future gossip-based sync loop.
fn sync_system() {
    // TODO: Implement gossip sync for client
}

4
crates/lib/.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
/target
chat.db
*.db-shm
*.db-wal

20
crates/lib/Cargo.toml Normal file
View File

@@ -0,0 +1,20 @@
[package]
name = "lib"
version = "0.1.0"
edition.workspace = true
[dependencies]
rusqlite = { version = "0.37.0", features = ["bundled"] }
chrono = { version = "0.4", features = ["serde"] }
thiserror = "2.0"
serde = { version = "1.0", features = ["derive"] }
serde_json.workspace = true
crdts.workspace = true
anyhow.workspace = true
sync-macros = { path = "../sync-macros" }
[dev-dependencies]
tokio.workspace = true
iroh.workspace = true
iroh-gossip.workspace = true
futures-lite = "2.0"

139
crates/lib/src/db.rs Normal file
View File

@@ -0,0 +1,139 @@
use crate::error::Result;
use crate::models::*;
use rusqlite::{Connection, OpenFlags, Row, params};
/// Read-only handle to an iMessage `chat.db` SQLite database.
pub struct ChatDb {
    // Opened with SQLITE_OPEN_READ_ONLY in `open`, so no write can occur.
    conn: Connection,
}
impl ChatDb {
    /// Open a connection to the chat database in read-only mode.
    ///
    /// The read-only open flag guarantees the iMessage database can never be
    /// modified through this handle.
    ///
    /// # Errors
    ///
    /// Returns a database error if the file cannot be opened.
    pub fn open(path: &str) -> Result<Self> {
        let conn = Connection::open_with_flags(path, OpenFlags::SQLITE_OPEN_READ_ONLY)?;
        Ok(Self { conn })
    }

    /// Get messages from the conversation with +31 6 39 13 29 13
    ///
    /// Returns messages from January 1, 2024 to present from the conversation
    /// with the specified Dutch phone number. The number is hard-coded; both
    /// the spaced and unspaced storage formats are tried because chat.db is
    /// inconsistent about handle formatting.
    ///
    /// # Arguments
    ///
    /// * `start_date` - Start date (defaults to January 1, 2024 if None)
    /// * `end_date` - End date (defaults to current time if None)
    ///
    /// # Errors
    ///
    /// Fails if no chat exists for either phone-number format, or on any
    /// SQL error.
    pub fn get_our_messages(
        &self,
        start_date: Option<chrono::DateTime<chrono::Utc>>,
        end_date: Option<chrono::DateTime<chrono::Utc>>,
    ) -> Result<Vec<Message>> {
        use chrono::{TimeZone, Utc};
        // Default date range: January 1, 2024 to now
        let start =
            start_date.unwrap_or_else(|| Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap());
        // Idiom fix: pass the function directly instead of a redundant closure.
        let end = end_date.unwrap_or_else(Utc::now);
        // Convert to Apple timestamps (nanoseconds since 2001-01-01), the
        // encoding used by the `message.date` column.
        let start_timestamp = datetime_to_apple_timestamp(start);
        let end_timestamp = datetime_to_apple_timestamp(end);
        // The phone number might be stored with or without spaces
        let phone_with_spaces = "+31 6 39 13 29 13";
        let phone_without_spaces = "+31639132913";
        // Find the chat with this phone number (try both formats)
        let chat = self
            .get_chat_for_phone_number(phone_with_spaces)
            .or_else(|_| self.get_chat_for_phone_number(phone_without_spaces))?;
        // Get messages from this chat within the date range.
        // Column order here must stay in sync with `map_message_row`.
        let mut stmt = self.conn.prepare(
            "SELECT m.ROWID, m.guid, m.text, m.service, m.handle_id, m.date, m.date_read, m.date_delivered,
                    m.is_from_me, m.is_read, m.is_delivered, m.is_sent, m.is_emote, m.is_audio_message,
                    m.cache_has_attachments, m.associated_message_guid, m.associated_message_type,
                    m.thread_originator_guid, m.reply_to_guid, m.is_spam
             FROM message m
             INNER JOIN chat_message_join cmj ON m.ROWID = cmj.message_id
             WHERE cmj.chat_id = ?
               AND m.date >= ?
               AND m.date <= ?
             ORDER BY m.date ASC"
        )?;
        let messages = stmt
            .query_map(
                params![chat.rowid, start_timestamp, end_timestamp],
                map_message_row,
            )?
            .collect::<std::result::Result<Vec<_>, _>>()?;
        Ok(messages)
    }

    /// Helper function to find the largest chat with a specific phone number
    ///
    /// A handle can be joined to several chats (e.g. group chats), so the
    /// chat with the highest message count is assumed to be the direct
    /// conversation.
    fn get_chat_for_phone_number(&self, phone_number: &str) -> Result<Chat> {
        let mut stmt = self.conn.prepare(
            "SELECT c.ROWID, c.guid, c.chat_identifier, c.service_name, c.display_name,
                    c.group_id, c.room_name, c.is_archived, c.is_filtered,
                    c.last_read_message_timestamp, COUNT(cmj.message_id) as msg_count
             FROM chat c
             INNER JOIN chat_handle_join chj ON c.ROWID = chj.chat_id
             INNER JOIN handle h ON chj.handle_id = h.ROWID
             INNER JOIN chat_message_join cmj ON c.ROWID = cmj.chat_id
             WHERE h.id = ?
             GROUP BY c.ROWID
             ORDER BY msg_count DESC
             LIMIT 1"
        )?;
        let chat = stmt.query_row(params![phone_number], |row| {
            Ok(Chat {
                rowid: row.get(0)?,
                guid: row.get(1)?,
                chat_identifier: row.get(2)?,
                service_name: row.get(3)?,
                display_name: row.get(4)?,
                group_id: row.get(5)?,
                room_name: row.get(6)?,
                // chat.db stores booleans as 0/1 integers
                is_archived: row.get::<_, i64>(7)? != 0,
                is_filtered: row.get::<_, i64>(8)? != 0,
                last_read_message_timestamp: row.get::<_, Option<i64>>(9)?.map(apple_timestamp_to_datetime),
            })
        })?;
        Ok(chat)
    }
}
// Helper function to map database rows to structs
//
// Column indices must match the SELECT list in `get_our_messages`:
// 0 ROWID, 1 guid, 2 text, 3 service, 4 handle_id, 5 date, 6 date_read,
// 7 date_delivered, 8-14 boolean flags, 15 associated_message_guid,
// 16 associated_message_type, 17 thread_originator_guid, 18 reply_to_guid,
// 19 is_spam.
fn map_message_row(row: &Row) -> rusqlite::Result<Message> {
    Ok(Message {
        rowid: row.get(0)?,
        guid: row.get(1)?,
        text: row.get(2)?,
        service: row.get(3)?,
        handle_id: row.get(4)?,
        // Apple nanosecond timestamps; NULL columns map to None.
        date: row
            .get::<_, Option<i64>>(5)?
            .map(apple_timestamp_to_datetime),
        date_read: row
            .get::<_, Option<i64>>(6)?
            .map(apple_timestamp_to_datetime),
        date_delivered: row
            .get::<_, Option<i64>>(7)?
            .map(apple_timestamp_to_datetime),
        // chat.db stores booleans as 0/1 integers.
        is_from_me: row.get::<_, i64>(8)? != 0,
        is_read: row.get::<_, i64>(9)? != 0,
        is_delivered: row.get::<_, i64>(10)? != 0,
        is_sent: row.get::<_, i64>(11)? != 0,
        is_emote: row.get::<_, i64>(12)? != 0,
        is_audio_message: row.get::<_, i64>(13)? != 0,
        cache_has_attachments: row.get::<_, i64>(14)? != 0,
        associated_message_guid: row.get(15)?,
        associated_message_type: row.get(16)?,
        thread_originator_guid: row.get(17)?,
        reply_to_guid: row.get(18)?,
        is_spam: row.get::<_, i64>(19)? != 0,
    })
}

15
crates/lib/src/error.rs Normal file
View File

@@ -0,0 +1,15 @@
use thiserror::Error;
/// Errors produced by the chat.db data-access layer.
#[derive(Error, Debug)]
pub enum ChatDbError {
    /// Any underlying SQLite failure (open, prepare, query, decode).
    #[error("Database error: {0}")]
    Database(#[from] rusqlite::Error),
    /// A requested entity does not exist.
    #[error("Not found: {0}")]
    NotFound(String),
    /// A row exists but its contents could not be interpreted.
    #[error("Invalid data: {0}")]
    InvalidData(String),
}

/// Convenience alias used throughout this crate.
pub type Result<T> = std::result::Result<T, ChatDbError>;

30
crates/lib/src/lib.rs Normal file
View File

@@ -0,0 +1,30 @@
//! Data access layer for iMessage chat.db
//!
//! This library provides a read-only interface to query messages from a specific conversation.
//!
//! # Safety
//!
//! All database connections are opened in read-only mode to prevent any
//! accidental modifications to your iMessage database.
//!
//! # Example
//!
//! ```no_run
//! use lib::ChatDb;
//!
//! let db = ChatDb::open("chat.db")?;
//!
//! // Get all messages from January 2024 to now
//! let messages = db.get_our_messages(None, None)?;
//! println!("Found {} messages", messages.len());
//! # Ok::<(), lib::ChatDbError>(())
//! ```
mod error;
mod models;
mod db;
pub mod sync;
pub use error::{ChatDbError, Result};
pub use models::{Message, Chat};
pub use db::ChatDb;

112
crates/lib/src/models.rs Normal file
View File

@@ -0,0 +1,112 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
/// Represents a message in the iMessage database
///
/// Fields mirror columns of the chat.db `message` table; boolean flags are
/// decoded from the 0/1 integers SQLite stores.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Message {
    // Primary key in the `message` table.
    pub rowid: i64,
    // Globally unique message identifier assigned by iMessage.
    pub guid: String,
    // Body text; None for attachment-only or system messages.
    pub text: Option<String>,
    // Transport identifier — presumably "iMessage"/"SMS"; confirm against data.
    pub service: Option<String>,
    pub handle_id: i64,
    // Timestamps converted from Apple nanosecond epochs; None when unset.
    pub date: Option<DateTime<Utc>>,
    pub date_read: Option<DateTime<Utc>>,
    pub date_delivered: Option<DateTime<Utc>>,
    pub is_from_me: bool,
    pub is_read: bool,
    pub is_delivered: bool,
    pub is_sent: bool,
    pub is_emote: bool,
    pub is_audio_message: bool,
    pub cache_has_attachments: bool,
    // Tapback/reaction linkage — presumably references another message's guid.
    pub associated_message_guid: Option<String>,
    pub associated_message_type: i64,
    // Threaded-reply linkage fields.
    pub thread_originator_guid: Option<String>,
    pub reply_to_guid: Option<String>,
    pub is_spam: bool,
}

/// Represents a chat/conversation
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Chat {
    // Primary key in the `chat` table.
    pub rowid: i64,
    pub guid: String,
    // Presumably the counterpart's phone number or email — confirm in data.
    pub chat_identifier: Option<String>,
    pub service_name: Option<String>,
    pub display_name: Option<String>,
    pub group_id: Option<String>,
    pub room_name: Option<String>,
    pub is_archived: bool,
    pub is_filtered: bool,
    // Converted from the Apple timestamp column of the same name.
    pub last_read_message_timestamp: Option<DateTime<Utc>>,
}
/// Helper function to convert Apple's Cocoa timestamp (nanoseconds since
/// 2001-01-01 00:00:00 UTC, as stored in modern chat.db `date` columns)
/// to a UTC `DateTime`.
///
/// Uses Euclidean division so that negative timestamps (dates before 2001)
/// always yield a nanosecond part in `0..1_000_000_000`. The previous
/// truncating `/` and `%` produced a negative remainder for such inputs,
/// which wrapped through the `as u32` cast, failed `DateTime::from_timestamp`
/// range checks, and silently fell back to the Unix epoch.
pub fn apple_timestamp_to_datetime(timestamp: i64) -> DateTime<Utc> {
    // Seconds between 1970-01-01 and 2001-01-01.
    const APPLE_EPOCH_OFFSET: i64 = 978307200;
    let seconds = timestamp.div_euclid(1_000_000_000) + APPLE_EPOCH_OFFSET;
    // rem_euclid is always in 0..1e9, so the cast to u32 is lossless.
    let nanos = timestamp.rem_euclid(1_000_000_000) as u32;
    DateTime::from_timestamp(seconds, nanos)
        .unwrap_or_else(|| DateTime::from_timestamp(0, 0).unwrap())
}
/// Helper function to convert a UTC `DateTime` to Apple's Cocoa timestamp
/// (nanoseconds since 2001-01-01 00:00:00 UTC).
pub fn datetime_to_apple_timestamp(dt: DateTime<Utc>) -> i64 {
    // Seconds between 1970-01-01 and 2001-01-01.
    const APPLE_EPOCH_OFFSET: i64 = 978307200;
    // `timestamp()` floors toward negative infinity and
    // `timestamp_subsec_nanos()` is always in 0..1e9, so this is consistent
    // for dates before the epoch as well.
    let unix_timestamp = dt.timestamp();
    let nanos = dt.timestamp_subsec_nanos() as i64;
    (unix_timestamp - APPLE_EPOCH_OFFSET) * 1_000_000_000 + nanos
}
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::{Datelike, TimeZone, Timelike};

    // Timestamp 0 is the Cocoa epoch itself.
    #[test]
    fn test_apple_timestamp_to_datetime_zero() {
        let dt = apple_timestamp_to_datetime(0);
        assert_eq!(dt.year(), 2001);
        assert_eq!(dt.month(), 1);
        assert_eq!(dt.day(), 1);
        assert_eq!(dt.hour(), 0);
        assert_eq!(dt.minute(), 0);
        assert_eq!(dt.second(), 0);
    }

    // 694224000 s worth of nanoseconds after 2001-01-01 is 2023-01-01.
    #[test]
    fn test_apple_timestamp_to_datetime_known_value() {
        let timestamp = 694224000000000000i64;
        let dt = apple_timestamp_to_datetime(timestamp);
        assert_eq!(dt.year(), 2023);
        assert_eq!(dt.month(), 1);
        assert_eq!(dt.day(), 1);
    }

    // Converting to DateTime and back must be lossless.
    #[test]
    fn test_apple_timestamp_roundtrip() {
        let original = 694224000000000000i64;
        let dt = apple_timestamp_to_datetime(original);
        let converted_back = datetime_to_apple_timestamp(dt);
        assert_eq!(original, converted_back);
    }

    // The Cocoa epoch must map back to timestamp 0.
    #[test]
    fn test_datetime_to_apple_timestamp_epoch() {
        let dt = Utc.with_ymd_and_hms(2001, 1, 1, 0, 0, 0).unwrap();
        let timestamp = datetime_to_apple_timestamp(dt);
        assert_eq!(timestamp, 0);
    }

    // One non-leap year of nanoseconds before the epoch lands in 2000.
    #[test]
    fn test_negative_apple_timestamp() {
        let timestamp = -31536000000000000i64;
        let dt = apple_timestamp_to_datetime(timestamp);
        assert_eq!(dt.year(), 2000);
    }
}

165
crates/lib/src/sync.rs Normal file
View File

@@ -0,0 +1,165 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::ops::{Deref, DerefMut};
// Re-export the macros
pub use sync_macros::{synced, Synced};
// Re-export common CRDT types from the crdts library
pub use crdts::{
ctx::ReadCtx,
lwwreg::LWWReg,
map::Map,
orswot::Orswot,
CmRDT, CvRDT,
};
/// Stable identifier of a peer; compared lexicographically to break
/// timestamp ties in LWW merges (see `SyncedValue::apply_lww`).
pub type NodeId = String;

/// Transparent wrapper for synced values
///
/// This wraps any value with LWW semantics but allows you to use it like a normal value
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SyncedValue<T: Clone> {
    // Current winning value.
    value: T,
    // Wall-clock time of the last accepted write.
    timestamp: DateTime<Utc>,
    // Writer whose value currently wins.
    node_id: NodeId,
}

impl<T: Clone> SyncedValue<T> {
    /// Wrap `value`, stamping it with the current time and `node_id`.
    pub fn new(value: T, node_id: NodeId) -> Self {
        Self {
            value,
            timestamp: Utc::now(),
            node_id,
        }
    }

    /// Borrow the current value.
    pub fn get(&self) -> &T {
        &self.value
    }

    /// Local write: unconditionally replaces the value and refreshes the
    /// timestamp, so it wins over anything written earlier.
    pub fn set(&mut self, value: T, node_id: NodeId) {
        self.value = value;
        self.timestamp = Utc::now();
        self.node_id = node_id;
    }

    /// Remote write with last-writer-wins semantics: a strictly newer
    /// timestamp wins; equal timestamps are broken by the lexicographically
    /// larger node id so all replicas converge to the same value.
    pub fn apply_lww(&mut self, value: T, timestamp: DateTime<Utc>, node_id: NodeId) {
        if timestamp > self.timestamp || (timestamp == self.timestamp && node_id > self.node_id) {
            self.value = value;
            self.timestamp = timestamp;
            self.node_id = node_id;
        }
    }

    /// Merge another replica's state into this one using the LWW rules above.
    pub fn merge(&mut self, other: &Self) {
        self.apply_lww(other.value.clone(), other.timestamp, other.node_id.clone());
    }
}

// Allow transparent access to the inner value
impl<T: Clone> Deref for SyncedValue<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        &self.value
    }
}

// NOTE(review): writing through DerefMut bypasses the LWW timestamp update,
// so concurrent peers will not see such writes as newer — prefer `set`.
impl<T: Clone> DerefMut for SyncedValue<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.value
    }
}
/// Wrapper for a sync message that goes over gossip
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SyncMessage<T> {
    /// Unique message ID
    pub message_id: String,
    /// Node that sent this
    pub node_id: NodeId,
    /// When it was sent
    pub timestamp: DateTime<Utc>,
    /// The actual sync operation
    pub operation: T,
}

impl<T: Serialize> SyncMessage<T> {
    /// Build a message, deriving `message_id` from the sender id, the current
    /// millisecond timestamp, and a process-wide atomic counter (the counter
    /// disambiguates messages created within the same millisecond).
    pub fn new(node_id: NodeId, operation: T) -> Self {
        use std::sync::atomic::{AtomicU64, Ordering};
        static COUNTER: AtomicU64 = AtomicU64::new(0);
        let seq = COUNTER.fetch_add(1, Ordering::SeqCst);
        Self {
            message_id: format!("{}-{}-{}", node_id, Utc::now().timestamp_millis(), seq),
            node_id,
            timestamp: Utc::now(),
            operation,
        }
    }

    /// Serialize to JSON bytes for the wire.
    pub fn to_bytes(&self) -> anyhow::Result<Vec<u8>> {
        Ok(serde_json::to_vec(self)?)
    }
}

impl<T: for<'de> Deserialize<'de>> SyncMessage<T> {
    /// Inverse of `to_bytes`; fails on malformed JSON.
    pub fn from_bytes(bytes: &[u8]) -> anyhow::Result<Self> {
        Ok(serde_json::from_slice(bytes)?)
    }
}

/// Helper trait for types that can be synced
pub trait Syncable: Sized {
    /// Operation type broadcast to peers.
    type Operation: Serialize + for<'de> Deserialize<'de> + Clone;
    /// Apply a sync operation to this value
    fn apply_sync_op(&mut self, op: &Self::Operation);
    /// Get the node ID for this instance
    fn node_id(&self) -> &NodeId;
    /// Create a sync message for an operation
    fn create_sync_message(&self, op: Self::Operation) -> SyncMessage<Self::Operation> {
        SyncMessage::new(self.node_id().clone(), op)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Local `set` always wins; `apply_lww` with an older timestamp must not.
    #[test]
    fn test_synced_value() {
        let mut val = SyncedValue::new(42, "node1".to_string());
        assert_eq!(*val.get(), 42);
        val.set(100, "node1".to_string());
        assert_eq!(*val.get(), 100);
        // Test LWW semantics
        let old_time = Utc::now() - chrono::Duration::seconds(10);
        val.apply_lww(50, old_time, "node2".to_string());
        assert_eq!(*val.get(), 100); // Should not update with older timestamp
    }

    // A message must survive a serialize/deserialize round trip intact.
    #[test]
    fn test_sync_message() {
        #[derive(Debug, Clone, Serialize, Deserialize)]
        struct TestOp {
            value: i32,
        }
        let op = TestOp { value: 42 };
        let msg = SyncMessage::new("node1".to_string(), op);
        let bytes = msg.to_bytes().unwrap();
        let decoded = SyncMessage::<TestOp>::from_bytes(&bytes).unwrap();
        assert_eq!(decoded.node_id, "node1");
        assert_eq!(decoded.operation.value, 42);
    }
}

View File

@@ -0,0 +1,98 @@
use lib::{ChatDb, Result};
use chrono::Datelike;
/// Test that we can get messages from the Dutch phone number conversation
///
/// Requires a real `chat.db` in the working directory, with messages from
/// January 2024 onward.
#[test]
fn test_get_our_messages_default_range() -> Result<()> {
    let db = ChatDb::open("chat.db")?;
    // Get messages from January 2024 to now (default)
    let messages = db.get_our_messages(None, None)?;
    println!("Found {} messages from January 2024 to now", messages.len());
    // Verify we got some messages
    assert!(!messages.is_empty(), "Should find messages in the conversation");
    // Verify messages are in chronological order (ASC)
    for i in 1..messages.len().min(10) {
        if let (Some(prev_date), Some(curr_date)) = (messages[i - 1].date, messages[i].date) {
            assert!(
                prev_date <= curr_date,
                "Messages should be in ascending date order"
            );
        }
    }
    // Verify all messages are from 2024 or later
    for msg in messages.iter().take(10) {
        if let Some(date) = msg.date {
            assert!(date.year() >= 2024, "Messages should be from 2024 or later");
            // Truncate by characters, not bytes: the old `&s[..s.len().min(50)]`
            // panics when byte 50 falls inside a multi-byte character
            // (emoji are common in chat messages).
            let preview: Option<String> = msg.text.as_ref().map(|s| s.chars().take(50).collect());
            println!(
                "Message date: {}, from_me: {}, text: {:?}",
                date, msg.is_from_me, preview
            );
        }
    }
    Ok(())
}
/// Test that we can get messages with a custom date range
///
/// Requires a real `chat.db` in the working directory.
#[test]
fn test_get_our_messages_custom_range() -> Result<()> {
    use chrono::{TimeZone, Utc};
    let db = ChatDb::open("chat.db")?;
    // Get messages from March 2024 to June 2024
    let start = Utc.with_ymd_and_hms(2024, 3, 1, 0, 0, 0).unwrap();
    let end = Utc.with_ymd_and_hms(2024, 6, 1, 0, 0, 0).unwrap();
    let messages = db.get_our_messages(Some(start), Some(end))?;
    println!("Found {} messages from March to June 2024", messages.len());
    // Verify all messages are within the date range (bounds are inclusive)
    for msg in &messages {
        if let Some(date) = msg.date {
            assert!(
                date >= start && date <= end,
                "Message date {} should be between {} and {}",
                date, start, end
            );
        }
    }
    Ok(())
}
/// Test displaying a summary of the conversation
///
/// Requires a real `chat.db`; prints sender counts and the first five
/// messages.
#[test]
fn test_conversation_summary() -> Result<()> {
    let db = ChatDb::open("chat.db")?;
    let messages = db.get_our_messages(None, None)?;
    println!("\n=== Conversation Summary ===");
    println!("Total messages: {}", messages.len());
    let from_me = messages.iter().filter(|m| m.is_from_me).count();
    let from_them = messages.len() - from_me;
    println!("From me: {}", from_me);
    println!("From them: {}", from_them);
    // Show first few messages
    println!("\nFirst 5 messages:");
    for (i, msg) in messages.iter().take(5).enumerate() {
        if let Some(date) = msg.date {
            let sender = if msg.is_from_me { "Me" } else { "Them" };
            // Truncate by characters, not bytes: the old `&t[..60]` panics when
            // byte 60 falls inside a multi-byte character (emoji are common).
            let text = msg.text.as_ref()
                .map(|t| {
                    let head: String = t.chars().take(60).collect();
                    if head.len() < t.len() {
                        format!("{}...", head)
                    } else {
                        t.clone()
                    }
                })
                .unwrap_or_else(|| "[No text]".to_string());
            println!("{}. {} ({}): {}", i + 1, date.format("%Y-%m-%d %H:%M"), sender, text);
        }
    }
    Ok(())
}

View File

@@ -0,0 +1,157 @@
use lib::sync::{synced, SyncMessage, Syncable};
use iroh::{Endpoint, protocol::{Router, ProtocolHandler, AcceptError}};
use anyhow::Result;
use std::sync::Arc;
use tokio::sync::Mutex;
/// Test configuration that can be synced
///
/// NOTE(review): the `#[synced]` macro presumably generates the companion
/// `TestConfigOp` type plus the `new`/getter/`set_*`/`apply_op` methods used
/// below — confirm against the sync-macros crate.
#[synced]
struct TestConfig {
    value: i32,
    name: String,
    // Excluded from sync: identifies the local node only.
    #[sync(skip)]
    node_id: String,
}

/// ALPN identifier for our sync protocol
const SYNC_ALPN: &[u8] = b"/lonni/sync/1";

/// Protocol handler for receiving sync messages
#[derive(Debug, Clone)]
struct SyncProtocol {
    // Shared with the test body so it can assert on the final state.
    config: Arc<Mutex<TestConfig>>,
}

impl ProtocolHandler for SyncProtocol {
    /// Accept one connection, read a single `SyncMessage` from a bi-stream,
    /// and apply its operation to the shared config.
    async fn accept(&self, connection: iroh::endpoint::Connection) -> Result<(), AcceptError> {
        println!("Accepting connection from: {}", connection.remote_id());
        // Accept the bidirectional stream
        let (mut send, mut recv) = connection.accept_bi().await
            .map_err(AcceptError::from_err)?;
        println!("Stream accepted, reading message...");
        // Read the sync message (capped at 1 MiB)
        let bytes = recv.read_to_end(1024 * 1024).await
            .map_err(AcceptError::from_err)?;
        println!("Received {} bytes", bytes.len());
        // Deserialize and apply
        let msg = SyncMessage::<TestConfigOp>::from_bytes(&bytes)
            .map_err(|e| AcceptError::from_err(std::io::Error::new(std::io::ErrorKind::InvalidData, e)))?;
        println!("Applying operation from node: {}", msg.node_id);
        let mut config = self.config.lock().await;
        config.apply_op(&msg.operation);
        println!("Operation applied successfully");
        // Close the stream
        send.finish()
            .map_err(AcceptError::from_err)?;
        Ok(())
    }
}
/// End-to-end test: node1 mutates a synced config and ships the resulting
/// operation to node2 over a direct iroh QUIC connection; node2 applies it
/// and both replicas must agree on the new value.
#[tokio::test(flavor = "multi_thread")]
async fn test_sync_between_two_nodes() -> Result<()> {
    println!("\n=== Testing Sync Between Two Nodes ===\n");
    // Create two endpoints
    let node1 = Endpoint::builder().bind().await?;
    let node2 = Endpoint::builder().bind().await?;
    let node1_addr = node1.addr();
    let node2_addr = node2.addr();
    let node1_id = node1_addr.id.to_string();
    let node2_id = node2_addr.id.to_string();
    println!("Node 1: {}", node1_id);
    println!("Node 2: {}", node2_id);
    // Create synced configs on both nodes (identical initial state)
    let mut config1 = TestConfig::new(
        42,
        "initial".to_string(),
        node1_id.clone(),
    );
    let config2 = TestConfig::new(
        42,
        "initial".to_string(),
        node2_id.clone(),
    );
    // node2's replica is shared with its protocol handler.
    let config2_shared = Arc::new(Mutex::new(config2));
    println!("\nInitial state:");
    println!(" Node 1: value={}, name={}", config1.value(), config1.name());
    {
        let config2 = config2_shared.lock().await;
        println!(" Node 2: value={}, name={}", config2.value(), config2.name());
    }
    // Set up router on node2 to accept incoming connections
    println!("\nSetting up node2 router...");
    let protocol = SyncProtocol {
        config: config2_shared.clone(),
    };
    let router = Router::builder(node2)
        .accept(SYNC_ALPN, protocol)
        .spawn();
    // Wait until node2 is reachable before dialing.
    router.endpoint().online().await;
    println!("✓ Node2 router ready");
    // Node 1 changes the value; the setter returns the op to broadcast.
    println!("\nNode 1 changing value to 100...");
    let op = config1.set_value(100);
    // Serialize the operation
    let sync_msg = SyncMessage::new(node1_id.clone(), op);
    let bytes = sync_msg.to_bytes()?;
    println!("Serialized to {} bytes", bytes.len());
    // Establish QUIC connection from node1 to node2
    println!("\nEstablishing QUIC connection...");
    let conn = node1.connect(node2_addr.clone(), SYNC_ALPN).await?;
    println!("✓ Connection established");
    // Open a bidirectional stream
    let (mut send, _recv) = conn.open_bi().await?;
    // Send the sync message
    println!("Sending sync message...");
    send.write_all(&bytes).await?;
    send.finish()?;
    println!("✓ Message sent");
    // Wait a bit for the message to be processed
    // NOTE(review): a fixed sleep is a potential flake under load; an explicit
    // ack from the handler would be sturdier.
    tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
    // Verify both configs have the same value
    println!("\nFinal state:");
    println!(" Node 1: value={}, name={}", config1.value(), config1.name());
    {
        let config2 = config2_shared.lock().await;
        println!(" Node 2: value={}, name={}", config2.value(), config2.name());
        assert_eq!(*config1.value(), 100);
        assert_eq!(*config2.value(), 100);
        // The untouched field must remain identical on both nodes.
        assert_eq!(config1.name(), "initial");
        assert_eq!(config2.name(), "initial");
    }
    println!("\n✓ Sync successful!");
    // Cleanup
    router.shutdown().await?;
    node1.close().await;
    Ok(())
}

58
crates/server/Cargo.toml Normal file
View File

@@ -0,0 +1,58 @@
[package]
name = "server"
version = "0.1.0"
edition.workspace = true
[[bin]]
name = "server"
path = "src/main.rs"
[dependencies]
# Bevy (headless)
bevy = { version = "0.17", default-features = false, features = [
"bevy_state",
] }
# Iroh - P2P networking and gossip
iroh = { workspace = true }
iroh-gossip = { workspace = true }
# Async runtime
tokio = { version = "1", features = ["full"] }
tokio-stream = "0.1"
futures-lite = "2.5"
# Database
rusqlite = { version = "0.37.0", features = ["bundled", "column_decltype", "load_extension"] }
# Serialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
toml = "0.9"
# Logging
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
# Error handling
thiserror = "2.0"
anyhow = "1.0"
# Date/time
chrono = { version = "0.4", features = ["serde"] }
# Random number generation
rand = "0.8"
# ML/AI - Candle for inference (using newer versions with better compatibility)
candle-core = "0.8"
candle-nn = "0.8"
candle-transformers = "0.8"
tokenizers = "0.20"
hf-hub = "0.3"
# Synchronization
parking_lot = { workspace = true }
# Local dependencies
lib = { path = "../lib" }

View File

@@ -0,0 +1 @@
// Asset loading and management will go here

View File

@@ -0,0 +1,14 @@
use bevy::prelude::*;
use parking_lot::Mutex;
use rusqlite::Connection;
use std::sync::Arc;
use crate::config::Config;
/// Bevy resource wrapping application configuration
#[derive(Resource)]
pub struct AppConfig(pub Config);

/// Bevy resource wrapping database connection
///
/// The `parking_lot::Mutex` serializes access to the single rusqlite
/// `Connection` so the resource can be used from multiple Bevy systems.
#[derive(Resource)]
pub struct Database(pub Arc<Mutex<Connection>>);

View File

@@ -0,0 +1,87 @@
use bevy::prelude::*;
use iroh::protocol::Router;
use iroh::Endpoint;
use iroh_gossip::api::{GossipReceiver, GossipSender};
use iroh_gossip::net::Gossip;
use iroh_gossip::proto::TopicId;
use parking_lot::Mutex;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
/// Message envelope for gossip sync
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SyncMessage {
    /// The actual message from iMessage
    pub message: lib::Message,
    /// Timestamp when this was published to gossip
    /// (presumably Unix seconds — confirm against the publisher)
    pub sync_timestamp: i64,
    /// ID of the node that published this
    pub publisher_node_id: String,
}

/// Bevy resource wrapping the gossip handle
/// (Clone lets multiple systems hold it)
#[derive(Resource, Clone)]
pub struct IrohGossipHandle {
    pub gossip: Gossip,
}

/// Bevy resource wrapping the gossip sender
/// (Mutex gives systems exclusive access to the single sender half)
#[derive(Resource)]
pub struct IrohGossipSender {
    pub sender: Arc<Mutex<GossipSender>>,
}

/// Bevy resource wrapping the gossip receiver
#[derive(Resource)]
pub struct IrohGossipReceiver {
    pub receiver: Arc<Mutex<GossipReceiver>>,
}

/// Bevy resource with Iroh router
#[derive(Resource)]
pub struct IrohRouter {
    pub router: Router,
}

/// Bevy resource with Iroh endpoint
#[derive(Resource, Clone)]
pub struct IrohEndpoint {
    pub endpoint: Endpoint,
    // Cached string form of the endpoint's node id.
    pub node_id: String,
}

/// Bevy resource for gossip topic ID
#[derive(Resource)]
pub struct GossipTopic(pub TopicId);

/// Bevy resource for tracking gossip initialization task
///
/// The task yields all networking handles at once;
/// `None` presumably signals a failed initialization — confirm in the
/// system that polls this task.
#[derive(Resource)]
pub struct GossipInitTask(pub bevy::tasks::Task<Option<(
    Endpoint,
    Gossip,
    Router,
    GossipSender,
    GossipReceiver,
)>>);

/// Bevy message: a new message that needs to be published to gossip
#[derive(Message, Clone, Debug)]
pub struct PublishMessageEvent {
    pub message: lib::Message,
}

/// Bevy message: a message received from gossip that needs to be saved to SQLite
#[derive(Message, Clone, Debug)]
pub struct GossipMessageReceived {
    pub sync_message: SyncMessage,
}

/// Helper to serialize a sync message (JSON wire format)
pub fn serialize_sync_message(msg: &SyncMessage) -> anyhow::Result<Vec<u8>> {
    Ok(serde_json::to_vec(msg)?)
}

/// Helper to deserialize a sync message (inverse of `serialize_sync_message`)
pub fn deserialize_sync_message(data: &[u8]) -> anyhow::Result<SyncMessage> {
    Ok(serde_json::from_slice(data)?)
}

View File

@@ -0,0 +1,5 @@
pub mod database;
pub mod gossip;
pub use database::*;
pub use gossip::*;

View File

@@ -0,0 +1,84 @@
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;
/// Top-level application configuration, loaded from TOML
/// (see `Config::from_file`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    pub database: DatabaseConfig,
    pub services: ServicesConfig,
    pub models: ModelsConfig,
    pub tailscale: TailscaleConfig,
    pub grpc: GrpcConfig,
}

/// Database file locations.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DatabaseConfig {
    // This application's own SQLite database.
    pub path: String,
    // The source iMessage chat.db.
    pub chat_db_path: String,
}

/// Background service tuning.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServicesConfig {
    // Polling period in milliseconds.
    pub poll_interval_ms: u64,
    // Sampling rate — presumably a 0.0–1.0 fraction (default 0.05).
    pub training_set_sample_rate: f64,
}

/// Model identifiers used for inference (Hugging Face repo ids by default).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelsConfig {
    pub embedding_model: String,
    pub emotion_model: String,
}

/// Tailscale node settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TailscaleConfig {
    pub hostname: String,
    pub state_dir: String,
}

/// gRPC server settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GrpcConfig {
    pub port: u16,
}
impl Config {
    /// Load configuration from a TOML file.
    ///
    /// # Errors
    ///
    /// Fails if the file cannot be read or does not parse as `Config`.
    pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Self> {
        // `with_context` defers the format! allocation to the error path;
        // the previous `.context(format!(...))` built the message even on
        // success (clippy or_fun_call-style fix).
        let content = fs::read_to_string(path.as_ref())
            .with_context(|| format!("Failed to read config file: {:?}", path.as_ref()))?;
        let config: Config = toml::from_str(&content)
            .context("Failed to parse config file")?;
        Ok(config)
    }

    /// Built-in defaults used when no config file is supplied.
    pub fn default_config() -> Self {
        Self {
            database: DatabaseConfig {
                path: "./us.db".to_string(),
                chat_db_path: "./crates/lib/chat.db".to_string(),
            },
            services: ServicesConfig {
                poll_interval_ms: 1000,
                training_set_sample_rate: 0.05,
            },
            models: ModelsConfig {
                embedding_model: "Qwen/Qwen3-Embedding-0.6B".to_string(),
                emotion_model: "SamLowe/roberta-base-go_emotions".to_string(),
            },
            tailscale: TailscaleConfig {
                hostname: "lonni-daemon".to_string(),
                state_dir: "./tailscale-state".to_string(),
            },
            grpc: GrpcConfig {
                port: 50051,
            },
        }
    }

    /// Serialize this config as pretty TOML and write it to `path`.
    ///
    /// # Errors
    ///
    /// Fails on serialization or I/O errors.
    pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
        let content = toml::to_string_pretty(self)
            .context("Failed to serialize config")?;
        fs::write(path.as_ref(), content)
            .with_context(|| format!("Failed to write config file: {:?}", path.as_ref()))?;
        Ok(())
    }
}

View File

@@ -0,0 +1,5 @@
pub mod operations;
pub mod schema;
pub use operations::*;
pub use schema::*;

View File

@@ -0,0 +1,321 @@
use crate::db::schema::{deserialize_embedding, serialize_embedding};
use crate::models::*;
use chrono::{TimeZone, Utc};
use rusqlite::{params, Connection, OptionalExtension, Result, Row};
/// Insert a new message into the database
pub fn insert_message(conn: &Connection, msg: &lib::Message) -> Result<i64> {
let timestamp = msg.date.map(|dt| dt.timestamp());
let created_at = Utc::now().timestamp();
conn.execute(
"INSERT INTO messages (chat_db_rowid, text, timestamp, is_from_me, created_at)
VALUES (?1, ?2, ?3, ?4, ?5)
ON CONFLICT(chat_db_rowid) DO NOTHING",
params![msg.rowid, msg.text, timestamp, msg.is_from_me, created_at],
)?;
Ok(conn.last_insert_rowid())
}
/// Get message ID by chat.db rowid
///
/// Returns `Ok(None)` when no row maps to `chat_db_rowid`.
pub fn get_message_id_by_chat_rowid(conn: &Connection, chat_db_rowid: i64) -> Result<Option<i64>> {
    conn.query_row(
        "SELECT id FROM messages WHERE chat_db_rowid = ?1",
        params![chat_db_rowid],
        |row| row.get(0),
    )
    .optional()
}

/// Get message by ID
///
/// Errors with `QueryReturnedNoRows` when the id does not exist.
pub fn get_message(conn: &Connection, id: i64) -> Result<Message> {
    conn.query_row(
        "SELECT id, chat_db_rowid, text, timestamp, is_from_me, created_at FROM messages WHERE id = ?1",
        params![id],
        map_message_row,
    )
}

// Helper function to map database rows to structs.
// Column order must match the SELECT list in `get_message`.
fn map_message_row(row: &Row) -> Result<Message> {
    let timestamp: Option<i64> = row.get(3)?;
    let created_at: i64 = row.get(5)?;
    Ok(Message {
        id: row.get(0)?,
        chat_db_rowid: row.get(1)?,
        text: row.get(2)?,
        // Stored as Unix seconds. `timestamp_opt(.., 0).unwrap()` panics only
        // for values outside chrono's representable range; rows written by
        // this crate use `Utc::now()` seconds, which are in range.
        timestamp: timestamp.map(|ts| Utc.timestamp_opt(ts, 0).unwrap()),
        is_from_me: row.get(4)?,
        created_at: Utc.timestamp_opt(created_at, 0).unwrap(),
    })
}
/// Insert message embedding
///
/// Stores `embedding` as a byte blob (see `serialize_embedding`) and returns
/// the new row id. There is no conflict target here, so inserting twice for
/// the same message creates duplicate rows.
pub fn insert_message_embedding(
    conn: &Connection,
    message_id: i64,
    embedding: &[f32],
    model_name: &str,
) -> Result<i64> {
    let embedding_bytes = serialize_embedding(embedding);
    let created_at = Utc::now().timestamp();
    conn.execute(
        "INSERT INTO message_embeddings (message_id, embedding, model_name, created_at)
         VALUES (?1, ?2, ?3, ?4)",
        params![message_id, embedding_bytes, model_name, created_at],
    )?;
    Ok(conn.last_insert_rowid())
}
/// Get message embedding
///
/// Returns `Ok(None)` when the message has no stored embedding.
pub fn get_message_embedding(conn: &Connection, message_id: i64) -> Result<Option<MessageEmbedding>> {
    conn.query_row(
        "SELECT id, message_id, embedding, model_name, created_at
         FROM message_embeddings WHERE message_id = ?1",
        params![message_id],
        |row| {
            // Decode the blob back into the f32 vector.
            let embedding_bytes: Vec<u8> = row.get(2)?;
            let created_at: i64 = row.get(4)?;
            Ok(MessageEmbedding {
                id: row.get(0)?,
                message_id: row.get(1)?,
                embedding: deserialize_embedding(&embedding_bytes),
                model_name: row.get(3)?,
                created_at: Utc.timestamp_opt(created_at, 0).unwrap(),
            })
        },
    )
    .optional()
}
/// Insert or get word embedding
///
/// Inserts the embedding for `word`, or — when the word already exists
/// (`ON CONFLICT(word) DO NOTHING`) — returns the id of the existing row.
/// (Previously the conflict path returned `last_insert_rowid()`, which
/// reports an unrelated id when nothing was inserted.)
pub fn insert_word_embedding(
    conn: &Connection,
    word: &str,
    embedding: &[f32],
    model_name: &str,
) -> Result<i64> {
    let embedding_bytes = serialize_embedding(embedding);
    let created_at = Utc::now().timestamp();
    let inserted = conn.execute(
        "INSERT INTO word_embeddings (word, embedding, model_name, created_at)
         VALUES (?1, ?2, ?3, ?4)
         ON CONFLICT(word) DO NOTHING",
        params![word, embedding_bytes, model_name, created_at],
    )?;
    if inserted == 0 {
        // Row already present: report its id ("get" half of insert-or-get).
        return conn.query_row(
            "SELECT id FROM word_embeddings WHERE word = ?1",
            params![word],
            |row| row.get(0),
        );
    }
    Ok(conn.last_insert_rowid())
}
/// Get word embedding
///
/// Returns `Ok(None)` when the word has no stored embedding.
pub fn get_word_embedding(conn: &Connection, word: &str) -> Result<Option<WordEmbedding>> {
    conn.query_row(
        "SELECT id, word, embedding, model_name, created_at
         FROM word_embeddings WHERE word = ?1",
        params![word],
        |row| {
            // Decode the blob back into the f32 vector.
            let embedding_bytes: Vec<u8> = row.get(2)?;
            let created_at: i64 = row.get(4)?;
            Ok(WordEmbedding {
                id: row.get(0)?,
                word: row.get(1)?,
                embedding: deserialize_embedding(&embedding_bytes),
                model_name: row.get(3)?,
                created_at: Utc.timestamp_opt(created_at, 0).unwrap(),
            })
        },
    )
    .optional()
}
/// Insert emotion classification
///
/// `created_at` and `updated_at` start equal; `update_emotion` later bumps
/// the latter. There is no conflict target, so one message can accumulate
/// multiple emotion rows if inserted twice.
pub fn insert_emotion(
    conn: &Connection,
    message_id: i64,
    emotion: &str,
    confidence: f64,
    model_version: &str,
) -> Result<i64> {
    let now = Utc::now().timestamp();
    conn.execute(
        "INSERT INTO emotions (message_id, emotion, confidence, model_version, created_at, updated_at)
         VALUES (?1, ?2, ?3, ?4, ?5, ?6)",
        params![message_id, emotion, confidence, model_version, now, now],
    )?;
    Ok(conn.last_insert_rowid())
}

/// Update emotion classification
///
/// Silently succeeds (affecting zero rows) when no emotion row exists for
/// `message_id`; `model_version` is left untouched.
pub fn update_emotion(
    conn: &Connection,
    message_id: i64,
    emotion: &str,
    confidence: f64,
) -> Result<()> {
    let updated_at = Utc::now().timestamp();
    conn.execute(
        "UPDATE emotions SET emotion = ?1, confidence = ?2, updated_at = ?3
         WHERE message_id = ?4",
        params![emotion, confidence, updated_at, message_id],
    )?;
    Ok(())
}
/// Get emotion by message ID
///
/// Returns `Ok(None)` when the message has not been classified yet.
pub fn get_emotion_by_message_id(conn: &Connection, message_id: i64) -> Result<Option<Emotion>> {
    conn.query_row(
        "SELECT id, message_id, emotion, confidence, model_version, created_at, updated_at
         FROM emotions WHERE message_id = ?1",
        params![message_id],
        map_emotion_row,
    )
    .optional()
}
/// Get emotion by ID
///
/// Primary-key lookup; returns `Ok(None)` when the row does not exist.
pub fn get_emotion_by_id(conn: &Connection, id: i64) -> Result<Option<Emotion>> {
    conn.query_row(
        "SELECT id, message_id, emotion, confidence, model_version, created_at, updated_at
         FROM emotions WHERE id = ?1",
        params![id],
        map_emotion_row,
    )
    .optional()
}
/// List all emotions with optional filters
pub fn list_emotions(
conn: &Connection,
emotion_filter: Option<&str>,
min_confidence: Option<f64>,
limit: Option<i32>,
offset: Option<i32>,
) -> Result<Vec<Emotion>> {
let mut query = String::from(
"SELECT id, message_id, emotion, confidence, model_version, created_at, updated_at
FROM emotions WHERE 1=1"
);
if emotion_filter.is_some() {
query.push_str(" AND emotion = ?1");
}
if min_confidence.is_some() {
query.push_str(" AND confidence >= ?2");
}
query.push_str(" ORDER BY created_at DESC");
if limit.is_some() {
query.push_str(" LIMIT ?3");
}
if offset.is_some() {
query.push_str(" OFFSET ?4");
}
let mut stmt = conn.prepare(&query)?;
let emotions = stmt
.query_map(
params![
emotion_filter.unwrap_or(""),
min_confidence.unwrap_or(0.0),
limit.unwrap_or(1000),
offset.unwrap_or(0),
],
map_emotion_row,
)?
.collect::<Result<Vec<_>>>()?;
Ok(emotions)
}
/// Delete emotion by ID.
///
/// Succeeds even when no row matches; the affected-row count is discarded.
pub fn delete_emotion(conn: &Connection, id: i64) -> Result<()> {
    conn.execute("DELETE FROM emotions WHERE id = ?1", params![id])
        .map(|_rows_deleted| ())
}
/// Count total emotions.
pub fn count_emotions(conn: &Connection) -> Result<i32> {
    let total = conn.query_row("SELECT COUNT(*) FROM emotions", [], |row| row.get::<_, i32>(0))?;
    Ok(total)
}
/// Map one `emotions` row (column order as selected by the queries above)
/// into an `Emotion`, converting unix-second timestamps to `DateTime<Utc>`.
fn map_emotion_row(row: &Row) -> Result<Emotion> {
    let created_at: i64 = row.get(5)?;
    let updated_at: i64 = row.get(6)?;
    Ok(Emotion {
        id: row.get(0)?,
        message_id: row.get(1)?,
        emotion: row.get(2)?,
        confidence: row.get(3)?,
        model_version: row.get(4)?,
        // NOTE(review): unwrap panics on out-of-range seconds; acceptable
        // for timestamps this module wrote itself.
        created_at: Utc.timestamp_opt(created_at, 0).unwrap(),
        updated_at: Utc.timestamp_opt(updated_at, 0).unwrap(),
    })
}
/// Insert emotion training sample
///
/// `message_id` is optional so hand-written samples can be added without a
/// backing message. Returns the new row's id.
pub fn insert_training_sample(
    conn: &Connection,
    message_id: Option<i64>,
    text: &str,
    expected_emotion: &str,
) -> Result<i64> {
    let now = Utc::now().timestamp();
    conn.execute(
        "INSERT INTO emotions_training_set (message_id, text, expected_emotion, created_at, updated_at)
         VALUES (?1, ?2, ?3, ?4, ?5)",
        params![message_id, text, expected_emotion, now, now],
    )?;
    Ok(conn.last_insert_rowid())
}
/// Get state value from daemon_state table.
///
/// Returns `Ok(None)` when the key has never been stored.
pub fn get_state(conn: &Connection, key: &str) -> Result<Option<String>> {
    let lookup = conn.query_row(
        "SELECT value FROM daemon_state WHERE key = ?1",
        params![key],
        |row| row.get(0),
    );
    lookup.optional()
}
/// Set state value in daemon_state table
///
/// Upsert: inserts the key, or overwrites the existing value; `updated_at`
/// is refreshed either way.
pub fn set_state(conn: &Connection, key: &str, value: &str) -> Result<()> {
    let updated_at = Utc::now().timestamp();
    conn.execute(
        "INSERT INTO daemon_state (key, value, updated_at)
         VALUES (?1, ?2, ?3)
         ON CONFLICT(key) DO UPDATE SET value = ?2, updated_at = ?3",
        params![key, value, updated_at],
    )?;
    Ok(())
}
/// Get last processed chat.db rowid from database, or 0 when the key is
/// missing or its stored value does not parse as an integer.
pub fn get_last_processed_rowid(conn: &Connection) -> Result<i64> {
    let stored = get_state(conn, "last_processed_rowid")?;
    let rowid = match stored {
        Some(raw) => raw.parse::<i64>().unwrap_or(0),
        None => 0,
    };
    Ok(rowid)
}
/// Save last processed chat.db rowid to database (stored as decimal text).
pub fn save_last_processed_rowid(conn: &Connection, rowid: i64) -> Result<()> {
    let value = rowid.to_string();
    set_state(conn, "last_processed_rowid", &value)
}

View File

@@ -0,0 +1,207 @@
use rusqlite::{Connection, Result};
use tracing::info;
/// Create every table and index the daemon needs, enabling foreign-key
/// enforcement and (best-effort) the sqlite-vec extension first.
///
/// All DDL uses `IF NOT EXISTS`, so the function is idempotent and safe to
/// call on every startup.
pub fn initialize_database(conn: &Connection) -> Result<()> {
    info!("Initializing database schema");
    // SQLite does NOT enforce declared FOREIGN KEY constraints (including
    // the ON DELETE CASCADE / SET NULL clauses below) unless this pragma is
    // enabled on the connection, so switch it on before anything else runs.
    conn.execute_batch("PRAGMA foreign_keys = ON;")?;
    // Load sqlite-vec extension (macOS only)
    let vec_path = "./extensions/vec0.dylib";
    // Try to load the vector extension (non-fatal if it fails for now)
    match unsafe { conn.load_extension_enable() } {
        Ok(_) => {
            // SAFETY: extension loading is enabled only around this single,
            // locally-shipped library and disabled again immediately after.
            match unsafe { conn.load_extension(vec_path, None::<&str>) } {
                Ok(_) => info!("Loaded sqlite-vec extension"),
                Err(e) => info!("Could not load sqlite-vec extension: {}. Vector operations will not be available.", e),
            }
            let _ = unsafe { conn.load_extension_disable() };
        }
        Err(e) => info!("Extension loading not enabled: {}", e),
    }
    // Create messages table: local mirror of rows pulled from macOS chat.db.
    conn.execute(
        "CREATE TABLE IF NOT EXISTS messages (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            chat_db_rowid INTEGER UNIQUE NOT NULL,
            text TEXT,
            timestamp INTEGER,
            is_from_me BOOLEAN NOT NULL,
            created_at INTEGER NOT NULL
        )",
        [],
    )?;
    // Create index on chat_db_rowid for fast lookups
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_messages_chat_db_rowid ON messages(chat_db_rowid)",
        [],
    )?;
    // Create message_embeddings table: one BLOB vector per message.
    conn.execute(
        "CREATE TABLE IF NOT EXISTS message_embeddings (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            message_id INTEGER NOT NULL,
            embedding BLOB NOT NULL,
            model_name TEXT NOT NULL,
            created_at INTEGER NOT NULL,
            FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE CASCADE
        )",
        [],
    )?;
    // Create index on message_id
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_message_embeddings_message_id ON message_embeddings(message_id)",
        [],
    )?;
    // Create word_embeddings table: one BLOB vector per unique word.
    conn.execute(
        "CREATE TABLE IF NOT EXISTS word_embeddings (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            word TEXT UNIQUE NOT NULL,
            embedding BLOB NOT NULL,
            model_name TEXT NOT NULL,
            created_at INTEGER NOT NULL
        )",
        [],
    )?;
    // Create index on word
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_word_embeddings_word ON word_embeddings(word)",
        [],
    )?;
    // Create emotions table: one classification per message.
    conn.execute(
        "CREATE TABLE IF NOT EXISTS emotions (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            message_id INTEGER NOT NULL,
            emotion TEXT NOT NULL,
            confidence REAL NOT NULL,
            model_version TEXT NOT NULL,
            created_at INTEGER NOT NULL,
            updated_at INTEGER NOT NULL,
            FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE CASCADE
        )",
        [],
    )?;
    // Create indexes for emotions
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_emotions_message_id ON emotions(message_id)",
        [],
    )?;
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_emotions_emotion ON emotions(emotion)",
        [],
    )?;
    // Create emotions_training_set table: labelled samples for fine-tuning;
    // message_id is nullable so hand-written samples are allowed.
    conn.execute(
        "CREATE TABLE IF NOT EXISTS emotions_training_set (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            message_id INTEGER,
            text TEXT NOT NULL,
            expected_emotion TEXT NOT NULL,
            actual_emotion TEXT,
            confidence REAL,
            is_validated BOOLEAN NOT NULL DEFAULT 0,
            notes TEXT,
            created_at INTEGER NOT NULL,
            updated_at INTEGER NOT NULL,
            FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE SET NULL
        )",
        [],
    )?;
    // Create index on emotions_training_set
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_emotions_training_set_message_id ON emotions_training_set(message_id)",
        [],
    )?;
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_emotions_training_set_validated ON emotions_training_set(is_validated)",
        [],
    )?;
    // Create state table for daemon state persistence (simple key/value).
    conn.execute(
        "CREATE TABLE IF NOT EXISTS daemon_state (
            key TEXT PRIMARY KEY,
            value TEXT NOT NULL,
            updated_at INTEGER NOT NULL
        )",
        [],
    )?;
    // Create models table for storing ML model files as BLOBs.
    conn.execute(
        "CREATE TABLE IF NOT EXISTS models (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT UNIQUE NOT NULL,
            model_type TEXT NOT NULL,
            version TEXT NOT NULL,
            file_data BLOB NOT NULL,
            metadata TEXT,
            created_at INTEGER NOT NULL,
            updated_at INTEGER NOT NULL
        )",
        [],
    )?;
    // Create index on model name and type
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_models_name ON models(name)",
        [],
    )?;
    conn.execute(
        "CREATE INDEX IF NOT EXISTS idx_models_type ON models(model_type)",
        [],
    )?;
    info!("Database schema initialized successfully");
    Ok(())
}
/// Helper function to serialize an `f32` vector to little-endian bytes for
/// BLOB storage.
///
/// Each value occupies exactly 4 bytes, so the output length is
/// `embedding.len() * 4`. The buffer is preallocated in one shot instead of
/// grown through `flat_map().collect()`, whose size hint cannot predict the
/// final byte count.
pub fn serialize_embedding(embedding: &[f32]) -> Vec<u8> {
    let mut bytes = Vec::with_capacity(embedding.len() * 4);
    for value in embedding {
        bytes.extend_from_slice(&value.to_le_bytes());
    }
    bytes
}
/// Helper function to deserialize little-endian bytes back into an `f32`
/// vector.
///
/// Bytes are consumed in 4-byte groups; a trailing remainder shorter than 4
/// bytes is silently ignored (the behavior of `chunks_exact`).
pub fn deserialize_embedding(bytes: &[u8]) -> Vec<f32> {
    let mut values = Vec::with_capacity(bytes.len() / 4);
    for chunk in bytes.chunks_exact(4) {
        let word: [u8; 4] = chunk.try_into().unwrap();
        values.push(f32::from_le_bytes(word));
    }
    values
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Round-trips a representative embedding and checks both the encoded
    /// length (4 bytes per value) and value fidelity.
    #[test]
    fn test_embedding_serialization() {
        let original = vec![1.0f32, 2.5, -3.7, 0.0, 100.5];
        let serialized = serialize_embedding(&original);
        assert_eq!(serialized.len(), original.len() * 4);
        let deserialized = deserialize_embedding(&serialized);
        assert_eq!(original.len(), deserialized.len());
        for (a, b) in original.iter().zip(deserialized.iter()) {
            assert!((a - b).abs() < 1e-6);
        }
    }

    /// Empty input must round-trip to empty output in both directions.
    #[test]
    fn test_empty_embedding() {
        assert!(serialize_embedding(&[]).is_empty());
        assert!(deserialize_embedding(&[]).is_empty());
    }
}

View File

@@ -0,0 +1 @@
// Entity builders and spawners will go here

View File

@@ -0,0 +1,42 @@
use anyhow::Result;
use iroh::protocol::Router;
use iroh::Endpoint;
use iroh_gossip::api::{GossipReceiver, GossipSender};
use iroh_gossip::net::Gossip;
use iroh_gossip::proto::TopicId;
/// Initialize Iroh endpoint and gossip for the given topic
///
/// Binds a fresh endpoint, spawns the gossip protocol, registers it on a
/// router for incoming connections, subscribes to `topic_id` with no
/// bootstrap peers, and waits until the local node has joined the topic.
/// Returns every handle the caller must keep alive for the stack to work.
///
/// NOTE(review): with an empty bootstrap list, `joined()` presumably only
/// resolves once some remote peer connects to us — confirm this cannot
/// stall forever on an isolated node.
pub async fn init_iroh_gossip(
    topic_id: TopicId,
) -> Result<(Endpoint, Gossip, Router, GossipSender, GossipReceiver)> {
    println!("Initializing Iroh endpoint...");
    // Create the Iroh endpoint
    let endpoint = Endpoint::bind().await?;
    println!("Endpoint created");
    // Build the gossip protocol
    println!("Building gossip protocol...");
    let gossip = Gossip::builder().spawn(endpoint.clone());
    // Setup the router to handle incoming connections
    println!("Setting up router...");
    let router = Router::builder(endpoint.clone())
        .accept(iroh_gossip::ALPN, gossip.clone())
        .spawn();
    // Subscribe to the topic (no bootstrap peers for now)
    println!("Subscribing to topic: {:?}", topic_id);
    let bootstrap_peers = vec![];
    let subscribe_handle = gossip.subscribe(topic_id, bootstrap_peers).await?;
    // Split into sender and receiver
    let (sender, mut receiver) = subscribe_handle.split();
    // Wait for join to complete
    println!("Waiting for gossip join...");
    receiver.joined().await?;
    println!("Gossip initialized successfully");
    Ok((endpoint, gossip, router, sender, receiver))
}

96
crates/server/src/main.rs Normal file
View File

@@ -0,0 +1,96 @@
mod assets;
mod components;
mod config;
mod db;
mod entities;
mod iroh_sync;
mod models;
mod services;
mod systems;
use anyhow::{Context, Result};
use bevy::prelude::*;
use config::Config;
use iroh_gossip::proto::TopicId;
use parking_lot::Mutex;
use rusqlite::Connection;
use std::path::Path;
use std::sync::Arc;
// Re-export init function
pub use iroh_sync::init_iroh_gossip;
// Import components and systems
use components::*;
use systems::*;
/// Server entry point: loads config, opens the database, and runs a headless
/// Bevy app wiring the chat-polling and gossip-sync systems together.
fn main() {
    println!("Starting server");
    // Load configuration and initialize database
    let (config, us_db) = match initialize_app() {
        Ok(data) => data,
        Err(e) => {
            eprintln!("Failed to initialize app: {}", e);
            return;
        }
    };
    // Create a topic ID for gossip (use a fixed topic for now)
    // The 32-byte topic is the ASCII tag "us-sync-v1" padded with zeros, so
    // every node built from this source lands on the same topic.
    let mut topic_bytes = [0u8; 32];
    topic_bytes[..10].copy_from_slice(b"us-sync-v1");
    let topic_id = TopicId::from_bytes(topic_bytes);
    // Start Bevy app (headless)
    App::new()
        .add_plugins(MinimalPlugins)
        .add_message::<PublishMessageEvent>()
        .add_message::<GossipMessageReceived>()
        .insert_resource(AppConfig(config))
        .insert_resource(Database(us_db))
        .insert_resource(GossipTopic(topic_id))
        .add_systems(Startup, (setup_database, setup_gossip))
        .add_systems(
            Update,
            (
                poll_gossip_init,
                poll_chat_db,
                detect_new_messages,
                publish_to_gossip,
                receive_from_gossip,
                save_gossip_messages,
            ),
        )
        .run();
}
/// Initialize configuration and database
///
/// Loads `config.toml` if present; otherwise writes the default config to
/// disk before using it. Then opens (creating if needed) the SQLite database
/// at the configured path and runs schema initialization. Returns the config
/// plus the connection wrapped for shared use across systems.
fn initialize_app() -> Result<(Config, Arc<Mutex<Connection>>)> {
    let config = if Path::new("config.toml").exists() {
        println!("Loading config from config.toml");
        Config::from_file("config.toml")?
    } else {
        println!("No config.toml found, using default configuration");
        let config = Config::default_config();
        config
            .save("config.toml")
            .context("Failed to save default config")?;
        println!("Saved default configuration to config.toml");
        config
    };
    println!("Configuration loaded");
    println!("  Database: {}", config.database.path);
    println!("  Chat DB: {}", config.database.chat_db_path);
    // Initialize database
    println!("Initializing database at {}", config.database.path);
    let conn =
        Connection::open(&config.database.path).context("Failed to open database")?;
    db::initialize_database(&conn).context("Failed to initialize database schema")?;
    let us_db = Arc::new(Mutex::new(conn));
    Ok((config, us_db))
}

View File

@@ -0,0 +1,60 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
/// Represents a message stored in our database
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Message {
    /// Primary key in our local `messages` table.
    pub id: i64,
    /// Rowid of the source row in macOS chat.db (unique per message).
    pub chat_db_rowid: i64,
    /// Message body; NULL in chat.db for some message kinds.
    pub text: Option<String>,
    pub timestamp: Option<DateTime<Utc>>,
    pub is_from_me: bool,
    pub created_at: DateTime<Utc>,
}
/// Represents a message embedding (full message vector)
#[derive(Debug, Clone)]
pub struct MessageEmbedding {
    pub id: i64,
    /// FK to `messages.id`.
    pub message_id: i64,
    /// Decoded from the little-endian BLOB stored in the database.
    pub embedding: Vec<f32>,
    pub model_name: String,
    pub created_at: DateTime<Utc>,
}
/// Represents a word embedding
#[derive(Debug, Clone)]
pub struct WordEmbedding {
    pub id: i64,
    /// Lowercased token; unique in the `word_embeddings` table.
    pub word: String,
    pub embedding: Vec<f32>,
    pub model_name: String,
    pub created_at: DateTime<Utc>,
}
/// Represents an emotion classification for a message
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Emotion {
    pub id: i64,
    /// FK to `messages.id`.
    pub message_id: i64,
    /// Predicted label (e.g. one of the go_emotions classes).
    pub emotion: String,
    /// Classifier confidence in [0, 1] — presumably; confirm model output range.
    pub confidence: f64,
    pub model_version: String,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
/// Represents an emotion training sample
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmotionTrainingSample {
    pub id: i64,
    /// Optional FK to `messages.id`; NULL for hand-written samples.
    pub message_id: Option<i64>,
    pub text: String,
    /// Ground-truth label assigned to the sample.
    pub expected_emotion: String,
    /// Label the model actually produced, once evaluated.
    pub actual_emotion: Option<String>,
    pub confidence: Option<f64>,
    /// True once a human has reviewed the sample.
    pub is_validated: bool,
    pub notes: Option<String>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}

View File

@@ -0,0 +1,72 @@
syntax = "proto3";
package emotions;
// Emotion classification for a message.
// Mirrors the server's `emotions` table; timestamps are unix seconds.
message Emotion {
  int64 id = 1;
  int64 message_id = 2;
  string emotion = 3;
  double confidence = 4;
  string model_version = 5;
  int64 created_at = 6;
  int64 updated_at = 7;
}
// Request to get a single emotion by message ID
message GetEmotionRequest {
  int64 message_id = 1;
}
// Request to get multiple emotions with optional filters
message GetEmotionsRequest {
  // NOTE(review): message_ids is currently not honored by the server
  // implementation — confirm before relying on it.
  repeated int64 message_ids = 1;
  optional string emotion_filter = 2;
  optional double min_confidence = 3;
  optional int32 limit = 4;
  optional int32 offset = 5;
}
// Response containing multiple emotions
message EmotionsResponse {
  repeated Emotion emotions = 1;
  // Total rows matching the query (may exceed len(emotions) when paginated).
  int32 total_count = 2;
}
// Request to update an emotion (for corrections/fine-tuning)
message UpdateEmotionRequest {
  int64 message_id = 1;
  string emotion = 2;
  double confidence = 3;
  // Presence of notes signals the correction should be added to the
  // training set.
  optional string notes = 4;
}
// Request to delete an emotion
message DeleteEmotionRequest {
  int64 id = 1;
}
// Generic response for mutations
message EmotionResponse {
  bool success = 1;
  string message = 2;
  optional Emotion emotion = 3;
}
// Empty message for list all
message Empty {}
// The emotion service with full CRUD operations
service EmotionService {
  // Read operations
  rpc GetEmotion(GetEmotionRequest) returns (Emotion);
  rpc GetEmotions(GetEmotionsRequest) returns (EmotionsResponse);
  rpc ListAllEmotions(Empty) returns (EmotionsResponse);
  // Update operations (for classification corrections and fine-tuning)
  rpc UpdateEmotion(UpdateEmotionRequest) returns (EmotionResponse);
  rpc BatchUpdateEmotions(stream UpdateEmotionRequest) returns (EmotionResponse);
  // Delete operation
  rpc DeleteEmotion(DeleteEmotionRequest) returns (EmotionResponse);
}

View File

@@ -0,0 +1,121 @@
use crate::db;
use anyhow::{Context, Result};
use chrono::Utc;
use rusqlite::Connection;
use std::path::Path;
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::{mpsc, Mutex};
use tokio::time;
use tracing::{debug, error, info, warn};
/// Polls macOS chat.db on a fixed interval, persists newly seen messages
/// into our database, and forwards them to the processing pipeline.
pub struct ChatPollerService {
    // Filesystem path to the (read-only) macOS Messages database.
    chat_db_path: String,
    // Our own SQLite database, shared with the other services.
    us_db: Arc<Mutex<Connection>>,
    // Downstream channel feeding the embedding/emotion pipeline.
    tx: mpsc::Sender<lib::Message>,
    poll_interval: Duration,
}
impl ChatPollerService {
    /// Build a poller; `poll_interval_ms` is the delay between scans.
    pub fn new(
        chat_db_path: String,
        us_db: Arc<Mutex<Connection>>,
        tx: mpsc::Sender<lib::Message>,
        poll_interval_ms: u64,
    ) -> Self {
        Self {
            chat_db_path,
            us_db,
            tx,
            poll_interval: Duration::from_millis(poll_interval_ms),
        }
    }
    /// Main loop: resumes from the last persisted rowid, then polls forever.
    /// Poll errors are logged and retried on the next tick; the loop itself
    /// never exits under normal operation.
    pub async fn run(&self) -> Result<()> {
        info!("Starting chat poller service");
        info!("Polling {} every {:?}", self.chat_db_path, self.poll_interval);
        // Get last processed rowid from database
        let us_db = self.us_db.lock().await;
        let mut last_rowid = db::get_last_processed_rowid(&us_db)
            .context("Failed to get last processed rowid")?;
        drop(us_db);
        info!("Starting from rowid: {}", last_rowid);
        let mut interval = time::interval(self.poll_interval);
        loop {
            interval.tick().await;
            match self.poll_messages(last_rowid).await {
                Ok(new_messages) => {
                    if !new_messages.is_empty() {
                        info!("Found {} new messages", new_messages.len());
                        for msg in new_messages {
                            // Update last_rowid
                            if msg.rowid > last_rowid {
                                last_rowid = msg.rowid;
                            }
                            // Send message to processing pipeline
                            if let Err(e) = self.tx.send(msg).await {
                                error!("Failed to send message to processing pipeline: {}", e);
                            }
                        }
                        // Save state to database so a restart resumes here.
                        let us_db = self.us_db.lock().await;
                        if let Err(e) = db::save_last_processed_rowid(&us_db, last_rowid) {
                            warn!("Failed to save last processed rowid: {}", e);
                        }
                        drop(us_db);
                    } else {
                        debug!("No new messages");
                    }
                }
                Err(e) => {
                    error!("Error polling messages: {}", e);
                }
            }
        }
    }
    /// One poll pass: read recent rows from chat.db, keep those newer than
    /// `last_rowid`, insert them into our database, and return them.
    ///
    /// NOTE(review): only the last 7 days are scanned, so rows older than
    /// that window are never picked up — presumably fine after an initial
    /// backfill, but worth confirming.
    async fn poll_messages(&self, last_rowid: i64) -> Result<Vec<lib::Message>> {
        // Check if chat.db exists
        if !Path::new(&self.chat_db_path).exists() {
            return Err(anyhow::anyhow!("chat.db not found at {}", self.chat_db_path));
        }
        // Open chat.db (read-only); reopened on every poll pass.
        let chat_db = lib::ChatDb::open(&self.chat_db_path)
            .context("Failed to open chat.db")?;
        // Get messages with rowid > last_rowid
        // We'll use the existing get_our_messages but need to filter by rowid
        // For now, let's get recent messages and filter in-memory
        let start_date = Some(Utc::now() - chrono::Duration::days(7));
        let end_date = Some(Utc::now());
        let messages = chat_db
            .get_our_messages(start_date, end_date)
            .context("Failed to get messages from chat.db")?;
        // Filter messages with rowid > last_rowid and ensure they're not duplicates
        let new_messages: Vec<lib::Message> = messages
            .into_iter()
            .filter(|msg| msg.rowid > last_rowid)
            .collect();
        // Insert new messages into our database; per-row failures are logged
        // rather than aborting the whole batch.
        let us_db = self.us_db.lock().await;
        for msg in &new_messages {
            if let Err(e) = db::insert_message(&us_db, msg) {
                warn!("Failed to insert message {}: {}", msg.rowid, e);
            }
        }
        Ok(new_messages)
    }
}

View File

@@ -0,0 +1,110 @@
use crate::db;
use anyhow::Result;
use rusqlite::Connection;
use std::sync::Arc;
use tokio::sync::{mpsc, Mutex};
use tracing::{error, info, warn};
/// Service responsible for generating embeddings for messages and words
pub struct EmbeddingService {
    // Shared SQLite database guarded by an async mutex.
    us_db: Arc<Mutex<Connection>>,
    // Inbound messages from the chat poller.
    rx: mpsc::Receiver<lib::Message>,
    // Name of the embedding model; recorded with every embedding row.
    model_name: String,
}
impl EmbeddingService {
    pub fn new(
        us_db: Arc<Mutex<Connection>>,
        rx: mpsc::Receiver<lib::Message>,
        model_name: String,
    ) -> Self {
        Self {
            us_db,
            rx,
            model_name,
        }
    }
    /// Consume messages from the channel until the sender side closes;
    /// per-message failures are logged and do not stop the loop.
    pub async fn run(mut self) -> Result<()> {
        info!("Starting embedding service with model: {}", self.model_name);
        // TODO: Load the embedding model here
        // For now, we'll create a placeholder implementation
        info!("Loading embedding model...");
        // let model = load_embedding_model(&self.model_name)?;
        info!("Embedding model loaded (placeholder)");
        while let Some(msg) = self.rx.recv().await {
            if let Err(e) = self.process_message(&msg).await {
                error!("Error processing message {}: {}", msg.rowid, e);
            }
        }
        Ok(())
    }
    /// Generate and store a message-level embedding plus one embedding per
    /// unique word. Idempotent: skips messages that already have an
    /// embedding, and words already present in word_embeddings.
    async fn process_message(&self, msg: &lib::Message) -> Result<()> {
        // Get message ID from our database
        let us_db = self.us_db.lock().await;
        let message_id = match db::get_message_id_by_chat_rowid(&us_db, msg.rowid)? {
            Some(id) => id,
            None => {
                warn!("Message {} not found in database, skipping", msg.rowid);
                return Ok(());
            }
        };
        // Check if embedding already exists
        if db::get_message_embedding(&us_db, message_id)?.is_some() {
            return Ok(());
        }
        // Skip if message has no text
        let text = match &msg.text {
            Some(t) if !t.is_empty() => t,
            _ => return Ok(()),
        };
        drop(us_db);
        // Generate embedding for the full message (lock released above so
        // inference doesn't block other DB users).
        // TODO: Replace with actual model inference
        let message_embedding = self.generate_embedding(text)?;
        // Store message embedding
        let us_db = self.us_db.lock().await;
        db::insert_message_embedding(&us_db, message_id, &message_embedding, &self.model_name)?;
        // Tokenize and generate word embeddings
        // NOTE(review): the DB lock is held while generating each word
        // embedding — revisit once real (slow) inference replaces the stub.
        let words = self.tokenize(text);
        for word in words {
            // Check if word embedding exists
            if db::get_word_embedding(&us_db, &word)?.is_none() {
                // Generate embedding for word
                let word_embedding = self.generate_embedding(&word)?;
                db::insert_word_embedding(&us_db, &word, &word_embedding, &self.model_name)?;
            }
        }
        drop(us_db);
        info!("Generated embeddings for message {}", msg.rowid);
        Ok(())
    }
    /// Placeholder inference: returns an all-zero 1024-dim vector regardless
    /// of input.
    fn generate_embedding(&self, text: &str) -> Result<Vec<f32>> {
        // TODO: Replace with actual model inference using Candle
        // For now, return a placeholder embedding of dimension 1024
        let embedding = vec![0.0f32; 1024];
        Ok(embedding)
    }
    /// Lowercased whitespace/punctuation split; no stemming or unicode-aware
    /// segmentation.
    fn tokenize(&self, text: &str) -> Vec<String> {
        // Simple word tokenization (split on whitespace and punctuation)
        // TODO: Replace with proper tokenizer
        text.split(|c: char| c.is_whitespace() || c.is_ascii_punctuation())
            .filter(|s| !s.is_empty())
            .map(|s| s.to_lowercase())
            .collect()
    }
}

View File

@@ -0,0 +1,119 @@
use crate::db;
use anyhow::Result;
use rusqlite::Connection;
use std::sync::Arc;
use tokio::sync::{mpsc, Mutex};
use tracing::{error, info, warn};
/// Service responsible for classifying emotions in messages
pub struct EmotionService {
    // Shared SQLite database guarded by an async mutex.
    us_db: Arc<Mutex<Connection>>,
    // Inbound messages from the chat poller.
    rx: mpsc::Receiver<lib::Message>,
    // Recorded with each classification row for provenance.
    model_version: String,
    // Probability in [0, 1] that a classified message is also added to the
    // training set.
    training_sample_rate: f64,
}
impl EmotionService {
    pub fn new(
        us_db: Arc<Mutex<Connection>>,
        rx: mpsc::Receiver<lib::Message>,
        model_version: String,
        training_sample_rate: f64,
    ) -> Self {
        Self {
            us_db,
            rx,
            model_version,
            training_sample_rate,
        }
    }
    /// Consume messages from the channel until the sender side closes;
    /// per-message failures are logged and do not stop the loop.
    pub async fn run(mut self) -> Result<()> {
        info!(
            "Starting emotion classification service with model: {}",
            self.model_version
        );
        info!(
            "Training sample rate: {:.2}%",
            self.training_sample_rate * 100.0
        );
        // TODO: Load the RoBERTa emotion classification model here
        info!("Loading RoBERTa-base-go_emotions model...");
        // let model = load_emotion_model(&self.model_version)?;
        info!("Emotion model loaded (placeholder)");
        while let Some(msg) = self.rx.recv().await {
            if let Err(e) = self.process_message(&msg).await {
                error!("Error processing message {}: {}", msg.rowid, e);
            }
        }
        Ok(())
    }
    /// Classify one message and store the result. Idempotent: messages that
    /// already have a classification are skipped, as are empty messages.
    /// A random fraction of classified messages is also recorded as
    /// training samples.
    async fn process_message(&self, msg: &lib::Message) -> Result<()> {
        // Get message ID from our database
        let us_db = self.us_db.lock().await;
        let message_id = match db::get_message_id_by_chat_rowid(&us_db, msg.rowid)? {
            Some(id) => id,
            None => {
                warn!("Message {} not found in database, skipping", msg.rowid);
                return Ok(());
            }
        };
        // Check if emotion classification already exists
        if db::get_emotion_by_message_id(&us_db, message_id)?.is_some() {
            return Ok(());
        }
        // Skip if message has no text
        let text = match &msg.text {
            Some(t) if !t.is_empty() => t,
            _ => return Ok(()),
        };
        drop(us_db);
        // Classify emotion (lock released so inference doesn't block DB users)
        // TODO: Replace with actual model inference
        let (emotion, confidence) = self.classify_emotion(text)?;
        // Store emotion classification
        let us_db = self.us_db.lock().await;
        db::insert_emotion(&us_db, message_id, &emotion, confidence, &self.model_version)?;
        // Randomly add to training set based on sample rate
        if rand::random::<f64>() < self.training_sample_rate {
            db::insert_training_sample(&us_db, Some(message_id), text, &emotion)?;
            info!(
                "Added message {} to training set (emotion: {})",
                msg.rowid, emotion
            );
        }
        drop(us_db);
        info!(
            "Classified message {} as {} (confidence: {:.2})",
            msg.rowid, emotion, confidence
        );
        Ok(())
    }
    /// Placeholder classifier: always returns ("neutral", 0.85) regardless
    /// of input.
    fn classify_emotion(&self, text: &str) -> Result<(String, f64)> {
        // TODO: Replace with actual RoBERTa-base-go_emotions inference using Candle
        // The model outputs probabilities for 28 emotions:
        // admiration, amusement, anger, annoyance, approval, caring, confusion,
        // curiosity, desire, disappointment, disapproval, disgust, embarrassment,
        // excitement, fear, gratitude, grief, joy, love, nervousness, optimism,
        // pride, realization, relief, remorse, sadness, surprise, neutral
        // For now, return a placeholder
        let emotion = "neutral".to_string();
        let confidence = 0.85;
        Ok((emotion, confidence))
    }
}

View File

@@ -0,0 +1,232 @@
use crate::db;
use anyhow::Result;
use rusqlite::Connection;
use std::sync::Arc;
use tokio::sync::Mutex;
use tonic::{Request, Response, Status};
use tracing::{error, info};
// Include the generated protobuf code
pub mod emotions {
    // Rust types generated from proto/emotions.proto by tonic at build time.
    tonic::include_proto!("emotions");
}
use emotions::emotion_service_server::{EmotionService as EmotionServiceTrait, EmotionServiceServer};
use emotions::*;
/// gRPC front-end exposing emotion CRUD over tonic.
pub struct GrpcServer {
    // Shared SQLite database guarded by an async mutex.
    us_db: Arc<Mutex<Connection>>,
    // Socket address string (host:port); parsed in `run`.
    address: String,
}
impl GrpcServer {
    /// Create a server that serves data from `us_db` on `address`.
    pub fn new(us_db: Arc<Mutex<Connection>>, address: String) -> Self {
        Self { us_db, address }
    }
    /// Parse the address and serve until the transport shuts down or errors.
    pub async fn run(self) -> Result<()> {
        let addr = self.address.parse()?;
        info!("Starting gRPC server on {}", self.address);
        let service = EmotionServiceImpl {
            us_db: self.us_db.clone(),
        };
        tonic::transport::Server::builder()
            .add_service(EmotionServiceServer::new(service))
            .serve(addr)
            .await?;
        Ok(())
    }
}
/// Request handler backing every EmotionService RPC.
struct EmotionServiceImpl {
    us_db: Arc<Mutex<Connection>>,
}
#[tonic::async_trait]
impl EmotionServiceTrait for EmotionServiceImpl {
    /// Fetch the classification for one message; NOT_FOUND when the message
    /// has not been classified.
    async fn get_emotion(
        &self,
        request: Request<GetEmotionRequest>,
    ) -> Result<Response<Emotion>, Status> {
        let req = request.into_inner();
        let conn = self.us_db.lock().await;
        match db::get_emotion_by_message_id(&conn, req.message_id) {
            Ok(Some(emotion)) => Ok(Response::new(emotion_to_proto(emotion))),
            Ok(None) => Err(Status::not_found(format!(
                "Emotion not found for message_id: {}",
                req.message_id
            ))),
            Err(e) => {
                error!("Database error: {}", e);
                Err(Status::internal("Database error"))
            }
        }
    }
    /// Filtered, paginated listing. NOTE(review): `req.message_ids` is
    /// currently ignored — confirm whether clients rely on it.
    async fn get_emotions(
        &self,
        request: Request<GetEmotionsRequest>,
    ) -> Result<Response<EmotionsResponse>, Status> {
        let req = request.into_inner();
        let conn = self.us_db.lock().await;
        let emotion_filter = req.emotion_filter.as_deref();
        let min_confidence = req.min_confidence;
        let limit = req.limit.map(|l| l as i32);
        let offset = req.offset.map(|o| o as i32);
        match db::list_emotions(&conn, emotion_filter, min_confidence, limit, offset) {
            Ok(emotions) => {
                // total_count is the unfiltered table size, so clients can
                // page past the current result set.
                let total_count = db::count_emotions(&conn).unwrap_or(0);
                Ok(Response::new(EmotionsResponse {
                    emotions: emotions.into_iter().map(emotion_to_proto).collect(),
                    total_count,
                }))
            }
            Err(e) => {
                error!("Database error: {}", e);
                Err(Status::internal("Database error"))
            }
        }
    }
    /// Unfiltered listing of every classification row.
    async fn list_all_emotions(
        &self,
        _request: Request<Empty>,
    ) -> Result<Response<EmotionsResponse>, Status> {
        let conn = self.us_db.lock().await;
        match db::list_emotions(&conn, None, None, None, None) {
            Ok(emotions) => {
                let total_count = emotions.len() as i32;
                Ok(Response::new(EmotionsResponse {
                    emotions: emotions.into_iter().map(emotion_to_proto).collect(),
                    total_count,
                }))
            }
            Err(e) => {
                error!("Database error: {}", e);
                Err(Status::internal("Database error"))
            }
        }
    }
    /// Correct a classification; when `notes` is present the corrected text
    /// is also recorded as a training sample.
    async fn update_emotion(
        &self,
        request: Request<UpdateEmotionRequest>,
    ) -> Result<Response<EmotionResponse>, Status> {
        let req = request.into_inner();
        let conn = self.us_db.lock().await;
        match db::update_emotion(&conn, req.message_id, &req.emotion, req.confidence) {
            Ok(_) => {
                // `notes` acts only as an opt-in flag here: its text is not
                // persisted because insert_training_sample has no notes
                // parameter. The previous `if let Some(notes)` left a dead
                // binding; TODO(review): store req.notes once the training
                // set API accepts it.
                if req.notes.is_some() {
                    if let Ok(Some(msg)) = db::get_message(&conn, req.message_id) {
                        if let Some(text) = msg.text {
                            let _ = db::insert_training_sample(
                                &conn,
                                Some(req.message_id),
                                &text,
                                &req.emotion,
                            );
                        }
                    }
                }
                // Fetch the updated emotion so the caller sees the final row.
                match db::get_emotion_by_message_id(&conn, req.message_id) {
                    Ok(Some(emotion)) => Ok(Response::new(EmotionResponse {
                        success: true,
                        message: "Emotion updated successfully".to_string(),
                        emotion: Some(emotion_to_proto(emotion)),
                    })),
                    _ => Ok(Response::new(EmotionResponse {
                        success: true,
                        message: "Emotion updated successfully".to_string(),
                        emotion: None,
                    })),
                }
            }
            Err(e) => {
                error!("Database error: {}", e);
                Err(Status::internal("Database error"))
            }
        }
    }
    /// Streamed bulk correction; per-item failures are logged and skipped,
    /// and the response reports how many updates succeeded.
    async fn batch_update_emotions(
        &self,
        request: Request<tonic::Streaming<UpdateEmotionRequest>>,
    ) -> Result<Response<EmotionResponse>, Status> {
        let mut stream = request.into_inner();
        let mut count = 0;
        while let Some(req) = stream.message().await? {
            // Lock per item so other RPCs can interleave with a long stream.
            let conn = self.us_db.lock().await;
            match db::update_emotion(&conn, req.message_id, &req.emotion, req.confidence) {
                Ok(_) => {
                    count += 1;
                    // Same notes-as-flag behavior as update_emotion above.
                    if req.notes.is_some() {
                        if let Ok(Some(msg)) = db::get_message(&conn, req.message_id) {
                            if let Some(text) = msg.text {
                                let _ = db::insert_training_sample(
                                    &conn,
                                    Some(req.message_id),
                                    &text,
                                    &req.emotion,
                                );
                            }
                        }
                    }
                }
                Err(e) => {
                    error!("Failed to update emotion for message {}: {}", req.message_id, e);
                }
            }
            drop(conn);
        }
        Ok(Response::new(EmotionResponse {
            success: true,
            message: format!("Updated {} emotions", count),
            emotion: None,
        }))
    }
    /// Delete a classification row by primary key; succeeds even when the
    /// row does not exist.
    async fn delete_emotion(
        &self,
        request: Request<DeleteEmotionRequest>,
    ) -> Result<Response<EmotionResponse>, Status> {
        let req = request.into_inner();
        let conn = self.us_db.lock().await;
        match db::delete_emotion(&conn, req.id) {
            Ok(_) => Ok(Response::new(EmotionResponse {
                success: true,
                message: format!("Emotion {} deleted successfully", req.id),
                emotion: None,
            })),
            Err(e) => {
                error!("Database error: {}", e);
                Err(Status::internal("Database error"))
            }
        }
    }
}
/// Convert a database `Emotion` model into its protobuf wire type,
/// flattening chrono timestamps to unix seconds.
fn emotion_to_proto(emotion: crate::models::Emotion) -> Emotion {
    Emotion {
        id: emotion.id,
        message_id: emotion.message_id,
        emotion: emotion.emotion,
        confidence: emotion.confidence,
        model_version: emotion.model_version,
        created_at: emotion.created_at.timestamp(),
        updated_at: emotion.updated_at.timestamp(),
    }
}

View File

@@ -0,0 +1,7 @@
pub mod chat_poller;
pub mod embedding_service;
pub mod emotion_service;
pub use chat_poller::ChatPollerService;
pub use embedding_service::EmbeddingService;
pub use emotion_service::EmotionService;

View File

@@ -0,0 +1,114 @@
use bevy::prelude::*;
use lib::sync::{Syncable, SyncMessage};
use crate::components::*;
/// Bevy plugin for transparent CRDT sync via gossip
///
/// Registers the publish/receive systems on Update. Individual resources
/// still need `add_synced_resource` to get their operation queues.
pub struct SyncPlugin;
impl Plugin for SyncPlugin {
    fn build(&self, app: &mut App) {
        app.add_systems(Update, (
            publish_sync_ops,
            receive_sync_ops,
        ));
    }
}
/// Trait for Bevy resources that can be synced
pub trait SyncedResource: Resource + Syncable + Clone + Send + Sync + 'static {}
/// Queue of sync operations to publish
#[derive(Resource)]
pub struct SyncOpQueue<T: Syncable> {
    pub ops: Vec<T::Operation>,
}
// Hand-written instead of `#[derive(Default)]`: the derive would place a
// spurious `T: Default` bound on the generated impl even though only the
// inner Vec needs a default value, which would make `init_resource` fail
// for synced resources that are not themselves Default.
impl<T: Syncable> Default for SyncOpQueue<T> {
    fn default() -> Self {
        Self { ops: Vec::new() }
    }
}
impl<T: Syncable> SyncOpQueue<T> {
    /// Enqueue an operation for the next publish pass.
    pub fn push(&mut self, op: T::Operation) {
        self.ops.push(op);
    }
}
/// System to publish sync operations to gossip
///
/// Drains the queued operations, wraps each in a sync message, and (once
/// gossip wiring lands) broadcasts the serialized bytes. Does nothing until
/// an `IrohGossipSender` resource has been inserted.
fn publish_sync_ops<T: SyncedResource>(
    mut queue: ResMut<SyncOpQueue<T>>,
    resource: Res<T>,
    sender: Option<Res<IrohGossipSender>>,
) {
    if sender.is_none() || queue.ops.is_empty() {
        return;
    }
    let sender = sender.unwrap();
    // Hold the sender lock across the whole drain. The leading underscore
    // marks the guard as intentionally unused until broadcasting is wired up
    // (the old `sender_guard` binding triggered an unused-variable warning).
    let _sender_guard = sender.sender.lock();
    for op in queue.ops.drain(..) {
        let sync_msg = resource.create_sync_message(op);
        match sync_msg.to_bytes() {
            Ok(bytes) => {
                println!("Publishing sync operation: {} bytes", bytes.len());
                // TODO: Actually send via gossip
                // _sender_guard.broadcast(bytes)?;
            }
            Err(e) => {
                eprintln!("Failed to serialize sync operation: {}", e);
            }
        }
    }
}
/// System to receive and apply sync operations from gossip
///
/// Currently a stub: it only checks that a receiver resource exists. The
/// deserialization/apply pipeline described below is not implemented yet.
fn receive_sync_ops<T: SyncedResource>(
    mut resource: ResMut<T>,
    receiver: Option<Res<IrohGossipReceiver>>,
) {
    if receiver.is_none() {
        return;
    }
    // TODO: Poll receiver for messages
    // For each message:
    // 1. Deserialize SyncMessage<T::Operation>
    // 2. Apply to resource with resource.apply_sync_op(&op)
}
/// Helper to register a synced resource
///
/// Extension trait on `App` so call sites read as
/// `app.add_synced_resource::<MyConfig>()`.
pub trait SyncedResourceExt {
    fn add_synced_resource<T: SyncedResource>(&mut self) -> &mut Self;
}
impl SyncedResourceExt for App {
fn add_synced_resource<T: SyncedResource>(&mut self) -> &mut Self {
self.init_resource::<SyncOpQueue<T>>();
self
}
}
/// Example synced resource used by the plugin smoke test below.
#[cfg(test)]
mod tests {
    use super::*;
    use lib::sync::synced;

    // `#[synced]` rewrites the struct: non-skipped fields are wrapped in
    // `SyncedValue` and a `TestConfigOp` operation enum is generated.
    #[synced]
    pub struct TestConfig {
        pub value: i32,
        #[sync(skip)]
        node_id: String,
    }

    // Marker impls: both traits are implemented with empty bodies here, so
    // TestConfig can be used as a Bevy resource in the sync systems.
    impl Resource for TestConfig {}
    impl SyncedResource for TestConfig {}

    // Smoke test: the plugin and a synced resource can be registered without
    // panicking. Behavior of the queue/publish path is not asserted yet.
    #[test]
    fn test_sync_plugin() {
        let mut app = App::new();
        app.add_plugins(MinimalPlugins);
        app.add_plugins(SyncPlugin);
        app.add_synced_resource::<TestConfig>();
        // TODO: Test that operations are queued and published
    }
}

View File

@@ -0,0 +1,12 @@
use bevy::prelude::*;
use crate::components::*;
/// System: Poll chat.db for new messages using Bevy's task system
///
/// Placeholder: once implemented this will replace the tokio-based chat
/// poller with Bevy's `AsyncComputeTaskPool`. The parameters are declared
/// now so the system's resource access is already registered with Bevy.
pub fn poll_chat_db(
    _config: Res<AppConfig>,
    _db: Res<Database>,
) {
    // TODO: Use Bevy's AsyncComputeTaskPool to poll chat.db
    // This will replace the tokio::spawn chat poller
}

View File

@@ -0,0 +1,116 @@
use bevy::prelude::*;
use parking_lot::Mutex;
use std::sync::Arc;
use crate::components::*;
/// System: Poll the gossip init task and insert resources when complete.
///
/// Uses a non-blocking `poll_once`, so the Bevy schedule is never stalled by
/// the async initialization. Once the task resolves — successfully or not —
/// the `GossipInitTask` resource is removed so a finished future is never
/// polled again on a later frame (the original code leaked the task when the
/// init returned `None`, re-polling a completed future every frame).
pub fn poll_gossip_init(
    mut commands: Commands,
    init_task: Option<ResMut<GossipInitTask>>,
) {
    // Nothing to poll until the init task resource exists.
    let Some(mut task) = init_task else { return };

    // Check if the task is finished (non-blocking).
    let Some(result) =
        bevy::tasks::block_on(bevy::tasks::futures_lite::future::poll_once(&mut task.0))
    else {
        return;
    };

    // The task has completed; remove it regardless of outcome so this system
    // becomes a no-op from the next frame on.
    commands.remove_resource::<GossipInitTask>();

    let Some((endpoint, gossip, router, sender, receiver)) = result else {
        return;
    };

    println!("Inserting gossip resources");

    // Insert all the resources the rest of the sync systems gate on.
    commands.insert_resource(IrohEndpoint {
        endpoint,
        node_id: "TODO".to_string(), // TODO: Figure out how to get node_id in iroh 0.95
    });
    commands.insert_resource(IrohGossipHandle { gossip });
    commands.insert_resource(IrohRouter { router });
    commands.insert_resource(IrohGossipSender {
        sender: Arc::new(Mutex::new(sender)),
    });
    commands.insert_resource(IrohGossipReceiver {
        receiver: Arc::new(Mutex::new(receiver)),
    });
}
/// System: Detect new messages in SQLite that need to be published to gossip
///
/// Placeholder. `_last_synced` is a Bevy `Local`, so the high-water mark
/// (last synced rowid) will persist between runs of this system once the
/// query is implemented.
pub fn detect_new_messages(
    _db: Res<Database>,
    _last_synced: Local<i64>,
    _publish_events: MessageWriter<PublishMessageEvent>,
) {
    // TODO: Query SQLite for messages with rowid > last_synced
    // When we detect new messages, we'll send PublishMessageEvent
}
/// System: Publish messages to gossip when PublishMessageEvent is triggered.
///
/// Serializes each pending message into a `SyncMessage` stamped with this
/// node's id; the actual gossip broadcast is still TODO (direct async
/// broadcasting from a Bevy system needs a channel or dedicated task).
pub fn publish_to_gossip(
    mut events: MessageReader<PublishMessageEvent>,
    sender: Option<Res<IrohGossipSender>>,
    endpoint: Option<Res<IrohEndpoint>>,
) {
    // Gossip not initialized yet, skip. `_sender` is unused until real
    // broadcasting lands, but its presence gates publishing.
    let (Some(_sender), Some(endpoint)) = (sender, endpoint) else {
        return;
    };

    for event in events.read() {
        println!("Publishing message {} to gossip", event.message.rowid);

        // Create sync message
        let sync_message = SyncMessage {
            message: event.message.clone(),
            sync_timestamp: chrono::Utc::now().timestamp(),
            publisher_node_id: endpoint.node_id.clone(),
        };

        // Serialize the message
        match serialize_sync_message(&sync_message) {
            Ok(bytes) => {
                // TODO: Publish to gossip
                // For now, just log that we would publish
                println!("Would publish {} bytes to gossip", bytes.len());
                // Note: Direct async broadcasting from Bevy systems is tricky due to Sync requirements
                // We'll need to use a different approach, possibly with channels or a dedicated task
            }
            Err(e) => {
                eprintln!("Failed to serialize sync message: {}", e);
            }
        }
    }
}
/// System: Receive messages from gossip.
///
/// Placeholder until a long-running listener task forwards gossip events
/// into Bevy messages.
pub fn receive_from_gossip(
    mut _gossip_events: MessageWriter<GossipMessageReceived>,
    receiver: Option<Res<IrohGossipReceiver>>,
) {
    // Skip silently while gossip is still initializing.
    let Some(_receiver) = receiver else { return };

    // TODO: Implement proper async message reception
    // This will require spawning a long-running task that listens for gossip events
    // and sends them as Bevy messages. For now, this is a placeholder.
}
/// System: Save received gossip messages to SQLite.
///
/// Currently only logs each received message; persistence is TODO.
pub fn save_gossip_messages(
    mut events: MessageReader<GossipMessageReceived>,
    _db: Res<Database>,
) {
    for received in events.read() {
        let sync = &received.sync_message;
        println!("Received message {} from gossip (published by {})",
            sync.message.rowid,
            sync.publisher_node_id);
        // TODO: Save to SQLite if we don't already have it
    }
}

View File

@@ -0,0 +1,7 @@
pub mod database;
pub mod gossip;
pub mod setup;
pub use database::*;
pub use gossip::*;
pub use setup::*;

View File

@@ -0,0 +1,22 @@
use bevy::prelude::*;
use bevy::tasks::AsyncComputeTaskPool;
use crate::components::*;
/// Startup system: Initialize database
///
/// Currently just logs that the `Database` resource exists; requiring
/// `Res<Database>` here makes startup fail fast if the resource was never
/// inserted.
pub fn setup_database(_db: Res<Database>) {
    println!("Database resource initialized");
}
/// Startup system: Initialize Iroh gossip
///
/// Async initialization is deferred (see TODO); this system currently only
/// logs the configured topic. `Commands` is kept (underscore-prefixed) for
/// when resource insertion is implemented, and the dead `topic_id` local was
/// removed to silence the unused-variable warning.
pub fn setup_gossip(mut _commands: Commands, topic: Res<GossipTopic>) {
    println!("Setting up Iroh gossip for topic: {:?}", topic.0);

    // TODO: Initialize gossip properly
    // For now, skip async initialization due to Sync requirements in Bevy tasks
    // We'll need to use a different initialization strategy
    println!("Gossip initialization skipped (TODO: implement proper async init)");
}

View File

@@ -0,0 +1,15 @@
[package]
name = "sync-macros"
version = "0.1.0"
edition.workspace = true
[lib]
proc-macro = true
[dependencies]
syn = { version = "2.0", features = ["full"] }
quote = "1.0"
proc-macro2 = "1.0"
[dev-dependencies]
lib = { path = "../lib" }

View File

@@ -0,0 +1,345 @@
use proc_macro::TokenStream;
use quote::{quote, format_ident};
use syn::{parse_macro_input, DeriveInput, Data, Fields, Type, ItemStruct};
/// Attribute macro for transparent CRDT sync
///
/// Transforms your struct to use CRDTs internally while keeping the API simple.
/// Every named field is wrapped in `lib::sync::SyncedValue` unless marked
/// `#[sync(skip)]`; the macro generates getters, op-producing setters, a
/// `<Name>Op` enum, `apply_op`/`merge`, and a `lib::sync::Syncable` impl.
/// The struct is expected to have a skipped `node_id: String` field — both
/// the generated `new()` and `node_id()` rely on it.
///
/// # Example
/// ```ignore
/// #[synced]
/// struct EmotionGradientConfig {
///     canvas_width: f32,   // Becomes SyncedValue<f32> internally
///     canvas_height: f32,  // Auto-generates getters/setters
///
///     #[sync(skip)]
///     node_id: String,     // Not synced
/// }
///
/// // Use it like a normal struct:
/// let mut config = EmotionGradientConfig::new("node1".into());
/// config.set_canvas_width(1024.0);              // Auto-generates sync operation
/// println!("Width: {}", config.canvas_width()); // Transparent access
/// ```
#[proc_macro_attribute]
pub fn synced(_attr: TokenStream, item: TokenStream) -> TokenStream {
    let input = parse_macro_input!(item as ItemStruct);
    let name = &input.ident;
    let vis = &input.vis;
    // Companion operation enum, named `<StructName>Op`.
    let op_enum_name = format_ident!("{}Op", name);

    let fields = match &input.fields {
        Fields::Named(fields) => &fields.named,
        _ => panic!("synced only supports structs with named fields"),
    };

    // Per-field token fragments, accumulated in the loop below and stitched
    // together in the final `quote!` expansion.
    let mut internal_fields = Vec::new(); // rewritten struct fields
    let mut field_getters = Vec::new();   // `fn field(&self) -> &T`
    let mut field_setters = Vec::new();   // `fn set_field(&mut self, T) -> Op`
    let mut op_variants = Vec::new();     // variants of the Op enum
    let mut apply_arms = Vec::new();      // match arms for `apply_op`
    let mut merge_code = Vec::new();      // statements for `merge`
    let mut new_params = Vec::new();      // parameters of the generated `new`
    let mut new_init = Vec::new();        // field initializers inside `new`

    for field in fields {
        let field_name = field.ident.as_ref().unwrap();
        let field_vis = &field.vis;
        let field_type = &field.ty;

        // Check if field should be skipped (`#[sync(skip)]`).
        let should_skip = field.attrs.iter().any(|attr| {
            attr.path().is_ident("sync")
                && attr
                    .parse_args::<syn::Ident>()
                    .map(|i| i == "skip")
                    .unwrap_or(false)
        });

        if should_skip {
            // Keep as-is, no wrapping; the field still flows through `new()`.
            internal_fields.push(quote! {
                #field_vis #field_name: #field_type
            });
            new_params.push(quote! { #field_name: #field_type });
            new_init.push(quote! { #field_name });
            continue;
        }

        // Wrap in SyncedValue so the field carries its own LWW metadata.
        internal_fields.push(quote! {
            #field_name: lib::sync::SyncedValue<#field_type>
        });

        // Generate getter (borrowing accessor with the field's own name).
        field_getters.push(quote! {
            #field_vis fn #field_name(&self) -> &#field_type {
                self.#field_name.get()
            }
        });

        // Generate setter that returns operation.
        let setter_name = format_ident!("set_{}", field_name);
        // Variant name is `Set` + field name with only its first character
        // upper-cased; underscores are preserved (e.g. `Setcanvas_width`-style
        // names trigger a non-camel-case warning but compile).
        let op_variant = format_ident!(
            "Set{}",
            field_name
                .to_string()
                .chars()
                .enumerate()
                .map(|(i, c)| if i == 0 {
                    c.to_ascii_uppercase()
                } else {
                    c
                })
                .collect::<String>()
        );

        field_setters.push(quote! {
            #field_vis fn #setter_name(&mut self, value: #field_type) -> #op_enum_name {
                // Build the op first (capturing value + timestamp + origin),
                // then apply the same write locally.
                let op = #op_enum_name::#op_variant {
                    value: value.clone(),
                    timestamp: chrono::Utc::now(),
                    node_id: self.node_id().clone(),
                };
                self.#field_name.set(value, self.node_id().clone());
                op
            }
        });

        // Generate operation variant carrying the LWW tuple.
        op_variants.push(quote! {
            #op_variant {
                value: #field_type,
                timestamp: chrono::DateTime<chrono::Utc>,
                node_id: String,
            }
        });

        // Generate apply arm: last-writer-wins application of a remote op.
        apply_arms.push(quote! {
            #op_enum_name::#op_variant { value, timestamp, node_id } => {
                self.#field_name.apply_lww(value.clone(), timestamp.clone(), node_id.clone());
            }
        });

        // Generate merge code (field-wise CRDT merge).
        merge_code.push(quote! {
            self.#field_name.merge(&other.#field_name);
        });

        // Add to new() parameters. The initializer clones `node_id`, which is
        // why the struct must carry a skipped `node_id` parameter/field.
        new_params.push(quote! { #field_name: #field_type });
        new_init.push(quote! {
            #field_name: lib::sync::SyncedValue::new(#field_name, node_id.clone())
        });
    }

    let expanded = quote! {
        /// Sync operations enum
        #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
        #[serde(tag = "type")]
        #vis enum #op_enum_name {
            #(#op_variants),*
        }

        impl #op_enum_name {
            pub fn to_bytes(&self) -> anyhow::Result<Vec<u8>> {
                Ok(serde_json::to_vec(self)?)
            }

            pub fn from_bytes(bytes: &[u8]) -> anyhow::Result<Self> {
                Ok(serde_json::from_slice(bytes)?)
            }
        }

        #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
        #vis struct #name {
            #(#internal_fields),*
        }

        impl #name {
            #vis fn new(#(#new_params),*) -> Self {
                Self {
                    #(#new_init),*
                }
            }

            /// Transparent field accessors
            #(#field_getters)*

            /// Field setters that generate sync operations
            #(#field_setters)*

            /// Apply a sync operation from another node
            #vis fn apply_op(&mut self, op: &#op_enum_name) {
                match op {
                    #(#apply_arms),*
                }
            }

            /// Merge state from another instance
            #vis fn merge(&mut self, other: &Self) {
                #(#merge_code)*
            }
        }

        impl lib::sync::Syncable for #name {
            type Operation = #op_enum_name;

            fn apply_sync_op(&mut self, op: &Self::Operation) {
                self.apply_op(op);
            }

            fn node_id(&self) -> &lib::sync::NodeId {
                // Assume there's a node_id field marked with #[sync(skip)]
                &self.node_id
            }
        }
    };

    TokenStream::from(expanded)
}
/// Old derive macro - kept for backwards compatibility
///
/// Unlike `#[synced]`, this derive does not rewrite the struct: it assumes
/// non-skipped fields are already `SyncedValue`s and only generates the
/// `<Name>Op` enum, `apply_op`/`merge`, the setters, and a `Syncable` impl
/// (without `node_id()`, which must already exist on the type).
///
/// NOTE(review): the generated setter replaces the field with a fresh
/// `SyncedValue::new(...)`, whereas `#[synced]` calls `.set(...)` — confirm
/// the two paths update LWW metadata the same way.
#[proc_macro_derive(Synced, attributes(sync))]
pub fn derive_synced(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = &input.ident;
    // Companion operation enum, named `<StructName>Op`.
    let op_enum_name = format_ident!("{}Op", name);

    let fields = match &input.data {
        Data::Struct(data) => match &data.fields {
            Fields::Named(fields) => &fields.named,
            _ => panic!("Synced only supports structs with named fields"),
        },
        _ => panic!("Synced only supports structs"),
    };

    // Per-field token fragments for the final expansion.
    let mut field_ops = Vec::new();       // variants of the Op enum
    let mut apply_arms = Vec::new();      // match arms for `apply_op`
    let mut setter_methods = Vec::new();  // `set_<field>` methods
    let mut merge_code = Vec::new();      // statements for `merge`

    for field in fields {
        let field_name = field.ident.as_ref().unwrap();
        let field_type = &field.ty;

        // Check if field should be skipped (`#[sync(skip)]`).
        let should_skip = field.attrs.iter()
            .any(|attr| {
                attr.path().is_ident("sync") &&
                attr.parse_args::<syn::Ident>()
                    .map(|i| i == "skip")
                    .unwrap_or(false)
            });
        if should_skip {
            continue;
        }

        // Variant name: `Set` + field name with only the first character
        // upper-cased (same scheme as `#[synced]`).
        let op_variant = format_ident!("Set{}",
            field_name.to_string()
                .chars()
                .enumerate()
                .map(|(i, c)| if i == 0 { c.to_ascii_uppercase() } else { c })
                .collect::<String>()
        );
        let setter_name = format_ident!("set_{}", field_name);

        // Determine CRDT strategy based on type (currently always "lww").
        let crdt_strategy = get_crdt_strategy(field_type);
        match crdt_strategy.as_str() {
            "lww" => {
                // LWW for simple types: the op carries value + timestamp +
                // originating node id.
                field_ops.push(quote! {
                    #op_variant {
                        value: #field_type,
                        timestamp: chrono::DateTime<chrono::Utc>,
                        node_id: String,
                    }
                });
                apply_arms.push(quote! {
                    #op_enum_name::#op_variant { value, timestamp, node_id } => {
                        self.#field_name.apply_lww(value.clone(), timestamp.clone(), node_id.clone());
                    }
                });
                setter_methods.push(quote! {
                    pub fn #setter_name(&mut self, value: #field_type) -> #op_enum_name {
                        let op = #op_enum_name::#op_variant {
                            value: value.clone(),
                            timestamp: chrono::Utc::now(),
                            node_id: self.node_id().clone(),
                        };
                        self.#field_name = lib::sync::SyncedValue::new(value, self.node_id().clone());
                        op
                    }
                });
                merge_code.push(quote! {
                    self.#field_name.merge(&other.#field_name);
                });
            }
            _ => {
                // Default to LWW
                // NOTE(review): this arm generates nothing — a field with an
                // unknown strategy silently gets no op/setter/merge code.
            }
        }
    }

    let expanded = quote! {
        /// Auto-generated sync operations enum
        #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
        #[serde(tag = "type")]
        pub enum #op_enum_name {
            #(#field_ops),*
        }

        impl #op_enum_name {
            pub fn to_bytes(&self) -> anyhow::Result<Vec<u8>> {
                Ok(serde_json::to_vec(self)?)
            }

            pub fn from_bytes(bytes: &[u8]) -> anyhow::Result<Self> {
                Ok(serde_json::from_slice(bytes)?)
            }
        }

        impl #name {
            /// Apply a sync operation from another node
            pub fn apply_op(&mut self, op: &#op_enum_name) {
                match op {
                    #(#apply_arms),*
                }
            }

            /// Merge state from another instance
            pub fn merge(&mut self, other: &Self) {
                #(#merge_code)*
            }

            /// Auto-generated setter methods that create sync ops
            #(#setter_methods)*
        }

        impl lib::sync::Syncable for #name {
            type Operation = #op_enum_name;

            fn apply_sync_op(&mut self, op: &Self::Operation) {
                self.apply_op(op);
            }
        }
    };

    TokenStream::from(expanded)
}
/// Determine CRDT strategy based on field type.
///
/// Every field currently uses last-writer-wins; per-type strategies are
/// future work.
fn get_crdt_strategy(_ty: &Type) -> String {
    // TODO: Detect HashMap -> use Map, Vec -> use ORSet, etc.
    String::from("lww")
}