initial commit

Signed-off-by: Sienna Meridian Satterwhite <sienna@r3t.io>
This commit is contained in:
2025-11-15 23:42:12 +00:00
commit 2bad250a04
47 changed files with 14645 additions and 0 deletions

View File

@@ -0,0 +1 @@
// Asset loading and management will go here

View File

@@ -0,0 +1,14 @@
use bevy::prelude::*;
use parking_lot::Mutex;
use rusqlite::Connection;
use std::sync::Arc;
use crate::config::Config;
/// Bevy resource wrapping application configuration
#[derive(Resource)]
pub struct AppConfig(pub Config);
/// Bevy resource wrapping database connection
///
/// The connection sits behind `Arc<parking_lot::Mutex>` so Bevy systems and
/// background tasks can share a single SQLite handle.
#[derive(Resource)]
pub struct Database(pub Arc<Mutex<Connection>>);

View File

@@ -0,0 +1,87 @@
use bevy::prelude::*;
use iroh::protocol::Router;
use iroh::Endpoint;
use iroh_gossip::api::{GossipReceiver, GossipSender};
use iroh_gossip::net::Gossip;
use iroh_gossip::proto::TopicId;
use parking_lot::Mutex;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
/// Message envelope for gossip sync
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SyncMessage {
    /// The actual message from iMessage
    pub message: lib::Message,
    /// Timestamp when this was published to gossip
    /// (presumably unix epoch seconds, matching the rest of the codebase —
    /// confirm against the publishing system)
    pub sync_timestamp: i64,
    /// ID of the node that published this
    pub publisher_node_id: String,
}
/// Bevy resource wrapping the gossip handle
#[derive(Resource, Clone)]
pub struct IrohGossipHandle {
    pub gossip: Gossip,
}
/// Bevy resource wrapping the gossip sender
///
/// `Arc<parking_lot::Mutex>` lets multiple (non-exclusive) Bevy systems
/// share the single sender handle.
#[derive(Resource)]
pub struct IrohGossipSender {
    pub sender: Arc<Mutex<GossipSender>>,
}
/// Bevy resource wrapping the gossip receiver
#[derive(Resource)]
pub struct IrohGossipReceiver {
    pub receiver: Arc<Mutex<GossipReceiver>>,
}
/// Bevy resource with Iroh router
///
/// Stored as a resource so the router (which accepts incoming gossip
/// connections) stays alive for the lifetime of the app.
#[derive(Resource)]
pub struct IrohRouter {
    pub router: Router,
}
/// Bevy resource with Iroh endpoint
#[derive(Resource, Clone)]
pub struct IrohEndpoint {
    pub endpoint: Endpoint,
    /// String form of this endpoint's node id.
    pub node_id: String,
}
/// Bevy resource for gossip topic ID
#[derive(Resource)]
pub struct GossipTopic(pub TopicId);
/// Bevy resource for tracking gossip initialization task
///
/// Wraps the async init task; presumably the inner `Option` is `None` when
/// initialization failed — confirm against the system that polls this task.
#[derive(Resource)]
pub struct GossipInitTask(pub bevy::tasks::Task<Option<(
    Endpoint,
    Gossip,
    Router,
    GossipSender,
    GossipReceiver,
)>>);
/// Bevy message: a new message that needs to be published to gossip
#[derive(Message, Clone, Debug)]
pub struct PublishMessageEvent {
    /// The locally-detected message to broadcast to peers.
    pub message: lib::Message,
}
/// Bevy message: a message received from gossip that needs to be saved to SQLite
#[derive(Message, Clone, Debug)]
pub struct GossipMessageReceived {
    /// The decoded envelope as it arrived from a remote peer.
    pub sync_message: SyncMessage,
}
/// Helper to serialize a sync message
pub fn serialize_sync_message(msg: &SyncMessage) -> anyhow::Result<Vec<u8>> {
Ok(serde_json::to_vec(msg)?)
}
/// Helper to deserialize a sync message
pub fn deserialize_sync_message(data: &[u8]) -> anyhow::Result<SyncMessage> {
Ok(serde_json::from_slice(data)?)
}

View File

@@ -0,0 +1,5 @@
pub mod database;
pub mod gossip;
pub use database::*;
pub use gossip::*;

View File

@@ -0,0 +1,84 @@
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::Path;
/// Top-level application configuration, loaded from `config.toml`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    pub database: DatabaseConfig,
    pub services: ServicesConfig,
    pub models: ModelsConfig,
    pub tailscale: TailscaleConfig,
    pub grpc: GrpcConfig,
}
/// Locations of the application database and the iMessage chat.db.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DatabaseConfig {
    /// Path to our own SQLite database.
    pub path: String,
    /// Path to the (read-only) iMessage chat.db.
    pub chat_db_path: String,
}
/// Tuning knobs for the background services.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ServicesConfig {
    /// Delay between chat.db polls, in milliseconds.
    pub poll_interval_ms: u64,
    /// Fraction (0.0–1.0) of classified messages copied into the training set.
    pub training_set_sample_rate: f64,
}
/// Identifiers of the ML models used by the services.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelsConfig {
    pub embedding_model: String,
    pub emotion_model: String,
}
/// Tailscale node settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TailscaleConfig {
    pub hostname: String,
    pub state_dir: String,
}
/// gRPC server settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GrpcConfig {
    /// TCP port the gRPC server listens on.
    pub port: u16,
}
impl Config {
pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Self> {
let content = fs::read_to_string(path.as_ref())
.context(format!("Failed to read config file: {:?}", path.as_ref()))?;
let config: Config = toml::from_str(&content)
.context("Failed to parse config file")?;
Ok(config)
}
pub fn default_config() -> Self {
Self {
database: DatabaseConfig {
path: "./us.db".to_string(),
chat_db_path: "./crates/lib/chat.db".to_string(),
},
services: ServicesConfig {
poll_interval_ms: 1000,
training_set_sample_rate: 0.05,
},
models: ModelsConfig {
embedding_model: "Qwen/Qwen3-Embedding-0.6B".to_string(),
emotion_model: "SamLowe/roberta-base-go_emotions".to_string(),
},
tailscale: TailscaleConfig {
hostname: "lonni-daemon".to_string(),
state_dir: "./tailscale-state".to_string(),
},
grpc: GrpcConfig {
port: 50051,
},
}
}
pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<()> {
let content = toml::to_string_pretty(self)
.context("Failed to serialize config")?;
fs::write(path.as_ref(), content)
.context(format!("Failed to write config file: {:?}", path.as_ref()))?;
Ok(())
}
}

View File

@@ -0,0 +1,5 @@
pub mod operations;
pub mod schema;
pub use operations::*;
pub use schema::*;

View File

@@ -0,0 +1,321 @@
use crate::db::schema::{deserialize_embedding, serialize_embedding};
use crate::models::*;
use chrono::{TimeZone, Utc};
use rusqlite::{params, Connection, OptionalExtension, Result, Row};
/// Insert a new message into the database, returning its `messages.id`.
///
/// The insert is idempotent on `chat_db_rowid`. On a conflicting insert,
/// `ON CONFLICT ... DO NOTHING` does NOT update `last_insert_rowid()`, so
/// the previous code returned a stale/unrelated id for duplicates; we now
/// detect the no-op insert via the affected-row count and look up the id
/// of the pre-existing row instead.
pub fn insert_message(conn: &Connection, msg: &lib::Message) -> Result<i64> {
    let timestamp = msg.date.map(|dt| dt.timestamp());
    let created_at = Utc::now().timestamp();
    let inserted = conn.execute(
        "INSERT INTO messages (chat_db_rowid, text, timestamp, is_from_me, created_at)
         VALUES (?1, ?2, ?3, ?4, ?5)
         ON CONFLICT(chat_db_rowid) DO NOTHING",
        params![msg.rowid, msg.text, timestamp, msg.is_from_me, created_at],
    )?;
    if inserted > 0 {
        Ok(conn.last_insert_rowid())
    } else {
        // Conflict path: return the id of the row that already existed.
        conn.query_row(
            "SELECT id FROM messages WHERE chat_db_rowid = ?1",
            params![msg.rowid],
            |row| row.get(0),
        )
    }
}
/// Look up our internal `messages.id` for a chat.db rowid, if present.
pub fn get_message_id_by_chat_rowid(conn: &Connection, chat_db_rowid: i64) -> Result<Option<i64>> {
    let lookup = conn.query_row(
        "SELECT id FROM messages WHERE chat_db_rowid = ?1",
        params![chat_db_rowid],
        |row| row.get(0),
    );
    lookup.optional()
}
/// Fetch a single message by its internal id.
pub fn get_message(conn: &Connection, id: i64) -> Result<Message> {
    let sql =
        "SELECT id, chat_db_rowid, text, timestamp, is_from_me, created_at FROM messages WHERE id = ?1";
    conn.query_row(sql, params![id], map_message_row)
}
/// Convert one `messages` row into a [`Message`].
///
/// Column order must match the SELECT lists used by the callers:
/// id, chat_db_rowid, text, timestamp, is_from_me, created_at.
fn map_message_row(row: &Row) -> Result<Message> {
    let timestamp: Option<i64> = row.get(3)?;
    let created_at: i64 = row.get(5)?;
    Ok(Message {
        id: row.get(0)?,
        chat_db_rowid: row.get(1)?,
        text: row.get(2)?,
        // NOTE(review): `timestamp_opt(..).unwrap()` panics on out-of-range
        // seconds. Values are written by this crate so that should not occur,
        // but a corrupted row would abort the process — consider `.single()`.
        timestamp: timestamp.map(|ts| Utc.timestamp_opt(ts, 0).unwrap()),
        is_from_me: row.get(4)?,
        created_at: Utc.timestamp_opt(created_at, 0).unwrap(),
    })
}
/// Store the embedding vector for a message; returns the new row id.
pub fn insert_message_embedding(
    conn: &Connection,
    message_id: i64,
    embedding: &[f32],
    model_name: &str,
) -> Result<i64> {
    let blob = serialize_embedding(embedding);
    let now = Utc::now().timestamp();
    let sql = "INSERT INTO message_embeddings (message_id, embedding, model_name, created_at)
         VALUES (?1, ?2, ?3, ?4)";
    conn.execute(sql, params![message_id, blob, model_name, now])?;
    Ok(conn.last_insert_rowid())
}
/// Fetch the stored embedding for a message, if one exists.
pub fn get_message_embedding(conn: &Connection, message_id: i64) -> Result<Option<MessageEmbedding>> {
    let sql = "SELECT id, message_id, embedding, model_name, created_at
         FROM message_embeddings WHERE message_id = ?1";
    conn.query_row(sql, params![message_id], |row| {
        let blob: Vec<u8> = row.get(2)?;
        let created: i64 = row.get(4)?;
        Ok(MessageEmbedding {
            id: row.get(0)?,
            message_id: row.get(1)?,
            embedding: deserialize_embedding(&blob),
            model_name: row.get(3)?,
            created_at: Utc.timestamp_opt(created, 0).unwrap(),
        })
    })
    .optional()
}
/// Insert a word embedding, or return the id of the existing row.
///
/// The table has `UNIQUE(word)` and the insert uses `DO NOTHING`, so a
/// duplicate word leaves `last_insert_rowid()` stale — the previous code
/// returned a wrong id in that case. We now check the affected-row count
/// and look up the existing row on conflict, matching the "insert or get"
/// contract in the doc comment.
pub fn insert_word_embedding(
    conn: &Connection,
    word: &str,
    embedding: &[f32],
    model_name: &str,
) -> Result<i64> {
    let embedding_bytes = serialize_embedding(embedding);
    let created_at = Utc::now().timestamp();
    let inserted = conn.execute(
        "INSERT INTO word_embeddings (word, embedding, model_name, created_at)
         VALUES (?1, ?2, ?3, ?4)
         ON CONFLICT(word) DO NOTHING",
        params![word, embedding_bytes, model_name, created_at],
    )?;
    if inserted > 0 {
        Ok(conn.last_insert_rowid())
    } else {
        // Conflict path: the word was already present; return its id.
        conn.query_row(
            "SELECT id FROM word_embeddings WHERE word = ?1",
            params![word],
            |row| row.get(0),
        )
    }
}
/// Fetch the stored embedding for a word, if one exists.
pub fn get_word_embedding(conn: &Connection, word: &str) -> Result<Option<WordEmbedding>> {
    let sql = "SELECT id, word, embedding, model_name, created_at
         FROM word_embeddings WHERE word = ?1";
    conn.query_row(sql, params![word], |row| {
        let blob: Vec<u8> = row.get(2)?;
        let created: i64 = row.get(4)?;
        Ok(WordEmbedding {
            id: row.get(0)?,
            word: row.get(1)?,
            embedding: deserialize_embedding(&blob),
            model_name: row.get(3)?,
            created_at: Utc.timestamp_opt(created, 0).unwrap(),
        })
    })
    .optional()
}
/// Store an emotion classification for a message; returns the new row id.
pub fn insert_emotion(
    conn: &Connection,
    message_id: i64,
    emotion: &str,
    confidence: f64,
    model_version: &str,
) -> Result<i64> {
    let timestamp = Utc::now().timestamp();
    let sql = "INSERT INTO emotions (message_id, emotion, confidence, model_version, created_at, updated_at)
         VALUES (?1, ?2, ?3, ?4, ?5, ?6)";
    conn.execute(
        sql,
        params![message_id, emotion, confidence, model_version, timestamp, timestamp],
    )?;
    Ok(conn.last_insert_rowid())
}
/// Overwrite the stored emotion/confidence for a message and bump
/// `updated_at`.
pub fn update_emotion(
    conn: &Connection,
    message_id: i64,
    emotion: &str,
    confidence: f64,
) -> Result<()> {
    let now = Utc::now().timestamp();
    let sql = "UPDATE emotions SET emotion = ?1, confidence = ?2, updated_at = ?3
         WHERE message_id = ?4";
    conn.execute(sql, params![emotion, confidence, now, message_id])
        .map(|_| ())
}
/// Fetch the emotion classification for a message, if any.
pub fn get_emotion_by_message_id(conn: &Connection, message_id: i64) -> Result<Option<Emotion>> {
    let sql = "SELECT id, message_id, emotion, confidence, model_version, created_at, updated_at
         FROM emotions WHERE message_id = ?1";
    conn.query_row(sql, params![message_id], map_emotion_row)
        .optional()
}
/// Fetch an emotion classification by its primary key, if any.
pub fn get_emotion_by_id(conn: &Connection, id: i64) -> Result<Option<Emotion>> {
    let sql = "SELECT id, message_id, emotion, confidence, model_version, created_at, updated_at
         FROM emotions WHERE id = ?1";
    conn.query_row(sql, params![id], map_emotion_row).optional()
}
/// List all emotions with optional filters
pub fn list_emotions(
conn: &Connection,
emotion_filter: Option<&str>,
min_confidence: Option<f64>,
limit: Option<i32>,
offset: Option<i32>,
) -> Result<Vec<Emotion>> {
let mut query = String::from(
"SELECT id, message_id, emotion, confidence, model_version, created_at, updated_at
FROM emotions WHERE 1=1"
);
if emotion_filter.is_some() {
query.push_str(" AND emotion = ?1");
}
if min_confidence.is_some() {
query.push_str(" AND confidence >= ?2");
}
query.push_str(" ORDER BY created_at DESC");
if limit.is_some() {
query.push_str(" LIMIT ?3");
}
if offset.is_some() {
query.push_str(" OFFSET ?4");
}
let mut stmt = conn.prepare(&query)?;
let emotions = stmt
.query_map(
params![
emotion_filter.unwrap_or(""),
min_confidence.unwrap_or(0.0),
limit.unwrap_or(1000),
offset.unwrap_or(0),
],
map_emotion_row,
)?
.collect::<Result<Vec<_>>>()?;
Ok(emotions)
}
/// Remove an emotion row by primary key (no-op when absent).
pub fn delete_emotion(conn: &Connection, id: i64) -> Result<()> {
    conn.execute("DELETE FROM emotions WHERE id = ?1", params![id])
        .map(|_| ())
}
/// Total number of rows in the `emotions` table.
pub fn count_emotions(conn: &Connection) -> Result<i32> {
    let sql = "SELECT COUNT(*) FROM emotions";
    conn.query_row(sql, [], |row| row.get(0))
}
/// Convert one `emotions` row into an [`Emotion`].
///
/// Column order must match the SELECT lists used by the callers:
/// id, message_id, emotion, confidence, model_version, created_at, updated_at.
fn map_emotion_row(row: &Row) -> Result<Emotion> {
    let created_at: i64 = row.get(5)?;
    let updated_at: i64 = row.get(6)?;
    Ok(Emotion {
        id: row.get(0)?,
        message_id: row.get(1)?,
        emotion: row.get(2)?,
        confidence: row.get(3)?,
        model_version: row.get(4)?,
        // NOTE(review): `timestamp_opt(..).unwrap()` panics on out-of-range
        // seconds; values are written by this crate so this should not occur,
        // but a corrupted row would abort — consider `.single()`.
        created_at: Utc.timestamp_opt(created_at, 0).unwrap(),
        updated_at: Utc.timestamp_opt(updated_at, 0).unwrap(),
    })
}
/// Add a labelled sample to the emotion training set; returns the row id.
pub fn insert_training_sample(
    conn: &Connection,
    message_id: Option<i64>,
    text: &str,
    expected_emotion: &str,
) -> Result<i64> {
    let timestamp = Utc::now().timestamp();
    let sql = "INSERT INTO emotions_training_set (message_id, text, expected_emotion, created_at, updated_at)
         VALUES (?1, ?2, ?3, ?4, ?5)";
    conn.execute(
        sql,
        params![message_id, text, expected_emotion, timestamp, timestamp],
    )?;
    Ok(conn.last_insert_rowid())
}
/// Read a value from the `daemon_state` key/value table.
pub fn get_state(conn: &Connection, key: &str) -> Result<Option<String>> {
    let sql = "SELECT value FROM daemon_state WHERE key = ?1";
    conn.query_row(sql, params![key], |row| row.get(0)).optional()
}
/// Upsert a value into the `daemon_state` key/value table.
pub fn set_state(conn: &Connection, key: &str, value: &str) -> Result<()> {
    let now = Utc::now().timestamp();
    let sql = "INSERT INTO daemon_state (key, value, updated_at)
         VALUES (?1, ?2, ?3)
         ON CONFLICT(key) DO UPDATE SET value = ?2, updated_at = ?3";
    conn.execute(sql, params![key, value, now]).map(|_| ())
}
/// Read the last processed chat.db rowid, defaulting to 0 when the key is
/// unset or its value does not parse.
pub fn get_last_processed_rowid(conn: &Connection) -> Result<i64> {
    let stored = get_state(conn, "last_processed_rowid")?;
    let rowid = match stored {
        Some(value) => value.parse().unwrap_or(0),
        None => 0,
    };
    Ok(rowid)
}
/// Persist the last processed chat.db rowid under the `daemon_state` key.
pub fn save_last_processed_rowid(conn: &Connection, rowid: i64) -> Result<()> {
    let value = rowid.to_string();
    set_state(conn, "last_processed_rowid", &value)
}

View File

@@ -0,0 +1,207 @@
use rusqlite::{Connection, Result};
use tracing::info;
/// Create all application tables and indexes (idempotent) and attempt to
/// load the sqlite-vec extension.
///
/// # Errors
/// Returns any SQLite error raised while executing the schema DDL. Failure
/// to load the vector extension is logged and tolerated.
pub fn initialize_database(conn: &Connection) -> Result<()> {
    info!("Initializing database schema");
    load_vector_extension(conn);
    // All DDL runs in a single batch; every statement is `IF NOT EXISTS`,
    // so re-running this on every startup is safe. This replaces twelve
    // individual prepare/execute round-trips with one call.
    conn.execute_batch(
        "CREATE TABLE IF NOT EXISTS messages (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            chat_db_rowid INTEGER UNIQUE NOT NULL,
            text TEXT,
            timestamp INTEGER,
            is_from_me BOOLEAN NOT NULL,
            created_at INTEGER NOT NULL
        );
        CREATE INDEX IF NOT EXISTS idx_messages_chat_db_rowid ON messages(chat_db_rowid);
        CREATE TABLE IF NOT EXISTS message_embeddings (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            message_id INTEGER NOT NULL,
            embedding BLOB NOT NULL,
            model_name TEXT NOT NULL,
            created_at INTEGER NOT NULL,
            FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE CASCADE
        );
        CREATE INDEX IF NOT EXISTS idx_message_embeddings_message_id ON message_embeddings(message_id);
        CREATE TABLE IF NOT EXISTS word_embeddings (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            word TEXT UNIQUE NOT NULL,
            embedding BLOB NOT NULL,
            model_name TEXT NOT NULL,
            created_at INTEGER NOT NULL
        );
        CREATE INDEX IF NOT EXISTS idx_word_embeddings_word ON word_embeddings(word);
        CREATE TABLE IF NOT EXISTS emotions (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            message_id INTEGER NOT NULL,
            emotion TEXT NOT NULL,
            confidence REAL NOT NULL,
            model_version TEXT NOT NULL,
            created_at INTEGER NOT NULL,
            updated_at INTEGER NOT NULL,
            FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE CASCADE
        );
        CREATE INDEX IF NOT EXISTS idx_emotions_message_id ON emotions(message_id);
        CREATE INDEX IF NOT EXISTS idx_emotions_emotion ON emotions(emotion);
        CREATE TABLE IF NOT EXISTS emotions_training_set (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            message_id INTEGER,
            text TEXT NOT NULL,
            expected_emotion TEXT NOT NULL,
            actual_emotion TEXT,
            confidence REAL,
            is_validated BOOLEAN NOT NULL DEFAULT 0,
            notes TEXT,
            created_at INTEGER NOT NULL,
            updated_at INTEGER NOT NULL,
            FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE SET NULL
        );
        CREATE INDEX IF NOT EXISTS idx_emotions_training_set_message_id ON emotions_training_set(message_id);
        CREATE INDEX IF NOT EXISTS idx_emotions_training_set_validated ON emotions_training_set(is_validated);
        CREATE TABLE IF NOT EXISTS daemon_state (
            key TEXT PRIMARY KEY,
            value TEXT NOT NULL,
            updated_at INTEGER NOT NULL
        );
        CREATE TABLE IF NOT EXISTS models (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT UNIQUE NOT NULL,
            model_type TEXT NOT NULL,
            version TEXT NOT NULL,
            file_data BLOB NOT NULL,
            metadata TEXT,
            created_at INTEGER NOT NULL,
            updated_at INTEGER NOT NULL
        );
        CREATE INDEX IF NOT EXISTS idx_models_name ON models(name);
        CREATE INDEX IF NOT EXISTS idx_models_type ON models(model_type);",
    )?;
    info!("Database schema initialized successfully");
    Ok(())
}

/// Best-effort load of the sqlite-vec extension (macOS dylib). Any failure
/// is logged and ignored: vector operations simply stay unavailable.
fn load_vector_extension(conn: &Connection) {
    // Load sqlite-vec extension (macOS only)
    let vec_path = "./extensions/vec0.dylib";
    // SAFETY: extension loading is unsafe in rusqlite because a broken or
    // malicious library can corrupt the process; we only load the bundled
    // vec0 extension from a fixed relative path, and disable extension
    // loading again immediately afterwards.
    match unsafe { conn.load_extension_enable() } {
        Ok(_) => {
            match unsafe { conn.load_extension(vec_path, None::<&str>) } {
                Ok(_) => info!("Loaded sqlite-vec extension"),
                Err(e) => info!("Could not load sqlite-vec extension: {}. Vector operations will not be available.", e),
            }
            let _ = unsafe { conn.load_extension_disable() };
        }
        Err(e) => info!("Extension loading not enabled: {}", e),
    }
}
/// Encode an f32 slice as a little-endian byte blob for SQLite storage.
pub fn serialize_embedding(embedding: &[f32]) -> Vec<u8> {
    let mut bytes = Vec::with_capacity(embedding.len() * 4);
    for value in embedding {
        bytes.extend_from_slice(&value.to_le_bytes());
    }
    bytes
}
/// Decode a little-endian byte blob back into an f32 vector.
/// Trailing bytes that do not form a whole f32 are ignored.
pub fn deserialize_embedding(bytes: &[u8]) -> Vec<f32> {
    let mut values = Vec::with_capacity(bytes.len() / 4);
    for chunk in bytes.chunks_exact(4) {
        values.push(f32::from_le_bytes([chunk[0], chunk[1], chunk[2], chunk[3]]));
    }
    values
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Round-trip: serializing then deserializing reproduces the input
    /// (f32 little-endian encoding is lossless, so exact within epsilon).
    #[test]
    fn test_embedding_serialization() {
        let original = vec![1.0f32, 2.5, -3.7, 0.0, 100.5];
        let round_tripped = deserialize_embedding(&serialize_embedding(&original));
        assert_eq!(round_tripped.len(), original.len());
        assert!(original
            .iter()
            .zip(&round_tripped)
            .all(|(a, b)| (a - b).abs() < 1e-6));
    }
}

View File

@@ -0,0 +1 @@
// Entity builders and spawners will go here

View File

@@ -0,0 +1,42 @@
use anyhow::Result;
use iroh::protocol::Router;
use iroh::Endpoint;
use iroh_gossip::api::{GossipReceiver, GossipSender};
use iroh_gossip::net::Gossip;
use iroh_gossip::proto::TopicId;
/// Initialize Iroh endpoint and gossip for the given topic
///
/// Binds a fresh endpoint, spawns the gossip protocol plus a router that
/// accepts incoming gossip connections, subscribes to `topic_id`, and waits
/// for the join to complete before returning all live handles.
///
/// NOTE(review): the subscription uses an empty bootstrap list, so joining
/// appears to depend on a remote peer dialing us — confirm this is the
/// intended standalone-node behavior.
///
/// # Errors
/// Fails when the endpoint cannot bind, the subscription cannot be created,
/// or the join step fails.
pub async fn init_iroh_gossip(
    topic_id: TopicId,
) -> Result<(Endpoint, Gossip, Router, GossipSender, GossipReceiver)> {
    println!("Initializing Iroh endpoint...");
    // Create the Iroh endpoint
    let endpoint = Endpoint::bind().await?;
    println!("Endpoint created");
    // Build the gossip protocol
    println!("Building gossip protocol...");
    let gossip = Gossip::builder().spawn(endpoint.clone());
    // Setup the router to handle incoming connections
    println!("Setting up router...");
    let router = Router::builder(endpoint.clone())
        .accept(iroh_gossip::ALPN, gossip.clone())
        .spawn();
    // Subscribe to the topic (no bootstrap peers for now)
    println!("Subscribing to topic: {:?}", topic_id);
    let bootstrap_peers = vec![];
    let subscribe_handle = gossip.subscribe(topic_id, bootstrap_peers).await?;
    // Split into sender and receiver
    let (sender, mut receiver) = subscribe_handle.split();
    // Wait for join to complete
    println!("Waiting for gossip join...");
    receiver.joined().await?;
    println!("Gossip initialized successfully");
    Ok((endpoint, gossip, router, sender, receiver))
}

96
crates/server/src/main.rs Normal file
View File

@@ -0,0 +1,96 @@
mod assets;
mod components;
mod config;
mod db;
mod entities;
mod iroh_sync;
mod models;
mod services;
mod systems;
use anyhow::{Context, Result};
use bevy::prelude::*;
use config::Config;
use iroh_gossip::proto::TopicId;
use parking_lot::Mutex;
use rusqlite::Connection;
use std::path::Path;
use std::sync::Arc;
// Re-export init function
pub use iroh_sync::init_iroh_gossip;
// Import components and systems
use components::*;
use systems::*;
/// Process entry point: loads configuration, opens the SQLite database,
/// derives the fixed gossip topic, and runs a headless Bevy app whose
/// systems drive polling, publishing, and persistence.
fn main() {
    println!("Starting server");
    // Load configuration and initialize database
    let (config, us_db) = match initialize_app() {
        Ok(data) => data,
        Err(e) => {
            eprintln!("Failed to initialize app: {}", e);
            return;
        }
    };
    // Create a topic ID for gossip (use a fixed topic for now)
    // The 32-byte topic is the ASCII tag "us-sync-v1" zero-padded, so every
    // peer deriving the same tag lands on the same swarm.
    let mut topic_bytes = [0u8; 32];
    topic_bytes[..10].copy_from_slice(b"us-sync-v1");
    let topic_id = TopicId::from_bytes(topic_bytes);
    // Start Bevy app (headless)
    // MinimalPlugins = no window/renderer; the Update systems below are
    // defined in the `systems` module.
    App::new()
        .add_plugins(MinimalPlugins)
        .add_message::<PublishMessageEvent>()
        .add_message::<GossipMessageReceived>()
        .insert_resource(AppConfig(config))
        .insert_resource(Database(us_db))
        .insert_resource(GossipTopic(topic_id))
        .add_systems(Startup, (setup_database, setup_gossip))
        .add_systems(
            Update,
            (
                poll_gossip_init,
                poll_chat_db,
                detect_new_messages,
                publish_to_gossip,
                receive_from_gossip,
                save_gossip_messages,
            ),
        )
        .run();
}
/// Initialize configuration and database
///
/// Loads `config.toml` when present; otherwise writes the built-in default
/// configuration to disk first. Then opens the SQLite database and ensures
/// the schema exists.
fn initialize_app() -> Result<(Config, Arc<Mutex<Connection>>)> {
    let config: Config;
    if Path::new("config.toml").exists() {
        println!("Loading config from config.toml");
        config = Config::from_file("config.toml")?;
    } else {
        println!("No config.toml found, using default configuration");
        let defaults = Config::default_config();
        defaults
            .save("config.toml")
            .context("Failed to save default config")?;
        println!("Saved default configuration to config.toml");
        config = defaults;
    }
    println!("Configuration loaded");
    println!(" Database: {}", config.database.path);
    println!(" Chat DB: {}", config.database.chat_db_path);
    // Initialize database
    println!("Initializing database at {}", config.database.path);
    let conn = Connection::open(&config.database.path).context("Failed to open database")?;
    db::initialize_database(&conn).context("Failed to initialize database schema")?;
    Ok((config, Arc::new(Mutex::new(conn))))
}

View File

@@ -0,0 +1,60 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
/// Represents a message stored in our database
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Message {
    // Internal primary key (`messages.id`).
    pub id: i64,
    // Rowid of the source row in the iMessage chat.db.
    pub chat_db_rowid: i64,
    // Message body; chat.db rows can have no text.
    pub text: Option<String>,
    // When the message was sent, if known.
    pub timestamp: Option<DateTime<Utc>>,
    pub is_from_me: bool,
    // When this row was inserted into our database.
    pub created_at: DateTime<Utc>,
}
/// Represents a message embedding (full message vector)
#[derive(Debug, Clone)]
pub struct MessageEmbedding {
    pub id: i64,
    // FK to `messages.id`.
    pub message_id: i64,
    pub embedding: Vec<f32>,
    // Model that produced this vector.
    pub model_name: String,
    pub created_at: DateTime<Utc>,
}
/// Represents a word embedding
#[derive(Debug, Clone)]
pub struct WordEmbedding {
    pub id: i64,
    // Lowercased word token (unique in the table).
    pub word: String,
    pub embedding: Vec<f32>,
    pub model_name: String,
    pub created_at: DateTime<Utc>,
}
/// Represents an emotion classification for a message
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Emotion {
    pub id: i64,
    // FK to `messages.id`.
    pub message_id: i64,
    // Emotion label, e.g. one of the go_emotions classes.
    pub emotion: String,
    pub confidence: f64,
    pub model_version: String,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}
/// Represents an emotion training sample
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmotionTrainingSample {
    pub id: i64,
    // Optional FK; the source message may have been deleted (SET NULL).
    pub message_id: Option<i64>,
    pub text: String,
    // Label the classifier produced when the sample was captured.
    pub expected_emotion: String,
    // Human-corrected label, once validated.
    pub actual_emotion: Option<String>,
    pub confidence: Option<f64>,
    pub is_validated: bool,
    pub notes: Option<String>,
    pub created_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
}

View File

@@ -0,0 +1,72 @@
syntax = "proto3";

package emotions;

// Emotion classification for a message
// Field layout mirrors the SQLite `emotions` table; timestamps are the
// integer values stored there (presumably unix epoch seconds — confirm
// against the db layer).
message Emotion {
  int64 id = 1;
  int64 message_id = 2;
  string emotion = 3;
  double confidence = 4;
  string model_version = 5;
  int64 created_at = 6;
  int64 updated_at = 7;
}

// Request to get a single emotion by message ID
message GetEmotionRequest {
  int64 message_id = 1;
}

// Request to get multiple emotions with optional filters
message GetEmotionsRequest {
  repeated int64 message_ids = 1;
  optional string emotion_filter = 2;
  optional double min_confidence = 3;
  optional int32 limit = 4;
  optional int32 offset = 5;
}

// Response containing multiple emotions
message EmotionsResponse {
  repeated Emotion emotions = 1;
  // Total matching rows, independent of limit/offset paging.
  int32 total_count = 2;
}

// Request to update an emotion (for corrections/fine-tuning)
message UpdateEmotionRequest {
  int64 message_id = 1;
  string emotion = 2;
  double confidence = 3;
  optional string notes = 4;
}

// Request to delete an emotion
message DeleteEmotionRequest {
  int64 id = 1;
}

// Generic response for mutations
message EmotionResponse {
  bool success = 1;
  string message = 2;
  optional Emotion emotion = 3;
}

// Empty message for list all
message Empty {}

// The emotion service with full CRUD operations
service EmotionService {
  // Read operations
  rpc GetEmotion(GetEmotionRequest) returns (Emotion);
  rpc GetEmotions(GetEmotionsRequest) returns (EmotionsResponse);
  rpc ListAllEmotions(Empty) returns (EmotionsResponse);
  // Update operations (for classification corrections and fine-tuning)
  rpc UpdateEmotion(UpdateEmotionRequest) returns (EmotionResponse);
  rpc BatchUpdateEmotions(stream UpdateEmotionRequest) returns (EmotionResponse);
  // Delete operation
  rpc DeleteEmotion(DeleteEmotionRequest) returns (EmotionResponse);
}

View File

@@ -0,0 +1,121 @@
use crate::db;
use anyhow::{Context, Result};
use chrono::Utc;
use rusqlite::Connection;
use std::path::Path;
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::{mpsc, Mutex};
use tokio::time;
use tracing::{debug, error, info, warn};
/// Polls the iMessage `chat.db` for new messages and feeds them into the
/// processing pipeline over an mpsc channel.
pub struct ChatPollerService {
    // Path to the (read-only) iMessage chat.db file.
    chat_db_path: String,
    // Our own application database, shared with the other services.
    us_db: Arc<Mutex<Connection>>,
    // Channel into the downstream processing pipeline.
    tx: mpsc::Sender<lib::Message>,
    // Delay between successive polls of chat.db.
    poll_interval: Duration,
}
impl ChatPollerService {
    /// Build a poller over `chat_db_path` that forwards new messages on
    /// `tx` every `poll_interval_ms` milliseconds.
    pub fn new(
        chat_db_path: String,
        us_db: Arc<Mutex<Connection>>,
        tx: mpsc::Sender<lib::Message>,
        poll_interval_ms: u64,
    ) -> Self {
        Self {
            chat_db_path,
            us_db,
            tx,
            poll_interval: Duration::from_millis(poll_interval_ms),
        }
    }

    /// Run the polling loop forever.
    ///
    /// Resumes from the last processed rowid persisted in `daemon_state`;
    /// only the initial state read can make this return early with an
    /// error. Errors inside the loop are logged and polling continues.
    pub async fn run(&self) -> Result<()> {
        info!("Starting chat poller service");
        info!("Polling {} every {:?}", self.chat_db_path, self.poll_interval);
        // Get last processed rowid from database
        let us_db = self.us_db.lock().await;
        let mut last_rowid = db::get_last_processed_rowid(&us_db)
            .context("Failed to get last processed rowid")?;
        drop(us_db); // release the lock before entering the long-lived loop
        info!("Starting from rowid: {}", last_rowid);
        let mut interval = time::interval(self.poll_interval);
        loop {
            interval.tick().await;
            match self.poll_messages(last_rowid).await {
                Ok(new_messages) => {
                    if !new_messages.is_empty() {
                        info!("Found {} new messages", new_messages.len());
                        for msg in new_messages {
                            // Update last_rowid
                            if msg.rowid > last_rowid {
                                last_rowid = msg.rowid;
                            }
                            // Send message to processing pipeline
                            if let Err(e) = self.tx.send(msg).await {
                                error!("Failed to send message to processing pipeline: {}", e);
                            }
                        }
                        // Save state to database so a restart resumes here
                        let us_db = self.us_db.lock().await;
                        if let Err(e) = db::save_last_processed_rowid(&us_db, last_rowid) {
                            warn!("Failed to save last processed rowid: {}", e);
                        }
                        drop(us_db);
                    } else {
                        debug!("No new messages");
                    }
                }
                Err(e) => {
                    error!("Error polling messages: {}", e);
                }
            }
        }
    }

    /// Fetch chat.db messages with rowid greater than `last_rowid`, insert
    /// each into our own database, and return them.
    ///
    /// NOTE(review): only the last 7 days are scanned, so a message with a
    /// higher rowid but an older date would be missed if the daemon was
    /// down longer than a week — confirm that window is intentional.
    async fn poll_messages(&self, last_rowid: i64) -> Result<Vec<lib::Message>> {
        // Check if chat.db exists
        if !Path::new(&self.chat_db_path).exists() {
            return Err(anyhow::anyhow!("chat.db not found at {}", self.chat_db_path));
        }
        // Open chat.db (read-only)
        let chat_db = lib::ChatDb::open(&self.chat_db_path)
            .context("Failed to open chat.db")?;
        // Get messages with rowid > last_rowid
        // We'll use the existing get_our_messages but need to filter by rowid
        // For now, let's get recent messages and filter in-memory
        let start_date = Some(Utc::now() - chrono::Duration::days(7));
        let end_date = Some(Utc::now());
        let messages = chat_db
            .get_our_messages(start_date, end_date)
            .context("Failed to get messages from chat.db")?;
        // Filter messages with rowid > last_rowid and ensure they're not duplicates
        let new_messages: Vec<lib::Message> = messages
            .into_iter()
            .filter(|msg| msg.rowid > last_rowid)
            .collect();
        // Insert new messages into our database
        let us_db = self.us_db.lock().await;
        for msg in &new_messages {
            if let Err(e) = db::insert_message(&us_db, msg) {
                warn!("Failed to insert message {}: {}", msg.rowid, e);
            }
        }
        Ok(new_messages)
    }
}

View File

@@ -0,0 +1,110 @@
use crate::db;
use anyhow::Result;
use rusqlite::Connection;
use std::sync::Arc;
use tokio::sync::{mpsc, Mutex};
use tracing::{error, info, warn};
/// Service responsible for generating embeddings for messages and words
pub struct EmbeddingService {
    // Shared application database (tokio mutex; may be held across awaits).
    us_db: Arc<Mutex<Connection>>,
    // Incoming messages from the chat poller pipeline.
    rx: mpsc::Receiver<lib::Message>,
    // Identifier of the embedding model, recorded alongside each vector.
    model_name: String,
}
impl EmbeddingService {
    /// Build the service from its database handle, input channel, and
    /// model identifier.
    pub fn new(
        us_db: Arc<Mutex<Connection>>,
        rx: mpsc::Receiver<lib::Message>,
        model_name: String,
    ) -> Self {
        Self {
            us_db,
            rx,
            model_name,
        }
    }

    /// Consume messages from the channel until it closes, generating and
    /// persisting embeddings for each; per-message errors are logged, not
    /// propagated.
    pub async fn run(mut self) -> Result<()> {
        info!("Starting embedding service with model: {}", self.model_name);
        // TODO: Load the embedding model here
        // For now, we'll create a placeholder implementation
        info!("Loading embedding model...");
        // let model = load_embedding_model(&self.model_name)?;
        info!("Embedding model loaded (placeholder)");
        while let Some(msg) = self.rx.recv().await {
            if let Err(e) = self.process_message(&msg).await {
                error!("Error processing message {}: {}", msg.rowid, e);
            }
        }
        Ok(())
    }

    /// Generate and store the message-level embedding plus any missing
    /// per-word embeddings. Skips messages that are absent from our DB,
    /// already embedded, or have no text.
    async fn process_message(&self, msg: &lib::Message) -> Result<()> {
        // Get message ID from our database
        let us_db = self.us_db.lock().await;
        let message_id = match db::get_message_id_by_chat_rowid(&us_db, msg.rowid)? {
            Some(id) => id,
            None => {
                warn!("Message {} not found in database, skipping", msg.rowid);
                return Ok(());
            }
        };
        // Check if embedding already exists
        if db::get_message_embedding(&us_db, message_id)?.is_some() {
            return Ok(());
        }
        // Skip if message has no text
        let text = match &msg.text {
            Some(t) if !t.is_empty() => t,
            _ => return Ok(()),
        };
        // Release the lock while generating the embedding.
        drop(us_db);
        // Generate embedding for the full message
        // TODO: Replace with actual model inference
        let message_embedding = self.generate_embedding(text)?;
        // Store message embedding
        let us_db = self.us_db.lock().await;
        db::insert_message_embedding(&us_db, message_id, &message_embedding, &self.model_name)?;
        // Tokenize and generate word embeddings
        let words = self.tokenize(text);
        for word in words {
            // Check if word embedding exists
            if db::get_word_embedding(&us_db, &word)?.is_none() {
                // Generate embedding for word
                let word_embedding = self.generate_embedding(&word)?;
                db::insert_word_embedding(&us_db, &word, &word_embedding, &self.model_name)?;
            }
        }
        drop(us_db);
        info!("Generated embeddings for message {}", msg.rowid);
        Ok(())
    }

    /// Placeholder embedding: a 1024-dim zero vector. `text` is currently
    /// unused — the real Candle inference will consume it.
    fn generate_embedding(&self, text: &str) -> Result<Vec<f32>> {
        // TODO: Replace with actual model inference using Candle
        // For now, return a placeholder embedding of dimension 1024
        let embedding = vec![0.0f32; 1024];
        Ok(embedding)
    }

    /// Lowercased tokenization on whitespace and ASCII punctuation.
    fn tokenize(&self, text: &str) -> Vec<String> {
        // Simple word tokenization (split on whitespace and punctuation)
        // TODO: Replace with proper tokenizer
        text.split(|c: char| c.is_whitespace() || c.is_ascii_punctuation())
            .filter(|s| !s.is_empty())
            .map(|s| s.to_lowercase())
            .collect()
    }
}

View File

@@ -0,0 +1,119 @@
use crate::db;
use anyhow::Result;
use rusqlite::Connection;
use std::sync::Arc;
use tokio::sync::{mpsc, Mutex};
use tracing::{error, info, warn};
/// Service responsible for classifying emotions in messages
pub struct EmotionService {
    // Shared application database (tokio mutex; may be held across awaits).
    us_db: Arc<Mutex<Connection>>,
    // Incoming messages from the chat poller pipeline.
    rx: mpsc::Receiver<lib::Message>,
    // Model identifier stored with each classification row.
    model_version: String,
    // Probability (0.0–1.0) that a classified message is also copied into
    // the training set.
    training_sample_rate: f64,
}
impl EmotionService {
    /// Build the service from its database handle, input channel, model
    /// identifier, and training-set sampling rate.
    pub fn new(
        us_db: Arc<Mutex<Connection>>,
        rx: mpsc::Receiver<lib::Message>,
        model_version: String,
        training_sample_rate: f64,
    ) -> Self {
        Self {
            us_db,
            rx,
            model_version,
            training_sample_rate,
        }
    }

    /// Consume messages from the channel until it closes, classifying
    /// each; per-message errors are logged, not propagated.
    pub async fn run(mut self) -> Result<()> {
        info!(
            "Starting emotion classification service with model: {}",
            self.model_version
        );
        info!(
            "Training sample rate: {:.2}%",
            self.training_sample_rate * 100.0
        );
        // TODO: Load the RoBERTa emotion classification model here
        info!("Loading RoBERTa-base-go_emotions model...");
        // let model = load_emotion_model(&self.model_version)?;
        info!("Emotion model loaded (placeholder)");
        while let Some(msg) = self.rx.recv().await {
            if let Err(e) = self.process_message(&msg).await {
                error!("Error processing message {}: {}", msg.rowid, e);
            }
        }
        Ok(())
    }

    /// Classify one message and persist the result; a random subset
    /// (at `training_sample_rate`) is also copied into the training set.
    /// Skips messages absent from our DB, already classified, or empty.
    async fn process_message(&self, msg: &lib::Message) -> Result<()> {
        // Get message ID from our database
        let us_db = self.us_db.lock().await;
        let message_id = match db::get_message_id_by_chat_rowid(&us_db, msg.rowid)? {
            Some(id) => id,
            None => {
                warn!("Message {} not found in database, skipping", msg.rowid);
                return Ok(());
            }
        };
        // Check if emotion classification already exists
        if db::get_emotion_by_message_id(&us_db, message_id)?.is_some() {
            return Ok(());
        }
        // Skip if message has no text
        let text = match &msg.text {
            Some(t) if !t.is_empty() => t,
            _ => return Ok(()),
        };
        // Release the lock while classifying.
        drop(us_db);
        // Classify emotion
        // TODO: Replace with actual model inference
        let (emotion, confidence) = self.classify_emotion(text)?;
        // Store emotion classification
        let us_db = self.us_db.lock().await;
        db::insert_emotion(&us_db, message_id, &emotion, confidence, &self.model_version)?;
        // Randomly add to training set based on sample rate
        if rand::random::<f64>() < self.training_sample_rate {
            db::insert_training_sample(&us_db, Some(message_id), text, &emotion)?;
            info!(
                "Added message {} to training set (emotion: {})",
                msg.rowid, emotion
            );
        }
        drop(us_db);
        info!(
            "Classified message {} as {} (confidence: {:.2})",
            msg.rowid, emotion, confidence
        );
        Ok(())
    }

    /// Placeholder classifier: always returns ("neutral", 0.85). `text` is
    /// currently unused — the real RoBERTa inference will consume it.
    fn classify_emotion(&self, text: &str) -> Result<(String, f64)> {
        // TODO: Replace with actual RoBERTa-base-go_emotions inference using Candle
        // The model outputs probabilities for 28 emotions:
        // admiration, amusement, anger, annoyance, approval, caring, confusion,
        // curiosity, desire, disappointment, disapproval, disgust, embarrassment,
        // excitement, fear, gratitude, grief, joy, love, nervousness, optimism,
        // pride, realization, relief, remorse, sadness, surprise, neutral
        // For now, return a placeholder
        let emotion = "neutral".to_string();
        let confidence = 0.85;
        Ok((emotion, confidence))
    }
}

View File

@@ -0,0 +1,232 @@
use crate::db;
use anyhow::Result;
use rusqlite::Connection;
use std::sync::Arc;
use tokio::sync::Mutex;
use tonic::{Request, Response, Status};
use tracing::{error, info};
// Include the generated protobuf code
pub mod emotions {
tonic::include_proto!("emotions");
}
use emotions::emotion_service_server::{EmotionService as EmotionServiceTrait, EmotionServiceServer};
use emotions::*;
/// gRPC server exposing the emotion-classification API via tonic.
pub struct GrpcServer {
    // Shared SQLite connection, guarded by an async (tokio) mutex.
    us_db: Arc<Mutex<Connection>>,
    // Bind address (parsed into a SocketAddr when `run` is called).
    address: String,
}
impl GrpcServer {
pub fn new(us_db: Arc<Mutex<Connection>>, address: String) -> Self {
Self { us_db, address }
}
pub async fn run(self) -> Result<()> {
let addr = self.address.parse()?;
info!("Starting gRPC server on {}", self.address);
let service = EmotionServiceImpl {
us_db: self.us_db.clone(),
};
tonic::transport::Server::builder()
.add_service(EmotionServiceServer::new(service))
.serve(addr)
.await?;
Ok(())
}
}
/// Handler state for the generated `EmotionService` gRPC trait.
struct EmotionServiceImpl {
    // Shared SQLite connection, guarded by an async (tokio) mutex.
    us_db: Arc<Mutex<Connection>>,
}
#[tonic::async_trait]
impl EmotionServiceTrait for EmotionServiceImpl {
    /// Fetch the emotion classification for a single message id.
    async fn get_emotion(
        &self,
        request: Request<GetEmotionRequest>,
    ) -> Result<Response<Emotion>, Status> {
        let req = request.into_inner();
        let conn = self.us_db.lock().await;
        match db::get_emotion_by_message_id(&conn, req.message_id) {
            Ok(Some(emotion)) => Ok(Response::new(emotion_to_proto(emotion))),
            Ok(None) => Err(Status::not_found(format!(
                "Emotion not found for message_id: {}",
                req.message_id
            ))),
            Err(e) => {
                error!("Database error: {}", e);
                Err(Status::internal("Database error"))
            }
        }
    }

    /// List emotions with optional label filter, confidence floor, and paging.
    async fn get_emotions(
        &self,
        request: Request<GetEmotionsRequest>,
    ) -> Result<Response<EmotionsResponse>, Status> {
        let req = request.into_inner();
        let conn = self.us_db.lock().await;
        let emotion_filter = req.emotion_filter.as_deref();
        let min_confidence = req.min_confidence;
        let limit = req.limit.map(|l| l as i32);
        let offset = req.offset.map(|o| o as i32);
        match db::list_emotions(&conn, emotion_filter, min_confidence, limit, offset) {
            Ok(emotions) => {
                // total_count reflects all stored rows, not just this page.
                let total_count = db::count_emotions(&conn).unwrap_or(0);
                Ok(Response::new(EmotionsResponse {
                    emotions: emotions.into_iter().map(emotion_to_proto).collect(),
                    total_count,
                }))
            }
            Err(e) => {
                error!("Database error: {}", e);
                Err(Status::internal("Database error"))
            }
        }
    }

    /// List every stored emotion with no filtering or paging.
    async fn list_all_emotions(
        &self,
        _request: Request<Empty>,
    ) -> Result<Response<EmotionsResponse>, Status> {
        let conn = self.us_db.lock().await;
        match db::list_emotions(&conn, None, None, None, None) {
            Ok(emotions) => {
                let total_count = emotions.len() as i32;
                Ok(Response::new(EmotionsResponse {
                    emotions: emotions.into_iter().map(emotion_to_proto).collect(),
                    total_count,
                }))
            }
            Err(e) => {
                error!("Database error: {}", e);
                Err(Status::internal("Database error"))
            }
        }
    }

    /// Overwrite a message's emotion; if reviewer notes are present, also
    /// record the message text as a training sample.
    async fn update_emotion(
        &self,
        request: Request<UpdateEmotionRequest>,
    ) -> Result<Response<EmotionResponse>, Status> {
        let req = request.into_inner();
        let conn = self.us_db.lock().await;
        match db::update_emotion(&conn, req.message_id, &req.emotion, req.confidence) {
            Ok(_) => {
                // Only the *presence* of notes matters here: it marks the
                // correction as human-reviewed, so the message becomes a
                // training sample. The note text itself is not persisted.
                if req.notes.is_some() {
                    if let Ok(Some(msg)) = db::get_message(&conn, req.message_id) {
                        if let Some(text) = msg.text {
                            let _ = db::insert_training_sample(
                                &conn,
                                Some(req.message_id),
                                &text,
                                &req.emotion,
                            );
                        }
                    }
                }
                // Fetch the updated emotion for the response; success either way.
                match db::get_emotion_by_message_id(&conn, req.message_id) {
                    Ok(Some(emotion)) => Ok(Response::new(EmotionResponse {
                        success: true,
                        message: "Emotion updated successfully".to_string(),
                        emotion: Some(emotion_to_proto(emotion)),
                    })),
                    _ => Ok(Response::new(EmotionResponse {
                        success: true,
                        message: "Emotion updated successfully".to_string(),
                        emotion: None,
                    })),
                }
            }
            Err(e) => {
                error!("Database error: {}", e);
                Err(Status::internal("Database error"))
            }
        }
    }

    /// Apply a client-streamed batch of emotion updates.
    ///
    /// Failed items are logged and skipped; the response reports how many
    /// updates succeeded. The DB lock is re-acquired per item so other
    /// tasks can interleave.
    async fn batch_update_emotions(
        &self,
        request: Request<tonic::Streaming<UpdateEmotionRequest>>,
    ) -> Result<Response<EmotionResponse>, Status> {
        let mut stream = request.into_inner();
        let mut count = 0;
        while let Some(req) = stream.message().await? {
            let conn = self.us_db.lock().await;
            match db::update_emotion(&conn, req.message_id, &req.emotion, req.confidence) {
                Ok(_) => {
                    count += 1;
                    // As in update_emotion: notes presence => training sample.
                    if req.notes.is_some() {
                        if let Ok(Some(msg)) = db::get_message(&conn, req.message_id) {
                            if let Some(text) = msg.text {
                                let _ = db::insert_training_sample(
                                    &conn,
                                    Some(req.message_id),
                                    &text,
                                    &req.emotion,
                                );
                            }
                        }
                    }
                }
                Err(e) => {
                    error!("Failed to update emotion for message {}: {}", req.message_id, e);
                }
            }
            drop(conn);
        }
        Ok(Response::new(EmotionResponse {
            success: true,
            message: format!("Updated {} emotions", count),
            emotion: None,
        }))
    }

    /// Delete an emotion row by its own id (not the message id).
    async fn delete_emotion(
        &self,
        request: Request<DeleteEmotionRequest>,
    ) -> Result<Response<EmotionResponse>, Status> {
        let req = request.into_inner();
        let conn = self.us_db.lock().await;
        match db::delete_emotion(&conn, req.id) {
            Ok(_) => Ok(Response::new(EmotionResponse {
                success: true,
                message: format!("Emotion {} deleted successfully", req.id),
                emotion: None,
            })),
            Err(e) => {
                error!("Database error: {}", e);
                Err(Status::internal("Database error"))
            }
        }
    }
}
/// Convert a database emotion row into its protobuf representation.
///
/// Timestamps are converted to Unix seconds for the wire format.
fn emotion_to_proto(row: crate::models::Emotion) -> Emotion {
    let created_at = row.created_at.timestamp();
    let updated_at = row.updated_at.timestamp();
    Emotion {
        id: row.id,
        message_id: row.message_id,
        emotion: row.emotion,
        confidence: row.confidence,
        model_version: row.model_version,
        created_at,
        updated_at,
    }
}

View File

@@ -0,0 +1,7 @@
pub mod chat_poller;
pub mod embedding_service;
pub mod emotion_service;
pub use chat_poller::ChatPollerService;
pub use embedding_service::EmbeddingService;
pub use emotion_service::EmotionService;

View File

@@ -0,0 +1,114 @@
use bevy::prelude::*;
use lib::sync::{Syncable, SyncMessage};
use crate::components::*;
/// Bevy plugin for transparent CRDT sync via gossip.
///
/// NOTE: the publish/receive systems are generic over the synced resource
/// type, so they cannot be registered here — a bare generic fn item gives
/// the scheduler no way to infer `T` and fails to compile. Register them
/// with a concrete type instead, e.g.
/// `app.add_systems(Update, (publish_sync_ops::<MyRes>, receive_sync_ops::<MyRes>))`.
pub struct SyncPlugin;

impl Plugin for SyncPlugin {
    fn build(&self, _app: &mut App) {
        // Intentionally empty: per-type sync systems are registered where
        // the concrete resource type is known (see note on the struct).
    }
}
/// Trait for Bevy resources that can be synced.
///
/// Marker trait: any `Resource` that is also `Syncable` (with the usual
/// thread-safety bounds) can participate in gossip sync.
pub trait SyncedResource: Resource + Syncable + Clone + Send + Sync + 'static {}
/// Queue of sync operations to publish.
#[derive(Resource)]
pub struct SyncOpQueue<T: Syncable> {
    pub ops: Vec<T::Operation>,
}

/// Manual `Default` so the queue is constructible for every `T: Syncable`.
/// `#[derive(Default)]` would add a spurious `T: Default` bound even though
/// only `Vec<T::Operation>` (which is always defaultable) needs one, which
/// would break `init_resource::<SyncOpQueue<T>>()` for non-`Default` types.
impl<T: Syncable> Default for SyncOpQueue<T> {
    fn default() -> Self {
        Self { ops: Vec::new() }
    }
}
impl<T: Syncable> SyncOpQueue<T> {
    /// Enqueue an operation to be broadcast on the next publish pass.
    pub fn push(&mut self, op: T::Operation) {
        self.ops.push(op);
    }
}
/// System to publish queued sync operations to gossip.
///
/// No-op until the gossip sender resource exists and ops are queued.
fn publish_sync_ops<T: SyncedResource>(
    mut queue: ResMut<SyncOpQueue<T>>,
    resource: Res<T>,
    sender: Option<Res<IrohGossipSender>>,
) {
    let Some(sender) = sender else { return };
    if queue.ops.is_empty() {
        return;
    }
    // Hold the sender lock across the whole drain so ops go out in order.
    // Underscored until broadcasting is implemented (see TODO below).
    let _sender_guard = sender.sender.lock();
    for op in queue.ops.drain(..) {
        let sync_msg = resource.create_sync_message(op);
        match sync_msg.to_bytes() {
            Ok(bytes) => {
                println!("Publishing sync operation: {} bytes", bytes.len());
                // TODO: Actually send via gossip
                // _sender_guard.broadcast(bytes)?;
            }
            Err(e) => {
                eprintln!("Failed to serialize sync operation: {}", e);
            }
        }
    }
}
/// System to receive and apply sync operations from gossip.
///
/// Placeholder: the resource parameter is underscored until the TODO below
/// lands, keeping the signature stable while silencing unused warnings.
fn receive_sync_ops<T: SyncedResource>(
    _resource: ResMut<T>,
    receiver: Option<Res<IrohGossipReceiver>>,
) {
    if receiver.is_none() {
        return;
    }
    // TODO: Poll receiver for messages
    // For each message:
    // 1. Deserialize SyncMessage<T::Operation>
    // 2. Apply to resource with resource.apply_sync_op(&op)
}
/// Helper to register a synced resource on a Bevy `App`.
pub trait SyncedResourceExt {
    /// Set up sync machinery (op queue, etc.) for resource type `T`.
    fn add_synced_resource<T: SyncedResource>(&mut self) -> &mut Self;
}
impl SyncedResourceExt for App {
fn add_synced_resource<T: SyncedResource>(&mut self) -> &mut Self {
self.init_resource::<SyncOpQueue<T>>();
self
}
}
/// Tests exercising the sync plugin with an example synced resource.
#[cfg(test)]
mod tests {
    use super::*;
    use lib::sync::synced;
    // NOTE(review): `#[synced]` presumably generates the `Syncable` impl
    // (operation type + apply logic) — confirm against lib::sync.
    #[synced]
    pub struct TestConfig {
        pub value: i32,
        // Per-node identity, excluded from sync via the attribute.
        #[sync(skip)]
        node_id: String,
    }
    impl Resource for TestConfig {}
    impl SyncedResource for TestConfig {}
    // Smoke test: building an app with the plugin and one synced resource
    // must not panic.
    #[test]
    fn test_sync_plugin() {
        let mut app = App::new();
        app.add_plugins(MinimalPlugins);
        app.add_plugins(SyncPlugin);
        app.add_synced_resource::<TestConfig>();
        // TODO: Test that operations are queued and published
    }
}

View File

@@ -0,0 +1,12 @@
use bevy::prelude::*;
use crate::components::*;
/// System: Poll chat.db for new messages using Bevy's task system.
///
/// Placeholder: parameters are underscored until the TODO below lands.
pub fn poll_chat_db(
    _config: Res<AppConfig>,
    _db: Res<Database>,
) {
    // TODO: Use Bevy's AsyncComputeTaskPool to poll chat.db
    // This will replace the tokio::spawn chat poller
}

View File

@@ -0,0 +1,116 @@
use bevy::prelude::*;
use parking_lot::Mutex;
use std::sync::Arc;
use crate::components::*;
/// System: Poll the gossip init task and insert resources when complete.
///
/// The `GossipInitTask` resource is removed as soon as the task finishes,
/// whether or not it yielded resources — otherwise a completed task would
/// be polled again on the next frame.
pub fn poll_gossip_init(
    mut commands: Commands,
    init_task: Option<ResMut<GossipInitTask>>,
) {
    let Some(mut task) = init_task else { return };
    // Non-blocking: poll_once returns None while the task is still running.
    let Some(result) =
        bevy::tasks::block_on(bevy::tasks::futures_lite::future::poll_once(&mut task.0))
    else {
        return;
    };
    // Task finished — always drop it so we never re-poll a completed task.
    commands.remove_resource::<GossipInitTask>();
    match result {
        Some((endpoint, gossip, router, sender, receiver)) => {
            println!("Inserting gossip resources");
            commands.insert_resource(IrohEndpoint {
                endpoint,
                node_id: "TODO".to_string(), // TODO: Figure out how to get node_id in iroh 0.95
            });
            commands.insert_resource(IrohGossipHandle { gossip });
            commands.insert_resource(IrohRouter { router });
            commands.insert_resource(IrohGossipSender {
                sender: Arc::new(Mutex::new(sender)),
            });
            commands.insert_resource(IrohGossipReceiver {
                receiver: Arc::new(Mutex::new(receiver)),
            });
        }
        None => {
            eprintln!("Gossip init task completed without producing resources");
        }
    }
}
/// System: Detect new messages in SQLite that need to be published to gossip.
///
/// Placeholder: `_last_synced` will track the highest rowid already
/// published so only newer rows trigger events.
pub fn detect_new_messages(
    _db: Res<Database>,
    _last_synced: Local<i64>,
    _publish_events: MessageWriter<PublishMessageEvent>,
) {
    // TODO: Query SQLite for messages with rowid > last_synced
    // When we detect new messages, we'll send PublishMessageEvent
}
/// System: Publish messages to gossip when PublishMessageEvent is triggered.
///
/// No-op until gossip init has inserted both the sender and the endpoint
/// resources (they are inserted together on init completion).
pub fn publish_to_gossip(
    mut events: MessageReader<PublishMessageEvent>,
    sender: Option<Res<IrohGossipSender>>,
    endpoint: Option<Res<IrohEndpoint>>,
) {
    // The sender is underscored: it is unused until real broadcasting lands
    // (see TODO below), but its presence still gates publishing.
    let (Some(_sender), Some(endpoint)) = (sender, endpoint) else {
        return;
    };
    for event in events.read() {
        println!("Publishing message {} to gossip", event.message.rowid);
        // Wrap the raw message with sync metadata (when / which node).
        let sync_message = SyncMessage {
            message: event.message.clone(),
            sync_timestamp: chrono::Utc::now().timestamp(),
            publisher_node_id: endpoint.node_id.clone(),
        };
        match serialize_sync_message(&sync_message) {
            Ok(bytes) => {
                // TODO: Publish to gossip
                // For now, just log that we would publish
                println!("Would publish {} bytes to gossip", bytes.len());
                // Note: Direct async broadcasting from Bevy systems is tricky due to Sync requirements
                // We'll need to use a different approach, possibly with channels or a dedicated task
            }
            Err(e) => {
                eprintln!("Failed to serialize sync message: {}", e);
            }
        }
    }
}
/// System: Receive messages from gossip.
pub fn receive_from_gossip(
    mut _gossip_events: MessageWriter<GossipMessageReceived>,
    receiver: Option<Res<IrohGossipReceiver>>,
) {
    let Some(_receiver) = receiver else {
        // Gossip not initialized yet, skip
        return;
    };
    // TODO: Implement proper async message reception
    // This will require spawning a long-running task that listens for gossip events
    // and sends them as Bevy messages. For now, this is a placeholder.
}
/// System: Save received gossip messages to SQLite.
pub fn save_gossip_messages(
    mut events: MessageReader<GossipMessageReceived>,
    _db: Res<Database>,
) {
    for event in events.read() {
        let sync = &event.sync_message;
        println!("Received message {} from gossip (published by {})",
            sync.message.rowid,
            sync.publisher_node_id);
        // TODO: Save to SQLite if we don't already have it
    }
}

View File

@@ -0,0 +1,7 @@
pub mod database;
pub mod gossip;
pub mod setup;
pub use database::*;
pub use gossip::*;
pub use setup::*;

View File

@@ -0,0 +1,22 @@
use bevy::prelude::*;
use bevy::tasks::AsyncComputeTaskPool;
use crate::components::*;
/// Startup system: Initialize database.
///
/// The connection itself is created before the app starts; this system
/// only confirms the resource is present.
pub fn setup_database(_db: Res<Database>) {
    println!("Database resource initialized");
}
/// Startup system: Initialize Iroh gossip.
///
/// Currently only logs the configured topic; real async initialization is
/// deferred (see TODO) due to `Sync` requirements in Bevy tasks.
pub fn setup_gossip(_commands: Commands, topic: Res<GossipTopic>) {
    println!("Setting up Iroh gossip for topic: {:?}", topic.0);
    // TODO: Initialize gossip properly
    // For now, skip async initialization due to Sync requirements in Bevy tasks
    // We'll need to use a different initialization strategy
    println!("Gossip initialization skipped (TODO: implement proper async init)");
}