# Service configuration (Matrix bot + OpenSearch archive + Mistral models).
# NOTE(review): original file had every pair on one physical line, which is
# invalid TOML 1.0 — reformatted to one key/value per line; values unchanged.

[matrix]
homeserver_url = "http://localhost:8008"
user_id = "@sol:sunbeam.local"
state_store_path = "data/matrix-state"
db_path = "data/sol.db"

[opensearch]
url = "http://localhost:9200"
index = "sol_archive"
memory_index = "sol_user_memory"
batch_size = 50
flush_interval_ms = 2000
embedding_pipeline = "tuwunel_embedding_pipeline"

[mistral]
default_model = "mistral-medium-latest"
evaluation_model = "ministral-3b-latest"
research_model = "mistral-large-latest"
max_tool_iterations = 250
# tokenizer_path = "dev/tokenizer.json"  # uncomment to use a local tokenizer file

[behavior]
response_delay_min_ms = 0
response_delay_max_ms = 0
spontaneous_delay_min_ms = 0
spontaneous_delay_max_ms = 0
spontaneous_threshold = 0.85
room_context_window = 200
dm_context_window = 200
instant_responses = true
memory_extraction_enabled = false
script_fetch_allowlist = ["api.open-meteo.com", "wttr.in"]

[agents]
orchestrator_model = "mistral-medium-latest"
compaction_threshold = 118_000
use_conversations_api = true
coding_model = "mistral-medium-latest"
agent_prefix = "dev"
research_model = "ministral-3b-latest"
research_max_iterations = 10
research_max_agents = 25
research_max_depth = 4

[grpc]
listen_addr = "0.0.0.0:50051"
dev_mode = true

[vault]
url = "http://localhost:8200"
role = "sol-agent"
mount = "secret"

[services.searxng]
url = "http://localhost:8888"