Private
Public Access
1
0
Files
Kordophone/kordophoned/src/daemon/mod.rs

731 lines
27 KiB
Rust
Raw Normal View History

pub mod settings;
use settings::keys as SettingsKey;
2025-05-15 20:11:10 -07:00
use settings::Settings;
pub mod events;
use events::*;
2025-04-27 22:44:05 -07:00
pub mod signals;
use signals::*;
use anyhow::Result;
use directories::ProjectDirs;
use std::collections::HashMap;
2025-05-15 20:11:10 -07:00
use std::error::Error;
2025-06-06 16:35:51 -07:00
use std::path::PathBuf;
use std::sync::Arc;
use thiserror::Error;
2025-05-15 20:11:10 -07:00
use tokio::sync::mpsc::{Receiver, Sender};
use tokio::sync::Mutex;
2025-05-02 14:22:43 -07:00
use uuid::Uuid;
use kordophone_db::{
database::{Database, DatabaseAccess},
models::Conversation,
};
2025-05-01 20:45:20 -07:00
use kordophone::api::http_client::HTTPAPIClient;
2025-05-15 20:11:10 -07:00
use kordophone::api::APIInterface;
2025-05-02 14:22:43 -07:00
use kordophone::model::outgoing_message::OutgoingMessage;
2025-06-16 19:25:24 -07:00
use kordophone::model::{ConversationID, MessageID};
mod update_monitor;
use update_monitor::{UpdateMonitor, UpdateMonitorCommand};
2025-05-01 20:45:20 -07:00
mod auth_store;
use auth_store::DatabaseAuthenticationStore;
2025-05-02 14:22:43 -07:00
mod post_office;
use post_office::Event as PostOfficeEvent;
2025-05-15 20:11:10 -07:00
use post_office::PostOffice;
mod models;
pub use models::Attachment;
pub use models::Message;
2025-05-15 20:11:10 -07:00
mod attachment_store;
pub use attachment_store::AttachmentStore;
pub use attachment_store::AttachmentStoreEvent;
2025-05-02 14:22:43 -07:00
2025-06-26 16:23:53 -07:00
pub mod contact_resolver;
use contact_resolver::ContactResolver;
use contact_resolver::EDSContactResolverBackend;
use kordophone_db::models::participant::Participant as DbParticipant;
/// Errors specific to daemon operation.
#[derive(Debug, Error)]
pub enum DaemonError {
    /// An operation needed an API client, but no server URL has been
    /// configured in settings yet.
    #[error("Client Not Configured")]
    ClientNotConfigured,
}

/// Result type used at the daemon's public boundary; boxes the error so
/// it can cross thread/task boundaries.
pub type DaemonResult<T> = Result<T, Box<dyn Error + Send + Sync>>;
/// Log-target names used to tag log lines by subsystem
/// (e.g. `log::info!(target: target::SYNC, ...)`).
pub mod target {
    /// Conversation/message synchronization.
    pub const SYNC: &str = "sync";
    /// Event-loop dispatch.
    pub const EVENT: &str = "event";
    /// Settings reads and writes.
    pub const SETTINGS: &str = "settings";
    /// Update-stream monitor.
    pub const UPDATES: &str = "updates";
    /// Attachment store and transfers.
    pub const ATTACHMENTS: &str = "attachments";
    /// General daemon lifecycle.
    pub const DAEMON: &str = "daemon";
}
2025-02-11 23:15:24 -08:00
/// Long-running daemon that owns the message database, background sync
/// tasks, and the channels used to communicate with clients.
pub struct Daemon {
    /// Sink handed to frontends and background tasks for submitting events.
    pub event_sender: Sender<Event>,
    // Receiving half of the event channel; drained by `run`.
    event_receiver: Receiver<Event>,
    // Taken exactly once via `obtain_signal_receiver`; `None` afterwards.
    signal_receiver: Option<Receiver<Signal>>,
    // Used to push change notifications (signals) out to the client.
    signal_sender: Sender<Signal>,
    // Work queue for the post office (outgoing message delivery).
    post_office_sink: Sender<PostOfficeEvent>,
    // Receiving half; taken by `run` when the post office task is spawned.
    post_office_source: Option<Receiver<PostOfficeEvent>>,
    // In-flight outgoing messages keyed by conversation, so clients see
    // placeholder messages until the server acknowledges delivery.
    outgoing_messages: HashMap<ConversationID, Vec<OutgoingMessage>>,
    // Populated by `run` once the attachment store task is spawned.
    attachment_store_sink: Option<Sender<AttachmentStoreEvent>>,
    // Populated by `run`; used to restart the update monitor when the
    // server URL changes.
    update_monitor_command_tx: Option<Sender<UpdateMonitorCommand>>,
    // Version string reported to clients via `Event::GetVersion`.
    version: String,
    // Shared handle to the backing database.
    database: Arc<Mutex<Database>>,
    // Dedicated runtime on which background sync jobs are spawned.
    runtime: tokio::runtime::Runtime,
}
impl Daemon {
pub fn new() -> Result<Self> {
let database_path = Self::get_database_path();
log::info!("Database path: {}", database_path.display());
// Create the database directory if it doesn't exist
let database_dir = database_path.parent().unwrap();
std::fs::create_dir_all(database_dir)?;
// Create event channels
let (event_sender, event_receiver) = tokio::sync::mpsc::channel(100);
2025-04-27 22:44:05 -07:00
let (signal_sender, signal_receiver) = tokio::sync::mpsc::channel(100);
2025-05-02 14:22:43 -07:00
let (post_office_sink, post_office_source) = tokio::sync::mpsc::channel(100);
2025-05-15 20:11:10 -07:00
// Create background task runtime
let runtime = tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.unwrap();
let database_impl = Database::new(&database_path.to_string_lossy())?;
let database = Arc::new(Mutex::new(database_impl));
2025-05-15 20:11:10 -07:00
Ok(Self {
version: "0.1.0".to_string(),
database,
event_receiver,
event_sender,
2025-04-27 22:44:05 -07:00
signal_receiver: Some(signal_receiver),
2025-05-15 20:11:10 -07:00
signal_sender,
2025-05-02 14:22:43 -07:00
post_office_sink,
post_office_source: Some(post_office_source),
outgoing_messages: HashMap::new(),
attachment_store_sink: None,
update_monitor_command_tx: None,
2025-05-15 20:11:10 -07:00
runtime,
2025-04-27 22:44:05 -07:00
})
}
    /// Main daemon loop: spawns the long-lived background tasks (update
    /// monitor, post office, attachment store) and then processes events
    /// until every event sender has been dropped.
    pub async fn run(&mut self) {
        log::info!("Starting daemon version {}", self.version);
        log::debug!("Debug logging enabled.");

        // Update monitor: watches for server-side updates and feeds events
        // back through `event_sender`. Its command channel is kept so the
        // monitor can be restarted later (e.g. on server URL change).
        let mut update_monitor =
            UpdateMonitor::new(self.database.clone(), self.event_sender.clone());
        self.update_monitor_command_tx = Some(update_monitor.take_command_channel());
        tokio::spawn(async move {
            update_monitor.run().await; // should run indefinitely
        });

        // Post office: delivers outgoing messages, constructing an API
        // client on demand via the provided async closure.
        {
            let mut database = self.database.clone();
            let event_sender = self.event_sender.clone();
            // Panics if `run` is called twice; the source is taken here.
            let post_office_source = self.post_office_source.take().unwrap();
            tokio::spawn(async move {
                let mut post_office =
                    PostOffice::new(post_office_source, event_sender, async move || {
                        Self::get_client_impl(&mut database).await
                    });
                post_office.run().await;
            });
        }

        // Attachment store: services download/upload requests.
        let mut attachment_store =
            AttachmentStore::new(self.database.clone(), self.event_sender.clone());
        self.attachment_store_sink = Some(attachment_store.get_event_sink());
        tokio::spawn(async move {
            attachment_store.run().await;
        });

        // Event loop: ends when the channel closes (all senders dropped).
        while let Some(event) = self.event_receiver.recv().await {
            log::debug!(target: target::EVENT, "Received event: {:?}", event);
            self.handle_event(event).await;
        }
    }
fn spawn_conversation_list_sync(&mut self) {
let mut db_clone = self.database.clone();
let signal_sender = self.signal_sender.clone();
self.runtime.spawn(async move {
let result = Self::sync_conversation_list(&mut db_clone, &signal_sender).await;
if let Err(e) = result {
log::error!(target: target::SYNC, "Error handling sync event: {}", e);
}
});
}
    /// Dispatch a single event from the event channel.
    ///
    /// Request/response events carry a one-shot `reply` sender. Each
    /// `reply.send(...).unwrap()` panics if the requester dropped its
    /// receiving end before the reply arrived.
    async fn handle_event(&mut self, event: Event) {
        match event {
            Event::GetVersion(reply) => {
                reply.send(self.version.clone()).unwrap();
            }
            Event::SyncConversationList(reply) => {
                self.spawn_conversation_list_sync();
                // This is a background operation, so return right away.
                reply.send(()).unwrap();
            }
            Event::SyncAllConversations(reply) => {
                let mut db_clone = self.database.clone();
                let signal_sender = self.signal_sender.clone();
                self.runtime.spawn(async move {
                    let result =
                        Self::sync_all_conversations_impl(&mut db_clone, &signal_sender).await;
                    if let Err(e) = result {
                        log::error!(target: target::SYNC, "Error handling sync event: {}", e);
                    }
                });
                // This is a background operation, so return right away.
                reply.send(()).unwrap();
            }
            Event::SyncConversation(conversation_id, reply) => {
                // Background message sync for one conversation.
                let mut db_clone = self.database.clone();
                let signal_sender = self.signal_sender.clone();
                self.runtime.spawn(async move {
                    let result = Self::sync_conversation_impl(
                        &mut db_clone,
                        &signal_sender,
                        conversation_id,
                    )
                    .await;
                    if let Err(e) = result {
                        log::error!(target: target::SYNC, "Error handling sync event: {}", e);
                    }
                });
                reply.send(()).unwrap();
            }
            Event::MarkConversationAsRead(conversation_id, reply) => {
                // Fire-and-forget: the server call runs on the background runtime.
                let mut db_clone = self.database.clone();
                self.runtime.spawn(async move {
                    let result = Self::mark_conversation_as_read_impl(&mut db_clone, conversation_id).await;
                    if let Err(e) = result {
                        log::error!(target: target::DAEMON, "Error handling mark conversation as read event: {}", e);
                    }
                });
                reply.send(()).unwrap();
            }
            Event::UpdateConversationMetadata(conversation, reply) => {
                let mut db_clone = self.database.clone();
                let signal_sender = self.signal_sender.clone();
                self.runtime.spawn(async move {
                    let result = Self::update_conversation_metadata_impl(&mut db_clone, conversation, &signal_sender).await;
                    if let Err(e) = result {
                        log::error!(target: target::DAEMON, "Error handling update conversation metadata event: {}", e);
                    }
                });
                reply.send(()).unwrap();
            }
            Event::UpdateStreamReconnected => {
                log::info!(target: target::UPDATES, "Update stream reconnected");
                // The ui client will respond differently, but we'll almost certainly want to do a sync-list in response to this.
                self.spawn_conversation_list_sync();
                // Send signal to the client that the update stream has been reconnected.
                self.signal_sender
                    .send(Signal::UpdateStreamReconnected)
                    .await
                    .unwrap();
            }
            Event::GetAllConversations(limit, offset, reply) => {
                let conversations = self.get_conversations_limit_offset(limit, offset).await;
                reply.send(conversations).unwrap();
            }
            Event::GetAllSettings(reply) => {
                // Fall back to defaults when settings can't be loaded.
                let settings = self.get_settings().await.unwrap_or_else(|e| {
                    log::error!(target: target::SETTINGS, "Failed to get settings: {:#?}", e);
                    Settings::default()
                });
                reply.send(settings).unwrap();
            }
            Event::UpdateSettings(settings, reply) => {
                let previous_settings = self.get_settings().await.unwrap_or_default();
                let previous_server_url = previous_settings.server_url;
                self.update_settings(&settings).await.unwrap_or_else(|e| {
                    log::error!(target: target::SETTINGS, "Failed to update settings: {}", e);
                });
                // Only trigger re-sync if both URLs are Some and different, or if one is Some and other is None
                if previous_server_url.as_deref() != settings.server_url.as_deref() {
                    // If the server url has changed, we'll need to do a full re-sync.
                    self.delete_all_conversations().await.unwrap_or_else(|e| {
                        log::error!(target: target::SYNC, "Failed to delete all conversations: {}", e);
                    });
                    // Do a sync-list to get the new conversations.
                    self.spawn_conversation_list_sync();
                    // Also restart the update monitor.
                    // `update_monitor_command_tx` is set in `run`; unwrap
                    // assumes settings updates only arrive after `run` started.
                    if let Err(e) = self
                        .update_monitor_command_tx
                        .as_ref()
                        .unwrap()
                        .try_send(UpdateMonitorCommand::Restart)
                    {
                        log::warn!(target: target::UPDATES, "Failed to send restart command to update monitor: {}", e);
                    }
                }
                reply.send(()).unwrap();
            }
            Event::GetMessages(conversation_id, last_message_id, reply) => {
                let messages = self.get_messages(conversation_id, last_message_id).await;
                reply.send(messages).unwrap();
            }
            Event::DeleteAllConversations(reply) => {
                self.delete_all_conversations().await.unwrap_or_else(|e| {
                    log::error!(target: target::SYNC, "Failed to delete all conversations: {}", e);
                });
                reply.send(()).unwrap();
            }
            Event::SendMessage(conversation_id, text, attachment_guids, reply) => {
                let conversation_id = conversation_id.clone();
                // Queue locally + hand off to the post office; reply with
                // the GUID assigned to the placeholder message.
                let uuid = self
                    .enqueue_outgoing_message(text, conversation_id.clone(), attachment_guids)
                    .await;
                reply.send(uuid).unwrap();
                // Send message updated signal, we have a placeholder message we will return.
                self.signal_sender
                    .send(Signal::MessagesUpdated(conversation_id.clone()))
                    .await
                    .unwrap();
            }
            Event::MessageSent(message, outgoing_message, conversation_id) => {
                log::info!(target: target::EVENT, "Daemon: message sent: {}", message.id);
                // Insert the message into the database.
                log::debug!(target: target::EVENT, "inserting sent message into database: {}", message.id);
                self.database
                    .lock()
                    .await
                    .with_repository(|r| r.insert_message(&conversation_id, message.into()))
                    .await
                    .unwrap();
                // Remove from outgoing messages.
                log::debug!(target: target::EVENT, "Removing message from outgoing messages: {}", outgoing_message.guid);
                self.outgoing_messages
                    .get_mut(&conversation_id)
                    .map(|messages| messages.retain(|m| m.guid != outgoing_message.guid));
                // Send message updated signal.
                self.signal_sender
                    .send(Signal::MessagesUpdated(conversation_id))
                    .await
                    .unwrap();
            }
            Event::GetAttachment(guid, reply) => {
                // Delegated to the attachment store, which replies directly.
                self.attachment_store_sink
                    .as_ref()
                    .unwrap()
                    .send(AttachmentStoreEvent::GetAttachmentInfo(guid, reply))
                    .await
                    .unwrap();
            }
            Event::DownloadAttachment(attachment_id, preview, reply) => {
                log::info!(target: target::ATTACHMENTS, "Download requested for attachment: {}, preview: {}", &attachment_id, preview);
                self.attachment_store_sink
                    .as_ref()
                    .unwrap()
                    .send(AttachmentStoreEvent::QueueDownloadAttachment(
                        attachment_id,
                        preview,
                    ))
                    .await
                    .unwrap();
                reply.send(()).unwrap();
            }
            Event::AttachmentDownloaded(attachment_id) => {
                log::info!(target: target::ATTACHMENTS, "Daemon: attachment downloaded: {}, sending signal", attachment_id);
                // Send signal to the client that the attachment has been downloaded.
                self.signal_sender
                    .send(Signal::AttachmentDownloaded(attachment_id))
                    .await
                    .unwrap();
            }
            Event::UploadAttachment(path, reply) => {
                // The attachment store replies with the upload result.
                self.attachment_store_sink
                    .as_ref()
                    .unwrap()
                    .send(AttachmentStoreEvent::QueueUploadAttachment(path, reply))
                    .await
                    .unwrap();
            }
            Event::AttachmentUploaded(upload_guid, attachment_guid) => {
                log::info!(target: target::ATTACHMENTS, "Daemon: attachment uploaded: {}, {}", upload_guid, attachment_guid);
                self.signal_sender
                    .send(Signal::AttachmentUploaded(upload_guid, attachment_guid))
                    .await
                    .unwrap();
            }
        }
    }
2025-05-15 20:11:10 -07:00
/// Panics if the signal receiver has already been taken.
pub fn obtain_signal_receiver(&mut self) -> Receiver<Signal> {
2025-04-27 22:44:05 -07:00
self.signal_receiver.take().unwrap()
}
2025-05-15 20:11:10 -07:00
async fn get_conversations_limit_offset(
&mut self,
limit: i32,
offset: i32,
) -> Vec<Conversation> {
self.database
.lock()
.await
.with_repository(|r| r.all_conversations(limit, offset).unwrap())
.await
}
2025-05-15 20:11:10 -07:00
    /// Return all stored messages for a conversation, with any not-yet-sent
    /// outgoing messages appended as placeholders.
    ///
    /// `_last_message_id` is currently unused; the full list is always
    /// returned.
    async fn get_messages(
        &mut self,
        conversation_id: String,
        _last_message_id: Option<MessageID>,
    ) -> Vec<Message> {
        // Get outgoing messages for this conversation.
        let empty_vec: Vec<OutgoingMessage> = vec![];
        let outgoing_messages: &Vec<OutgoingMessage> = self
            .outgoing_messages
            .get(&conversation_id)
            .unwrap_or(&empty_vec);

        self.database
            .lock()
            .await
            .with_repository(|r| {
                r.get_messages_for_conversation(&conversation_id)
                    .unwrap()
                    .into_iter()
                    .map(|m| m.into()) // Convert db::Message to daemon::Message
                    .chain(outgoing_messages.into_iter().map(|m| m.into()))
                    .collect()
            })
            .await
    }
2025-06-16 19:26:13 -07:00
async fn enqueue_outgoing_message(
&mut self,
text: String,
conversation_id: String,
attachment_guids: Vec<String>,
) -> Uuid {
let conversation_id = conversation_id.clone();
2025-05-02 14:22:43 -07:00
let outgoing_message = OutgoingMessage::builder()
.text(text)
.conversation_id(conversation_id.clone())
2025-06-12 20:36:40 -07:00
.file_transfer_guids(attachment_guids)
2025-05-02 14:22:43 -07:00
.build();
// Keep a record of this so we can provide a consistent model to the client.
2025-05-15 20:11:10 -07:00
self.outgoing_messages
.entry(conversation_id)
.or_insert(vec![])
.push(outgoing_message.clone());
2025-05-02 14:22:43 -07:00
let guid = outgoing_message.guid.clone();
2025-05-15 20:11:10 -07:00
self.post_office_sink
.send(PostOfficeEvent::EnqueueOutgoingMessage(outgoing_message))
.await
.unwrap();
2025-05-02 14:22:43 -07:00
guid
}
2025-05-15 20:11:10 -07:00
    /// Sync the conversation list (metadata only, no messages) from the
    /// server into the database, resolving participant contact ids as a
    /// side effect. Emits `ConversationsUpdated` when done.
    async fn sync_conversation_list(
        database: &mut Arc<Mutex<Database>>,
        signal_sender: &Sender<Signal>,
    ) -> Result<()> {
        log::info!(target: target::SYNC, "Starting list conversation sync");
        let mut client = Self::get_client_impl(database).await?;

        // Fetch conversations from server
        let fetched_conversations = client.get_conversations().await?;
        let db_conversations: Vec<kordophone_db::models::Conversation> = fetched_conversations
            .into_iter()
            .map(kordophone_db::models::Conversation::from)
            .collect();

        // Insert each conversation
        let num_conversations = db_conversations.len();
        let contact_resolver = ContactResolver::new(EDSContactResolverBackend::default());
        for conversation in db_conversations {
            // Insert or update conversation and its participants
            database
                .with_repository(|r| r.insert_conversation(conversation.clone()))
                .await?;

            // Resolve any new participants via the contact resolver and store their contact_id
            log::trace!(target: target::SYNC, "Resolving participants for conversation: {}", conversation.guid);
            let guid = conversation.guid.clone();
            if let Some(saved) = database
                .with_repository(|r| r.get_conversation_by_guid(&guid))
                .await?
            {
                for p in &saved.participants {
                    // Only remote participants with a row id but no contact
                    // id yet need resolution.
                    if let DbParticipant::Remote { id: Some(pid), display_name, contact_id: None } = p {
                        log::trace!(target: target::SYNC, "Resolving contact id for participant: {}", display_name);
                        if let Some(contact) = contact_resolver.resolve_contact_id(display_name) {
                            log::trace!(target: target::SYNC, "Resolved contact id for participant: {}", contact);
                            // Best-effort: a failed update is ignored.
                            let _ = database
                                .with_repository(|r| r.update_participant_contact(*pid, &contact))
                                .await;
                        } else {
                            log::trace!(target: target::SYNC, "No contact id found for participant: {}", display_name);
                        }
                    }
                }
            }
        }

        // Send conversations updated signal
        signal_sender.send(Signal::ConversationsUpdated).await?;
        log::info!(target: target::SYNC, "List synchronized: {} conversations", num_conversations);
        Ok(())
    }
2025-05-15 20:11:10 -07:00
async fn sync_all_conversations_impl(
database: &mut Arc<Mutex<Database>>,
signal_sender: &Sender<Signal>,
) -> Result<()> {
log::info!(target: target::SYNC, "Starting full conversation sync");
2025-04-27 23:27:21 -07:00
let mut client = Self::get_client_impl(database).await?;
2025-05-15 20:11:10 -07:00
// Fetch conversations from server
let fetched_conversations = client.get_conversations().await?;
2025-05-15 20:11:10 -07:00
let db_conversations: Vec<kordophone_db::models::Conversation> = fetched_conversations
.into_iter()
2025-04-28 16:06:51 -07:00
.map(kordophone_db::models::Conversation::from)
.collect();
2025-05-15 20:11:10 -07:00
// Process each conversation
let num_conversations = db_conversations.len();
for conversation in db_conversations {
let conversation_id = conversation.guid.clone();
2025-05-15 20:11:10 -07:00
// Insert the conversation
2025-05-15 20:11:10 -07:00
database
.with_repository(|r| r.insert_conversation(conversation))
.await?;
// Sync individual conversation.
Self::sync_conversation_impl(database, signal_sender, conversation_id).await?;
}
2025-04-27 22:44:05 -07:00
// Send conversations updated signal.
signal_sender.send(Signal::ConversationsUpdated).await?;
log::info!(target: target::SYNC, "Full sync complete, {} conversations processed", num_conversations);
Ok(())
2025-05-15 20:11:10 -07:00
}
2025-05-15 20:11:10 -07:00
async fn sync_conversation_impl(
database: &mut Arc<Mutex<Database>>,
signal_sender: &Sender<Signal>,
conversation_id: String,
) -> Result<()> {
2025-05-03 22:13:03 -07:00
log::debug!(target: target::SYNC, "Starting conversation sync for {}", conversation_id);
let mut client = Self::get_client_impl(database).await?;
// Check if conversation exists in database.
2025-05-15 20:11:10 -07:00
let conversation = database
.with_repository(|r| r.get_conversation_by_guid(&conversation_id))
.await?;
if conversation.is_none() {
2025-05-15 20:11:10 -07:00
// If the conversation doesn't exist, first do a conversation list sync.
log::warn!(target: target::SYNC, "Conversation {} not found, performing list sync", conversation_id);
Self::sync_conversation_list(database, signal_sender).await?;
}
// Fetch and sync messages for this conversation
2025-05-15 20:11:10 -07:00
let last_message_id = database
.with_repository(|r| -> Option<String> {
r.get_last_message_for_conversation(&conversation_id)
.unwrap_or(None)
.map(|m| m.id)
})
.await;
log::debug!(target: target::SYNC, "Fetching messages for conversation {}", &conversation_id);
log::debug!(target: target::SYNC, "Last message id: {:?}", last_message_id);
2025-05-15 20:11:10 -07:00
let messages = client
.get_messages(&conversation_id, None, None, last_message_id)
.await?;
// Filter messages that have an empty body, or a body that is just whitespace.
2025-06-16 19:26:13 -07:00
// This is a workaround for a bug in the server where it returns messages with an empty body, which is usually
// the typing indicator or stuff like that. In the future, we need to move to ChatItems instead of Messages.
let insertable_messages: Vec<kordophone::model::Message> = messages
.into_iter()
2025-06-16 19:26:13 -07:00
.filter(|m| !m.text.is_empty() && !m.text.trim().is_empty())
.collect();
let db_messages: Vec<kordophone_db::models::Message> = insertable_messages
2025-05-15 20:11:10 -07:00
.into_iter()
.map(kordophone_db::models::Message::from)
.collect();
// Insert each message
let num_messages = db_messages.len();
log::debug!(target: target::SYNC, "Inserting {} messages for conversation {}", num_messages, &conversation_id);
2025-05-15 20:11:10 -07:00
database
.with_repository(|r| r.insert_messages(&conversation_id, db_messages))
.await?;
// Send messages updated signal, if we actually inserted any messages.
if num_messages > 0 {
2025-05-15 20:11:10 -07:00
signal_sender
.send(Signal::MessagesUpdated(conversation_id.clone()))
.await?;
}
2025-05-03 22:13:03 -07:00
log::debug!(target: target::SYNC, "Synchronized {} messages for conversation {}", num_messages, &conversation_id);
Ok(())
}
2025-06-18 15:02:04 -07:00
async fn mark_conversation_as_read_impl(
database: &mut Arc<Mutex<Database>>,
conversation_id: String,
) -> Result<()> {
log::debug!(target: target::DAEMON, "Marking conversation as read: {}", conversation_id);
let mut client = Self::get_client_impl(database).await?;
client.mark_conversation_as_read(&conversation_id).await?;
Ok(())
}
async fn update_conversation_metadata_impl(
database: &mut Arc<Mutex<Database>>,
conversation: Conversation,
signal_sender: &Sender<Signal>,
) -> Result<()> {
log::debug!(target: target::DAEMON, "Updating conversation metadata: {}", conversation.guid);
let updated = database.with_repository(|r| r.merge_conversation_metadata(conversation)).await?;
if updated {
signal_sender
.send(Signal::ConversationsUpdated)
.await?;
}
Ok(())
}
async fn get_settings(&mut self) -> Result<Settings> {
let settings = self.database.with_settings(Settings::from_db).await?;
Ok(settings)
}
2025-04-27 23:27:21 -07:00
async fn update_settings(&mut self, settings: &Settings) -> Result<()> {
self.database.with_settings(|s| settings.save(s)).await
}
2025-05-15 20:11:10 -07:00
    /// Build an HTTP API client from the configured server URL, backed by
    /// the database-stored credentials.
    ///
    /// # Errors
    /// Returns `DaemonError::ClientNotConfigured` if no server URL is set.
    async fn get_client_impl(
        database: &mut Arc<Mutex<Database>>,
    ) -> Result<HTTPAPIClient<DatabaseAuthenticationStore>> {
        let settings = database.with_settings(Settings::from_db).await?;
        let server_url = settings
            .server_url
            .ok_or(DaemonError::ClientNotConfigured)?;
        let client = HTTPAPIClient::new(
            // NOTE(review): panics if the stored URL is malformed — consider
            // propagating a parse error instead of unwrapping.
            server_url.parse().unwrap(),
            DatabaseAuthenticationStore::new(database.clone()),
        );
        Ok(client)
    }
2025-05-01 01:08:13 -07:00
async fn delete_all_conversations(&mut self) -> Result<()> {
2025-05-15 20:11:10 -07:00
self.database
.with_repository(|r| -> Result<()> {
r.delete_all_conversations()?;
r.delete_all_messages()?;
Ok(())
})
.await?;
self.signal_sender
.send(Signal::ConversationsUpdated)
.await?;
2025-05-01 01:08:13 -07:00
Ok(())
}
2025-05-15 20:11:10 -07:00
fn get_data_dir() -> Option<PathBuf> {
ProjectDirs::from("net", "buzzert", "kordophonecd").map(|p| PathBuf::from(p.data_dir()))
}
fn get_database_path() -> PathBuf {
2025-05-15 20:11:10 -07:00
if let Some(data_dir) = Self::get_data_dir() {
data_dir.join("database.db")
} else {
// Fallback to a local path if we can't get the system directories
PathBuf::from("database.db")
}
2025-02-11 23:15:24 -08:00
}
}