
Add 'core/' from commit 'b0dfc4146ca0da535a87f8509aec68817fb2ab14'

git-subtree-dir: core
git-subtree-mainline: a07f3dcd23
git-subtree-split: b0dfc4146c
2025-09-06 19:33:33 -07:00
83 changed files with 12352 additions and 0 deletions


@@ -0,0 +1,16 @@
{
"permissions": {
"allow": [
"Bash(find:*)",
"Bash(cargo build:*)",
"Bash(diesel migration generate:*)",
"Bash(cargo clean:*)",
"Bash(git describe:*)",
"Bash(git add:*)",
"Bash(git commit:*)",
"Bash(git tag:*)",
"Bash(git stash:*)"
],
"deny": []
}
}

1
core/.gitignore vendored Normal file

@@ -0,0 +1 @@
**/target

77
core/CLAUDE.md Normal file

@@ -0,0 +1,77 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## Commands
### Build & Run
```bash
# Build all workspace members
cargo build
# Build specific package
cargo build -p kordophone
cargo build -p kordophone-db
cargo build -p kordophoned
cargo build -p kpcli
# Run daemon
cargo run --bin kordophoned
# Run CLI tool
cargo run --bin kpcli -- --help
```
### Testing
```bash
# Run all tests
cargo test
# Run tests for specific package
cargo test -p kordophone
cargo test -p kordophone-db
```
### Database Operations
```bash
# Database migrations (from kordophone-db directory)
cd kordophone-db
diesel migration run
diesel migration revert
```
## Architecture
This is a Rust workspace whose four main packages (plus a `utilities` crate) form a messaging client/daemon system:
### Core Components
- **kordophone**: Core library providing API client and models for messaging operations
- **kordophone-db**: Database layer using Diesel ORM with SQLite, handles conversations/messages storage
- **kordophoned**: Background daemon that syncs with messaging server and exposes D-Bus interface
- **kpcli**: Command-line interface for interacting with daemon and performing database operations
### Key Architecture Patterns
- **D-Bus IPC**: Daemon exposes functionality via D-Bus at `net.buzzert.kordophonecd`
- **Event-driven**: Daemon uses async channels for internal communication and D-Bus signals for external notifications
- **Repository Pattern**: Database access is abstracted through the repository layer in kordophone-db (see the sketch after this list)
- **Workspace Dependencies**: Packages depend on each other (kordophoned uses both kordophone and kordophone-db)
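For reference, here is a minimal sketch of that repository access pattern, based on the `kordophone-db` API added in this commit (it assumes `anyhow` and `tokio` — with the `macros` and `rt-multi-thread` features — as dependencies; error handling is abbreviated):
```rust
use kordophone_db::database::{Database, DatabaseAccess};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // In-memory database; the embedded migrations run automatically in `new()`.
    let mut db = Database::new_in_memory()?;

    // All reads and writes go through a closure that receives the Repository.
    let conversations = db
        .with_repository(|repo| repo.all_conversations(50, 0))
        .await?;

    for conversation in conversations {
        println!("{} ({:?})", conversation.guid, conversation.display_name);
    }
    Ok(())
}
```
The same `DatabaseAccess` trait is also implemented for `Arc<Mutex<Database>>`, so a single connection can be shared across async tasks.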
### Data Flow
1. kpcli/external clients interact with kordophoned via D-Bus
2. kordophoned manages HTTP API client connections to messaging server
3. Background sync processes fetch data and store via kordophone-db repository
4. D-Bus signals notify clients of data updates (ConversationsUpdated, MessagesUpdated)
### Important Files
- `kordophone-db/diesel.toml`: Database configuration
- `kordophone-db/migrations/`: Database schema definitions
- `kordophoned/include/net.buzzert.kordophonecd.Server.xml`: D-Bus interface definition
- `*/build.rs`: D-Bus code generation for dbus-crossroads interfaces
### Settings Storage
Settings are persisted in the SQLite database using a key-value store. Access them via the `Settings` struct in kordophone-db.
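A minimal sketch of reading and writing a setting through `DatabaseAccess::with_settings` (the `server_url` key and value are purely illustrative; `anyhow` and `log` are assumed as dependencies, and values are serialized with bincode by the `Settings` store):
```rust
use kordophone_db::database::{Database, DatabaseAccess};

async fn remember_server_url(db: &mut Database) -> anyhow::Result<()> {
    db.with_settings(|settings| {
        // `put` upserts the key; `get` deserializes back into the requested type.
        settings.put("server_url", &"https://example.invalid".to_string())?;
        let url: Option<String> = settings.get("server_url")?;
        log::debug!("stored server_url = {:?}", url);
        Ok(())
    })
    .await
}
```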

2760
core/Cargo.lock generated Normal file

File diff suppressed because it is too large

14
core/Cargo.toml Normal file

@@ -0,0 +1,14 @@
[workspace]
members = [
"kordophone",
"kordophone-db",
"kordophoned",
"kpcli",
"utilities",
]
resolver = "2"
[profile.release]
lto = "thin"
debug = 1
incremental = false

26
core/Dockerfile Normal file

@@ -0,0 +1,26 @@
FROM fedora:40
RUN dnf update -y && \
dnf install -y \
curl \
gcc \
gcc-c++ \
make \
openssl-devel \
sqlite-devel \
dbus-devel \
systemd-devel \
rpm-build \
&& dnf clean all
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"
RUN cargo install cargo-generate-rpm
WORKDIR /workspace
COPY . .
CMD ["make", "rpm"]

16
core/Makefile Normal file

@@ -0,0 +1,16 @@
.PHONY: all
all:
cargo build
.PHONY: release
release:
cargo build --release
.PHONY: rpm
rpm:
cargo build --release --workspace
strip -s target/release/kordophoned
strip -s target/release/kpcli
cargo generate-rpm -p kordophoned


@@ -0,0 +1,19 @@
[package]
name = "kordophone-db"
version = "1.0.0"
edition = "2021"
[dependencies]
anyhow = "1.0.94"
async-trait = "0.1.88"
bincode = "1.3.3"
chrono = "0.4.38"
diesel = { version = "2.2.6", features = ["chrono", "sqlite", "time"] }
diesel_migrations = { version = "2.2.0", features = ["sqlite"] }
kordophone = { path = "../kordophone" }
log = "0.4.27"
serde = { version = "1.0.215", features = ["derive"] }
serde_json = "1.0"
time = "0.3.37"
tokio = "1.44.2"
uuid = { version = "1.11.0", features = ["v4"] }


@@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId"]
[migrations_directory]
dir = "migrations"


@@ -0,0 +1,7 @@
-- This file should undo anything in `up.sql`
DROP TABLE IF EXISTS `messages`;
DROP TABLE IF EXISTS `conversation_messages`;
DROP TABLE IF EXISTS `settings`;
DROP TABLE IF EXISTS `conversations`;
DROP TABLE IF EXISTS `participants`;
DROP TABLE IF EXISTS `conversation_participants`;


@@ -0,0 +1,46 @@
-- Your SQL goes here
CREATE TABLE `messages`(
`id` TEXT NOT NULL PRIMARY KEY,
`text` TEXT NOT NULL,
`sender_participant_handle` TEXT,
`date` TIMESTAMP NOT NULL,
`file_transfer_guids` TEXT,
`attachment_metadata` TEXT,
FOREIGN KEY (`sender_participant_handle`) REFERENCES `participants`(`handle`)
);
CREATE TABLE `conversation_messages`(
`conversation_id` TEXT NOT NULL,
`message_id` TEXT NOT NULL,
PRIMARY KEY(`conversation_id`, `message_id`),
FOREIGN KEY (`conversation_id`) REFERENCES `conversations`(`id`),
FOREIGN KEY (`message_id`) REFERENCES `messages`(`id`)
);
CREATE TABLE `settings`(
`key` TEXT NOT NULL PRIMARY KEY,
`value` BINARY NOT NULL
);
CREATE TABLE `conversations`(
`id` TEXT NOT NULL PRIMARY KEY,
`unread_count` BIGINT NOT NULL,
`display_name` TEXT,
`last_message_preview` TEXT,
`date` TIMESTAMP NOT NULL
);
CREATE TABLE `participants`(
`handle` TEXT NOT NULL PRIMARY KEY,
`is_me` BOOL NOT NULL,
`contact_id` TEXT
);
CREATE TABLE `conversation_participants`(
`conversation_id` TEXT NOT NULL,
`participant_handle` TEXT NOT NULL,
PRIMARY KEY(`conversation_id`, `participant_handle`),
FOREIGN KEY (`conversation_id`) REFERENCES `conversations`(`id`),
FOREIGN KEY (`participant_handle`) REFERENCES `participants`(`handle`)
);


@@ -0,0 +1,94 @@
use anyhow::Result;
use async_trait::async_trait;
use diesel::prelude::*;
pub use std::sync::Arc;
pub use tokio::sync::Mutex;
use crate::repository::Repository;
use crate::settings::Settings;
use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!();
#[async_trait]
pub trait DatabaseAccess {
async fn with_repository<F, R>(&mut self, f: F) -> R
where
F: FnOnce(&mut Repository) -> R + Send,
R: Send;
async fn with_settings<F, R>(&mut self, f: F) -> R
where
F: FnOnce(&mut Settings) -> R + Send,
R: Send;
}
pub struct Database {
pub connection: SqliteConnection,
}
impl Database {
pub fn new(path: &str) -> Result<Self> {
let mut connection = SqliteConnection::establish(path)?;
// Performance optimisations for SQLite. These are safe defaults that speed
// up concurrent writes and cut the fsync cost dramatically while still
// keeping durability guarantees that are good enough for an end-user
// application.
diesel::sql_query("PRAGMA journal_mode = WAL;").execute(&mut connection)?;
diesel::sql_query("PRAGMA synchronous = NORMAL;").execute(&mut connection)?;
connection
.run_pending_migrations(MIGRATIONS)
.map_err(|e| anyhow::anyhow!("Error running migrations: {}", e))?;
Ok(Self { connection })
}
pub fn new_in_memory() -> Result<Self> {
Self::new(":memory:")
}
}
#[async_trait]
impl DatabaseAccess for Database {
async fn with_repository<F, R>(&mut self, f: F) -> R
where
F: FnOnce(&mut Repository) -> R + Send,
R: Send,
{
let mut repository = Repository::new(&mut self.connection);
f(&mut repository)
}
async fn with_settings<F, R>(&mut self, f: F) -> R
where
F: FnOnce(&mut Settings) -> R + Send,
R: Send,
{
let mut settings = Settings::new(&mut self.connection);
f(&mut settings)
}
}
#[async_trait]
impl DatabaseAccess for Arc<Mutex<Database>> {
async fn with_repository<F, R>(&mut self, f: F) -> R
where
F: FnOnce(&mut Repository) -> R + Send,
R: Send,
{
let mut database = self.lock().await;
database.with_repository(f).await
}
async fn with_settings<F, R>(&mut self, f: F) -> R
where
F: FnOnce(&mut Settings) -> R + Send,
R: Send,
{
let mut database = self.lock().await;
database.with_settings(f).await
}
}


@@ -0,0 +1,14 @@
pub mod database;
pub mod models;
pub mod repository;
pub mod schema;
pub mod settings;
#[cfg(test)]
mod tests;
pub mod target {
pub static REPOSITORY: &str = "repository";
}
pub use repository::Repository;


@@ -0,0 +1,142 @@
use crate::models::{message::Message, participant::Participant};
use chrono::{DateTime, NaiveDateTime};
use uuid::Uuid;
#[derive(Clone, Debug)]
pub struct Conversation {
pub guid: String,
pub unread_count: u16,
pub display_name: Option<String>,
pub last_message_preview: Option<String>,
pub date: NaiveDateTime,
pub participants: Vec<Participant>,
}
impl Conversation {
pub fn builder() -> ConversationBuilder {
ConversationBuilder::new()
}
pub fn into_builder(&self) -> ConversationBuilder {
ConversationBuilder {
guid: Some(self.guid.clone()),
date: self.date,
participants: None,
unread_count: Some(self.unread_count),
last_message_preview: self.last_message_preview.clone(),
display_name: self.display_name.clone(),
}
}
pub fn merge(&self, other: &Conversation, last_message: Option<&Message>) -> Conversation {
let mut new_conversation = self.clone();
new_conversation.unread_count = other.unread_count;
new_conversation.participants = other.participants.clone();
new_conversation.display_name = other.display_name.clone();
if let Some(last_message) = last_message {
if last_message.date > self.date {
new_conversation.date = last_message.date;
}
if !last_message.text.trim().is_empty() {
new_conversation.last_message_preview = Some(last_message.text.clone());
}
}
new_conversation
}
}
impl PartialEq for Conversation {
fn eq(&self, other: &Self) -> bool {
self.guid == other.guid
&& self.unread_count == other.unread_count
&& self.display_name == other.display_name
&& self.last_message_preview == other.last_message_preview
&& self.date == other.date
&& self.participants == other.participants
}
}
impl From<kordophone::model::Conversation> for Conversation {
fn from(value: kordophone::model::Conversation) -> Self {
Self {
guid: value.guid,
unread_count: u16::try_from(value.unread_count).unwrap(),
display_name: value.display_name,
last_message_preview: value.last_message_preview,
date: DateTime::from_timestamp(
value.date.unix_timestamp(),
value.date.unix_timestamp_nanos().try_into().unwrap_or(0),
)
.unwrap()
.naive_local(),
participants: value
.participant_display_names
.into_iter()
.map(|p| Participant::Remote {
handle: p,
contact_id: None,
}) // todo: this is wrong
.collect(),
}
}
}
#[derive(Default)]
pub struct ConversationBuilder {
guid: Option<String>,
date: NaiveDateTime,
unread_count: Option<u16>,
last_message_preview: Option<String>,
participants: Option<Vec<Participant>>,
display_name: Option<String>,
}
impl ConversationBuilder {
pub fn new() -> Self {
Self::default()
}
pub fn guid(mut self, guid: &str) -> Self {
self.guid = Some(guid.into());
self
}
pub fn date(mut self, date: NaiveDateTime) -> Self {
self.date = date;
self
}
pub fn unread_count(mut self, unread_count: u16) -> Self {
self.unread_count = Some(unread_count);
self
}
pub fn last_message_preview(mut self, last_message_preview: &str) -> Self {
self.last_message_preview = Some(last_message_preview.into());
self
}
pub fn participants(mut self, participants: Vec<Participant>) -> Self {
self.participants = Some(participants);
self
}
pub fn display_name(mut self, display_name: &str) -> Self {
self.display_name = Some(display_name.into());
self
}
pub fn build(&self) -> Conversation {
Conversation {
guid: self.guid.clone().unwrap_or(Uuid::new_v4().to_string()),
unread_count: self.unread_count.unwrap_or(0),
last_message_preview: self.last_message_preview.clone(),
display_name: self.display_name.clone(),
date: self.date,
participants: self.participants.clone().unwrap_or_default(),
}
}
}


@@ -0,0 +1,53 @@
use crate::models::{db::participant::InsertableRecord as InsertableParticipant, Conversation};
use chrono::NaiveDateTime;
use diesel::prelude::*;
#[derive(Queryable, Selectable, Insertable, AsChangeset, Clone, Identifiable)]
#[diesel(table_name = crate::schema::conversations)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct Record {
pub id: String,
pub unread_count: i64,
pub display_name: Option<String>,
pub last_message_preview: Option<String>,
pub date: NaiveDateTime,
}
impl From<Conversation> for Record {
fn from(conversation: Conversation) -> Self {
Self {
id: conversation.guid,
unread_count: conversation.unread_count as i64,
display_name: conversation.display_name,
last_message_preview: conversation.last_message_preview,
date: conversation.date,
}
}
}
// This implementation returns the insertable data types for the conversation and participants
impl From<Conversation> for (Record, Vec<InsertableParticipant>) {
fn from(conversation: Conversation) -> Self {
(
Record::from(conversation.clone()),
conversation
.participants
.into_iter()
.map(InsertableParticipant::from)
.collect(),
)
}
}
impl From<Record> for Conversation {
fn from(record: Record) -> Self {
Self {
guid: record.id,
unread_count: record.unread_count as u16,
display_name: record.display_name,
last_message_preview: record.last_message_preview,
date: record.date,
participants: vec![],
}
}
}


@@ -0,0 +1,70 @@
use crate::models::{Message, Participant};
use chrono::NaiveDateTime;
use diesel::prelude::*;
#[derive(Queryable, Selectable, Insertable, AsChangeset, Clone, Identifiable, Debug)]
#[diesel(table_name = crate::schema::messages)]
#[diesel(check_for_backend(diesel::sqlite::Sqlite))]
pub struct Record {
pub id: String,
pub sender_participant_handle: Option<String>,
pub text: String,
pub date: NaiveDateTime,
pub file_transfer_guids: Option<String>, // JSON array
pub attachment_metadata: Option<String>, // JSON string
}
impl From<Message> for Record {
fn from(message: Message) -> Self {
let file_transfer_guids = if message.file_transfer_guids.is_empty() {
None
} else {
Some(serde_json::to_string(&message.file_transfer_guids).unwrap_or_default())
};
let attachment_metadata = message
.attachment_metadata
.map(|metadata| serde_json::to_string(&metadata).unwrap_or_default());
Self {
id: message.id,
sender_participant_handle: match message.sender {
Participant::Me => None,
Participant::Remote { handle, .. } => Some(handle),
},
text: message.text,
date: message.date,
file_transfer_guids,
attachment_metadata,
}
}
}
impl From<Record> for Message {
fn from(record: Record) -> Self {
let file_transfer_guids = record
.file_transfer_guids
.and_then(|json| serde_json::from_str(&json).ok())
.unwrap_or_default();
let attachment_metadata = record
.attachment_metadata
.and_then(|json| serde_json::from_str(&json).ok());
let message_sender = match record.sender_participant_handle {
Some(handle) => Participant::Remote {
handle,
contact_id: None,
},
None => Participant::Me,
};
Self {
id: record.id,
sender: message_sender,
text: record.text,
date: record.date,
file_transfer_guids,
attachment_metadata,
}
}
}


@@ -0,0 +1,3 @@
pub mod conversation;
pub mod message;
pub mod participant;


@@ -0,0 +1,81 @@
use crate::models::Participant;
use crate::schema::conversation_participants;
use diesel::prelude::*;
#[derive(Queryable, Selectable, AsChangeset, Identifiable)]
#[diesel(table_name = crate::schema::participants)]
#[diesel(primary_key(handle))]
pub struct Record {
pub handle: String,
pub is_me: bool,
pub contact_id: Option<String>,
}
#[derive(Insertable)]
#[diesel(table_name = crate::schema::participants)]
pub struct InsertableRecord {
pub handle: String,
pub is_me: bool,
pub contact_id: Option<String>,
}
impl From<Participant> for InsertableRecord {
fn from(participant: Participant) -> Self {
match participant {
Participant::Me => InsertableRecord {
handle: "me".to_string(),
is_me: true,
contact_id: None,
},
Participant::Remote {
handle, contact_id, ..
} => InsertableRecord {
handle,
is_me: false,
contact_id,
},
}
}
}
#[derive(Identifiable, Selectable, Queryable, Associations, Debug)]
#[diesel(belongs_to(super::conversation::Record, foreign_key = conversation_id))]
#[diesel(belongs_to(Record, foreign_key = participant_handle))]
#[diesel(table_name = conversation_participants)]
#[diesel(primary_key(conversation_id, participant_handle))]
pub struct ConversationParticipant {
pub conversation_id: String,
pub participant_handle: String,
}
impl From<Record> for Participant {
fn from(record: Record) -> Self {
if record.is_me {
Participant::Me
} else {
Participant::Remote {
handle: record.handle.clone(),
contact_id: record.contact_id,
}
}
}
}
impl From<Participant> for Record {
fn from(participant: Participant) -> Self {
match participant {
Participant::Me => Record {
handle: "me".to_string(),
is_me: true,
contact_id: None,
},
Participant::Remote {
handle, contact_id, ..
} => Record {
handle,
is_me: false,
contact_id,
},
}
}
}


@@ -0,0 +1,148 @@
use crate::models::participant::Participant;
use chrono::{DateTime, NaiveDateTime};
use kordophone::model::message::AttachmentMetadata;
use kordophone::model::outgoing_message::OutgoingMessage;
use std::collections::HashMap;
use uuid::Uuid;
#[derive(Clone, Debug)]
pub struct Message {
pub id: String,
pub sender: Participant,
pub text: String,
pub date: NaiveDateTime,
pub file_transfer_guids: Vec<String>,
pub attachment_metadata: Option<HashMap<String, AttachmentMetadata>>,
}
impl Message {
pub fn builder() -> MessageBuilder {
MessageBuilder::new()
}
}
impl From<kordophone::model::Message> for Message {
fn from(value: kordophone::model::Message) -> Self {
let sender_participant = match value.sender {
Some(sender) => Participant::Remote {
contact_id: None,
// Weird server quirk: some sender handles contain control and bidirectional formatting characters.
handle: sender
.chars()
.filter(|c| {
!c.is_control()
&& !matches!(
c,
'\u{202A}' | // LRE
'\u{202B}' | // RLE
'\u{202C}' | // PDF
'\u{202D}' | // LRO
'\u{202E}' | // RLO
'\u{2066}' | // LRI
'\u{2067}' | // RLI
'\u{2068}' | // FSI
'\u{2069}' // PDI
)
})
.collect::<String>(),
},
None => Participant::Me,
};
Self {
id: value.guid,
sender: sender_participant,
text: value.text,
date: DateTime::from_timestamp(
value.date.unix_timestamp(),
value.date.unix_timestamp_nanos().try_into().unwrap_or(0),
)
.unwrap()
.naive_local(),
file_transfer_guids: value.file_transfer_guids,
attachment_metadata: value.attachment_metadata,
}
}
}
impl From<&OutgoingMessage> for Message {
fn from(value: &OutgoingMessage) -> Self {
Self {
id: value.guid.to_string(),
sender: Participant::Me,
text: value.text.clone(),
date: value.date,
file_transfer_guids: Vec::new(), // Outgoing messages don't have file transfer GUIDs initially
attachment_metadata: None, // Outgoing messages don't have attachment metadata initially
}
}
}
pub struct MessageBuilder {
id: Option<String>,
sender: Option<Participant>,
text: Option<String>,
date: Option<NaiveDateTime>,
file_transfer_guids: Option<Vec<String>>,
attachment_metadata: Option<HashMap<String, AttachmentMetadata>>,
}
impl Default for MessageBuilder {
fn default() -> Self {
Self::new()
}
}
impl MessageBuilder {
pub fn new() -> Self {
Self {
id: None,
sender: None,
text: None,
date: None,
file_transfer_guids: None,
attachment_metadata: None,
}
}
pub fn sender(mut self, sender: Participant) -> Self {
self.sender = Some(sender);
self
}
pub fn text(mut self, text: String) -> Self {
self.text = Some(text);
self
}
pub fn date(mut self, date: NaiveDateTime) -> Self {
self.date = Some(date);
self
}
pub fn file_transfer_guids(mut self, file_transfer_guids: Vec<String>) -> Self {
self.file_transfer_guids = Some(file_transfer_guids);
self
}
pub fn attachment_metadata(
mut self,
attachment_metadata: HashMap<String, AttachmentMetadata>,
) -> Self {
self.attachment_metadata = Some(attachment_metadata);
self
}
pub fn build(self) -> Message {
Message {
id: self.id.unwrap_or_else(|| Uuid::new_v4().to_string()),
sender: self.sender.unwrap_or(Participant::Me),
text: self.text.unwrap_or_default(),
date: self.date.unwrap_or_else(|| chrono::Utc::now().naive_utc()),
file_transfer_guids: self.file_transfer_guids.unwrap_or_default(),
attachment_metadata: self.attachment_metadata,
}
}
}


@@ -0,0 +1,8 @@
pub mod conversation;
pub mod db;
pub mod message;
pub mod participant;
pub use conversation::Conversation;
pub use message::Message;
pub use participant::Participant;


@@ -0,0 +1,22 @@
#[derive(Debug, Clone, PartialEq)]
pub enum Participant {
Me,
Remote {
handle: String,
contact_id: Option<String>,
},
}
impl Participant {
pub fn handle(&self) -> String {
match self {
Participant::Me => "(Me)".to_string(),
Participant::Remote { handle, .. } => handle.clone(),
}
}
// Temporary alias for backward compatibility
pub fn display_name(&self) -> String {
self.handle()
}
}


@@ -0,0 +1,410 @@
use anyhow::Result;
use diesel::prelude::*;
use diesel::query_dsl::BelongingToDsl;
use std::collections::HashMap;
use crate::{
models::{
db::conversation::Record as ConversationRecord,
db::message::Record as MessageRecord,
db::participant::{
ConversationParticipant, InsertableRecord as InsertableParticipantRecord,
Record as ParticipantRecord,
},
Conversation, Message, Participant,
},
schema, target,
};
pub struct Repository<'a> {
connection: &'a mut SqliteConnection,
}
impl<'a> Repository<'a> {
pub fn new(connection: &'a mut SqliteConnection) -> Self {
Self { connection }
}
pub fn insert_conversation(&mut self, conversation: Conversation) -> Result<()> {
use crate::schema::conversation_participants::dsl::*;
use crate::schema::conversations::dsl::*;
use crate::schema::participants::dsl::*;
let (db_conversation, db_participants) = conversation.into();
diesel::replace_into(conversations)
.values(&db_conversation)
.execute(self.connection)?;
for participant in &db_participants {
diesel::insert_into(participants)
.values(participant)
.on_conflict_do_nothing()
.execute(self.connection)?;
}
// Sqlite backend doesn't support batch insert, so we have to do this manually
for participant in &db_participants {
diesel::replace_into(conversation_participants)
.values((
conversation_id.eq(&db_conversation.id),
participant_handle.eq(&participant.handle),
))
.execute(self.connection)?;
}
Ok(())
}
pub fn get_conversation_by_guid(&mut self, match_guid: &str) -> Result<Option<Conversation>> {
use crate::schema::conversations::dsl::*;
use crate::schema::participants::dsl::*;
let result = conversations
.find(match_guid)
.first::<ConversationRecord>(self.connection)
.optional()?;
if let Some(conversation) = result {
let db_participants = ConversationParticipant::belonging_to(&conversation)
.inner_join(participants)
.select(ParticipantRecord::as_select())
.load::<ParticipantRecord>(self.connection)?;
let mut model_conversation: Conversation = conversation.into();
model_conversation.participants =
db_participants.into_iter().map(|p| p.into()).collect();
return Ok(Some(model_conversation));
}
Ok(None)
}
pub fn all_conversations(&mut self, limit: i32, offset: i32) -> Result<Vec<Conversation>> {
use crate::schema::conversations::dsl::*;
use crate::schema::participants::dsl::*;
let db_conversations = conversations
.order(schema::conversations::date.desc())
.offset(offset as i64)
.limit(limit as i64)
.load::<ConversationRecord>(self.connection)?;
let mut result = Vec::new();
for db_conversation in db_conversations {
let db_participants = ConversationParticipant::belonging_to(&db_conversation)
.inner_join(participants)
.select(ParticipantRecord::as_select())
.load::<ParticipantRecord>(self.connection)?;
let mut model_conversation: Conversation = db_conversation.into();
model_conversation.participants =
db_participants.into_iter().map(|p| p.into()).collect();
result.push(model_conversation);
}
Ok(result)
}
pub fn insert_message(&mut self, conversation_guid: &str, message: Message) -> Result<()> {
use crate::schema::conversation_messages::dsl::*;
use crate::schema::messages::dsl::*;
// Handle participant if message has a remote sender
let sender = message.sender.clone();
let mut db_message: MessageRecord = message.into();
db_message.sender_participant_handle = self.get_or_create_participant(&sender);
diesel::replace_into(messages)
.values(&db_message)
.execute(self.connection)?;
diesel::replace_into(conversation_messages)
.values((
conversation_id.eq(conversation_guid),
message_id.eq(&db_message.id),
))
.execute(self.connection)?;
// Update conversation date
self.update_conversation_metadata(conversation_guid)?;
Ok(())
}
pub fn insert_messages(
&mut self,
conversation_guid: &str,
in_messages: Vec<Message>,
) -> Result<()> {
use crate::schema::conversation_messages::dsl::*;
use crate::schema::messages::dsl::*;
use crate::schema::participants::dsl as participants_dsl;
#[derive(Insertable)]
#[diesel(table_name = crate::schema::conversation_messages)]
struct InsertableConversationMessage {
pub conversation_id: String,
pub message_id: String,
}
if in_messages.is_empty() {
return Ok(());
}
// Use a single transaction for everything; this removes the implicit
// autocommit after every statement, which costs a lot when we have many
// individual queries.
self.connection
.transaction::<_, diesel::result::Error, _>(|conn| {
// Cache participant handles we have already looked up / created. In a
// typical conversation we only have a handful of participants, but we
// might be processing hundreds of messages; avoiding an extra SELECT per
// message saves a lot of round-trips to SQLite.
let mut participant_cache: HashMap<String, String> = HashMap::new();
// Prepare collections for the batch inserts.
let mut db_messages: Vec<MessageRecord> = Vec::with_capacity(in_messages.len());
let mut conv_msg_records: Vec<InsertableConversationMessage> =
Vec::with_capacity(in_messages.len());
for message in in_messages {
// Resolve/insert the sender participant only once per display name.
let sender_handle_opt = match &message.sender {
Participant::Me => None,
Participant::Remote { handle, contact_id } => {
if participant_cache.contains_key(handle) {
Some(handle.clone())
} else {
// Ensure participant exists in DB
let exists: Option<String> = participants_dsl::participants
.filter(participants_dsl::handle.eq(handle))
.select(participants_dsl::handle)
.first::<String>(conn)
.optional()?;
if exists.is_none() {
let new_participant = InsertableParticipantRecord {
handle: handle.clone(),
is_me: false,
contact_id: contact_id.clone(),
};
diesel::insert_into(participants_dsl::participants)
.values(&new_participant)
.execute(conn)?;
}
participant_cache.insert(handle.clone(), handle.clone());
Some(handle.clone())
}
}
};
// Convert the message into its DB form.
let mut db_message: MessageRecord = message.into();
db_message.sender_participant_handle = sender_handle_opt.clone();
conv_msg_records.push(InsertableConversationMessage {
conversation_id: conversation_guid.to_string(),
message_id: db_message.id.clone(),
});
db_messages.push(db_message);
}
// Execute the actual batch inserts.
diesel::replace_into(messages)
.values(&db_messages)
.execute(conn)?;
diesel::replace_into(conversation_messages)
.values(&conv_msg_records)
.execute(conn)?;
// Update conversation metadata quickly using the last message we just
// processed instead of re-querying the DB.
if let Some(last_msg) = db_messages.last() {
use crate::schema::conversations::dsl as conv_dsl;
diesel::update(
conv_dsl::conversations.filter(conv_dsl::id.eq(conversation_guid)),
)
.set((
conv_dsl::date.eq(last_msg.date),
conv_dsl::last_message_preview
.eq::<Option<String>>(Some(last_msg.text.clone())),
))
.execute(conn)?;
}
Ok(())
})?;
// TODO: May need to update conversation metadata here, but this has a perf impact.
// Ideally we would consolidate this in the code above, assuming we're only inserting *new* messages, but
// this may not necessarily be the case.
Ok(())
}
pub fn get_messages_for_conversation(
&mut self,
conversation_guid: &str,
) -> Result<Vec<Message>> {
use crate::schema::conversation_messages::dsl::*;
use crate::schema::messages::dsl::*;
use crate::schema::participants::dsl::*;
let message_records = conversation_messages
.filter(conversation_id.eq(conversation_guid))
.inner_join(messages)
.select(MessageRecord::as_select())
.order_by(schema::messages::date.asc())
.load::<MessageRecord>(self.connection)?;
let mut result = Vec::new();
for message_record in message_records {
let mut message: Message = message_record.clone().into();
// If the message references a sender participant, load the participant info
if let Some(sender_handle) = message_record.sender_participant_handle {
let participant = participants
.find(sender_handle)
.first::<ParticipantRecord>(self.connection)?;
message.sender = participant.into();
}
result.push(message);
}
Ok(result)
}
pub fn get_last_message_for_conversation(
&mut self,
conversation_guid: &str,
) -> Result<Option<Message>> {
use crate::schema::conversation_messages::dsl::*;
use crate::schema::messages::dsl::*;
let message_record = conversation_messages
.filter(conversation_id.eq(conversation_guid))
.inner_join(messages)
.select(MessageRecord::as_select())
.order_by(schema::messages::date.desc())
.first::<MessageRecord>(self.connection)
.optional()?;
Ok(message_record.map(|r| r.into()))
}
pub fn delete_all_conversations(&mut self) -> Result<()> {
use crate::schema::conversations::dsl::*;
diesel::delete(conversations).execute(self.connection)?;
Ok(())
}
pub fn delete_all_messages(&mut self) -> Result<()> {
use crate::schema::messages::dsl::*;
diesel::delete(messages).execute(self.connection)?;
Ok(())
}
pub fn merge_conversation_metadata(&mut self, in_conversation: Conversation) -> Result<bool> {
let mut updated = false;
let conversation = self.get_conversation_by_guid(&in_conversation.guid)?;
if let Some(conversation) = conversation {
let merged_conversation = conversation.merge(&in_conversation, None);
if merged_conversation != conversation {
self.insert_conversation(merged_conversation)?;
updated = true;
}
}
log::debug!(target: target::REPOSITORY, "Merged conversation metadata: {} updated: {}", in_conversation.guid, updated);
Ok(updated)
}
fn update_conversation_metadata(&mut self, conversation_guid: &str) -> Result<()> {
let conversation = self.get_conversation_by_guid(conversation_guid)?;
if let Some(conversation) = conversation {
if let Some(last_message) = self.get_last_message_for_conversation(conversation_guid)? {
log::debug!(
target: target::REPOSITORY,
"Updating conversation metadata: {} message: {:?}",
conversation_guid,
last_message
);
let merged_conversation = conversation.merge(&conversation, Some(&last_message));
self.insert_conversation(merged_conversation)?;
}
}
Ok(())
}
// Helper function to get the last inserted row ID.
// This is a workaround since the SQLite backend doesn't support `RETURNING`.
// Huge caveat: this depends on whatever the last insert was, which prevents concurrent inserts.
fn last_insert_id(&mut self) -> Result<i32> {
Ok(
diesel::select(diesel::dsl::sql::<diesel::sql_types::Integer>(
"last_insert_rowid()",
))
.get_result(self.connection)?,
)
}
/// Update the contact_id for an existing participant record.
pub fn update_participant_contact(
&mut self,
participant_handle: &str,
new_contact_id: &str,
) -> Result<()> {
use crate::schema::participants::dsl::*;
log::debug!(target: target::REPOSITORY, "Updating participant contact {} => {}", participant_handle, new_contact_id);
diesel::update(participants.filter(handle.eq(participant_handle)))
.set(contact_id.eq(Some(new_contact_id.to_string())))
.execute(self.connection)?;
Ok(())
}
fn get_or_create_participant(&mut self, participant: &Participant) -> Option<String> {
match participant {
Participant::Me => None,
Participant::Remote {
handle: p_handle,
contact_id: c_id,
..
} => {
use crate::schema::participants::dsl::*;
let existing_participant = participants
.filter(handle.eq(p_handle))
.first::<ParticipantRecord>(self.connection)
.optional()
.unwrap();
if existing_participant.is_none() {
let participant_record = InsertableParticipantRecord {
handle: p_handle.clone(),
is_me: false,
contact_id: c_id.clone(),
};
diesel::insert_into(participants)
.values(&participant_record)
.execute(self.connection)
.unwrap();
}
Some(p_handle.clone())
}
}
}
}


@@ -0,0 +1,66 @@
// When this file changes, run the following command to generate a new migration:
// DATABASE_URL=/tmp/db.sql diesel migration generate --diff-schema create_conversations
diesel::table! {
conversations (id) {
id -> Text,
unread_count -> BigInt,
display_name -> Nullable<Text>,
last_message_preview -> Nullable<Text>,
date -> Timestamp,
}
}
diesel::table! {
participants (handle) {
handle -> Text,
is_me -> Bool,
contact_id -> Nullable<Text>,
}
}
diesel::table! {
conversation_participants (conversation_id, participant_handle) {
conversation_id -> Text,
participant_handle -> Text,
}
}
diesel::table! {
messages (id) {
id -> Text, // guid
text -> Text,
sender_participant_handle -> Nullable<Text>,
date -> Timestamp,
file_transfer_guids -> Nullable<Text>, // JSON array of file transfer GUIDs
attachment_metadata -> Nullable<Text>, // JSON string of attachment metadata
}
}
diesel::table! {
conversation_messages (conversation_id, message_id) {
conversation_id -> Text, // guid
message_id -> Text, // guid
}
}
diesel::table! {
settings (key) {
key -> Text,
value -> Binary,
}
}
diesel::joinable!(conversation_participants -> conversations (conversation_id));
diesel::joinable!(conversation_participants -> participants (participant_handle));
diesel::joinable!(messages -> participants (sender_participant_handle));
diesel::joinable!(conversation_messages -> conversations (conversation_id));
diesel::joinable!(conversation_messages -> messages (message_id));
diesel::allow_tables_to_appear_in_same_query!(
conversations,
participants,
conversation_participants,
messages,
conversation_messages,
settings,
);


@@ -0,0 +1,63 @@
use anyhow::Result;
use diesel::*;
use serde::{de::DeserializeOwned, Serialize};
#[derive(Insertable, Queryable, AsChangeset)]
#[diesel(table_name = crate::schema::settings)]
struct SettingsRow<'a> {
key: &'a str,
value: &'a [u8],
}
pub struct Settings<'a> {
connection: &'a mut SqliteConnection,
}
impl<'a> Settings<'a> {
pub fn new(connection: &'a mut SqliteConnection) -> Self {
Self { connection }
}
pub fn put<T: Serialize>(&mut self, k: &str, v: &T) -> Result<()> {
use crate::schema::settings::dsl::*;
let bytes = bincode::serialize(v)?;
diesel::insert_into(settings)
.values(SettingsRow {
key: k,
value: &bytes,
})
.on_conflict(key)
.do_update()
.set(value.eq(&bytes))
.execute(self.connection)?;
Ok(())
}
pub fn get<T: DeserializeOwned>(&mut self, k: &str) -> Result<Option<T>> {
use crate::schema::settings::dsl::*;
let blob: Option<Vec<u8>> = settings
.select(value)
.filter(key.eq(k))
.first(self.connection)
.optional()?;
Ok(match blob {
Some(b) => Some(bincode::deserialize(&b)?),
None => None,
})
}
pub fn del(&mut self, k: &str) -> Result<usize> {
use crate::schema::settings::dsl::*;
Ok(diesel::delete(settings.filter(key.eq(k))).execute(self.connection)?)
}
pub fn list_keys(&mut self) -> Result<Vec<String>> {
use crate::schema::settings::dsl::*;
let keys: Vec<String> = settings.select(key).load(self.connection)?;
Ok(keys)
}
}


@@ -0,0 +1,423 @@
use crate::{
database::{Database, DatabaseAccess},
models::{
conversation::{Conversation, ConversationBuilder},
message::Message,
participant::Participant,
},
};
// Helper function to compare participants ignoring database IDs
fn participants_equal_ignoring_id(a: &Participant, b: &Participant) -> bool {
match (a, b) {
(Participant::Me, Participant::Me) => true,
(
Participant::Remote { handle: name_a, .. },
Participant::Remote { handle: name_b, .. },
) => name_a == name_b,
_ => false,
}
}
fn participants_vec_equal_ignoring_id(a: &[Participant], b: &[Participant]) -> bool {
if a.len() != b.len() {
return false;
}
// For each participant in a, check if there is a matching participant in b
a.iter().all(|a_participant| {
b.iter().any(|b_participant| participants_equal_ignoring_id(a_participant, b_participant))
}) &&
// Also check the reverse to ensure no duplicates
b.iter().all(|b_participant| {
a.iter().any(|a_participant| participants_equal_ignoring_id(b_participant, a_participant))
})
}
#[tokio::test]
async fn test_database_init() {
let _ = Database::new_in_memory().unwrap();
}
#[tokio::test]
async fn test_add_conversation() {
let mut db = Database::new_in_memory().unwrap();
db.with_repository(|repository| {
let guid = "test";
let test_conversation = Conversation::builder()
.guid(guid)
.unread_count(2)
.display_name("Test Conversation")
.build();
repository
.insert_conversation(test_conversation.clone())
.unwrap();
// Try to fetch with id now
let conversation = repository.get_conversation_by_guid(guid).unwrap().unwrap();
assert_eq!(conversation.guid, "test");
// Modify the conversation and update it
let modified_conversation = test_conversation
.into_builder()
.display_name("Modified Conversation")
.build();
repository
.insert_conversation(modified_conversation.clone())
.unwrap();
// Make sure we still only have one conversation.
let all_conversations = repository.all_conversations(i32::MAX, 0).unwrap();
assert_eq!(all_conversations.len(), 1);
// And make sure the display name was updated
let conversation = repository.get_conversation_by_guid(guid).unwrap().unwrap();
assert_eq!(conversation.display_name.unwrap(), "Modified Conversation");
})
.await;
}
#[tokio::test]
async fn test_conversation_participants() {
let mut db = Database::new_in_memory().unwrap();
db.with_repository(|repository| {
let participants: Vec<Participant> = vec!["one".into(), "two".into()];
let guid = uuid::Uuid::new_v4().to_string();
let conversation = ConversationBuilder::new()
.guid(&guid)
.display_name("Test")
.participants(participants.clone())
.build();
repository.insert_conversation(conversation).unwrap();
let read_conversation = repository.get_conversation_by_guid(&guid).unwrap().unwrap();
let read_participants = read_conversation.participants;
assert!(participants_vec_equal_ignoring_id(
&participants,
&read_participants
));
// Try making another conversation with the same participants
let conversation = ConversationBuilder::new()
.display_name("A Different Test")
.participants(participants.clone())
.build();
repository.insert_conversation(conversation).unwrap();
let read_conversation = repository.get_conversation_by_guid(&guid).unwrap().unwrap();
let read_participants: Vec<Participant> = read_conversation.participants;
assert!(participants_vec_equal_ignoring_id(
&participants,
&read_participants
));
})
.await;
}
#[tokio::test]
async fn test_all_conversations_with_participants() {
let mut db = Database::new_in_memory().unwrap();
db.with_repository(|repository| {
// Create two conversations with different participants
let participants1: Vec<Participant> = vec!["one".into(), "two".into()];
let participants2: Vec<Participant> = vec!["three".into(), "four".into()];
let guid1 = uuid::Uuid::new_v4().to_string();
let conversation1 = ConversationBuilder::new()
.guid(&guid1)
.display_name("Test 1")
.participants(participants1.clone())
.build();
let guid2 = uuid::Uuid::new_v4().to_string();
let conversation2 = ConversationBuilder::new()
.guid(&guid2)
.display_name("Test 2")
.participants(participants2.clone())
.build();
// Insert both conversations
repository.insert_conversation(conversation1).unwrap();
repository.insert_conversation(conversation2).unwrap();
// Get all conversations and verify the results
let all_conversations = repository.all_conversations(i32::MAX, 0).unwrap();
assert_eq!(all_conversations.len(), 2);
// Find and verify each conversation's participants
let conv1 = all_conversations.iter().find(|c| c.guid == guid1).unwrap();
let conv2 = all_conversations.iter().find(|c| c.guid == guid2).unwrap();
assert!(participants_vec_equal_ignoring_id(
&conv1.participants,
&participants1
));
assert!(participants_vec_equal_ignoring_id(
&conv2.participants,
&participants2
));
})
.await;
}
#[tokio::test]
async fn test_messages() {
let mut db = Database::new_in_memory().unwrap();
db.with_repository(|repository| {
// First create a conversation with participants
let participants = vec!["Alice".into(), "Bob".into()];
let conversation = ConversationBuilder::new()
.display_name("Test Chat")
.participants(participants)
.build();
let conversation_id = conversation.guid.clone();
repository.insert_conversation(conversation).unwrap();
// Create and insert a message from Me
let message1 = Message::builder()
.text("Hello everyone!".to_string())
.build();
// Create and insert a message from a remote participant
let message2 = Message::builder()
.text("Hi there!".to_string())
.sender("Alice".into())
.build();
// Insert both messages
repository
.insert_message(&conversation_id, message1.clone())
.unwrap();
repository
.insert_message(&conversation_id, message2.clone())
.unwrap();
// Retrieve messages
let messages = repository
.get_messages_for_conversation(&conversation_id)
.unwrap();
assert_eq!(messages.len(), 2);
// Verify first message (from Me)
let retrieved_message1 = messages.iter().find(|m| m.id == message1.id).unwrap();
assert_eq!(retrieved_message1.text, "Hello everyone!");
assert!(matches!(retrieved_message1.sender, Participant::Me));
// Verify second message (from Alice)
let retrieved_message2 = messages.iter().find(|m| m.id == message2.id).unwrap();
assert_eq!(retrieved_message2.text, "Hi there!");
if let Participant::Remote { handle, .. } = &retrieved_message2.sender {
assert_eq!(handle, "Alice");
} else {
panic!(
"Expected Remote participant. Got: {:?}",
retrieved_message2.sender
);
}
})
.await;
}
#[tokio::test]
async fn test_message_ordering() {
let mut db = Database::new_in_memory().unwrap();
db.with_repository(|repository| {
// Create a conversation
let conversation = ConversationBuilder::new().display_name("Test Chat").build();
let conversation_id = conversation.guid.clone();
repository.insert_conversation(conversation).unwrap();
// Create messages with specific timestamps
let now = chrono::Utc::now().naive_utc();
let message1 = Message::builder()
.text("First message".to_string())
.date(now)
.build();
let message2 = Message::builder()
.text("Second message".to_string())
.date(now + chrono::Duration::minutes(1))
.build();
let message3 = Message::builder()
.text("Third message".to_string())
.date(now + chrono::Duration::minutes(2))
.build();
// Insert messages
repository
.insert_message(&conversation_id, message1)
.unwrap();
repository
.insert_message(&conversation_id, message2)
.unwrap();
repository
.insert_message(&conversation_id, message3)
.unwrap();
// Retrieve messages and verify order
let messages = repository
.get_messages_for_conversation(&conversation_id)
.unwrap();
assert_eq!(messages.len(), 3);
// Messages should be ordered by date
for i in 1..messages.len() {
assert!(messages[i].date > messages[i - 1].date);
}
})
.await;
}
#[tokio::test]
async fn test_insert_messages_batch() {
let mut db = Database::new_in_memory().unwrap();
db.with_repository(|repository| {
// Create a conversation with two remote participants
let participants: Vec<Participant> = vec!["Alice".into(), "Bob".into()];
let conversation = ConversationBuilder::new()
.display_name("Batch Chat")
.participants(participants.clone())
.build();
let conversation_id = conversation.guid.clone();
repository.insert_conversation(conversation).unwrap();
// Prepare a batch of messages with increasing timestamps
let now = chrono::Utc::now().naive_utc();
let message1 = Message::builder().text("Hi".to_string()).date(now).build();
let message2 = Message::builder()
.text("Hello".to_string())
.sender("Alice".into())
.date(now + chrono::Duration::seconds(1))
.build();
let message3 = Message::builder()
.text("How are you?".to_string())
.sender("Bob".into())
.date(now + chrono::Duration::seconds(2))
.build();
let message4 = Message::builder()
.text("Great!".to_string())
.date(now + chrono::Duration::seconds(3))
.build();
let original_messages = vec![
message1.clone(),
message2.clone(),
message3.clone(),
message4.clone(),
];
// Batch insert the messages
repository
.insert_messages(&conversation_id, original_messages.clone())
.unwrap();
// Retrieve messages and verify
let retrieved_messages = repository
.get_messages_for_conversation(&conversation_id)
.unwrap();
assert_eq!(retrieved_messages.len(), original_messages.len());
// Ensure ordering by date
for i in 1..retrieved_messages.len() {
assert!(retrieved_messages[i].date > retrieved_messages[i - 1].date);
}
// Verify that all messages are present with correct content and sender
for original in &original_messages {
let retrieved = retrieved_messages
.iter()
.find(|m| m.id == original.id)
.expect("Message not found");
assert_eq!(retrieved.text, original.text);
match (&original.sender, &retrieved.sender) {
(Participant::Me, Participant::Me) => {}
(
Participant::Remote { handle: o_name, .. },
Participant::Remote { handle: r_name, .. },
) => assert_eq!(o_name, r_name),
_ => panic!(
"Sender mismatch: original {:?}, retrieved {:?}",
original.sender, retrieved.sender
),
}
}
// Make sure the last message is the last one we inserted
let last_message = repository
.get_last_message_for_conversation(&conversation_id)
.unwrap()
.unwrap();
assert_eq!(last_message.id, message4.id);
})
.await;
}
#[tokio::test]
async fn test_settings() {
let mut db = Database::new_in_memory().unwrap();
db.with_settings(|settings| {
settings.put("test", &"test".to_string()).unwrap();
assert_eq!(settings.get::<String>("test").unwrap().unwrap(), "test");
settings.del("test").unwrap();
assert!(settings.get::<String>("test").unwrap().is_none());
let keys = settings.list_keys().unwrap();
assert_eq!(keys.len(), 0);
// Try encoding a struct
#[derive(serde::Serialize, serde::Deserialize, Debug, PartialEq, Eq)]
struct TestStruct {
name: String,
age: u32,
}
let test_struct = TestStruct {
name: "James".to_string(),
age: 35,
};
settings.put("test_struct", &test_struct).unwrap();
assert_eq!(
settings.get::<TestStruct>("test_struct").unwrap().unwrap(),
test_struct
);
// Test with an option<string>
settings
.put("test_struct_option", &Option::<String>::None)
.unwrap();
assert!(settings
.get::<Option<String>>("test_struct_option")
.unwrap()
.unwrap()
.is_none());
settings
.put(
"test_struct_option",
&Option::<String>::Some("test".to_string()),
)
.unwrap();
assert_eq!(
settings
.get::<Option<String>>("test_struct_option")
.unwrap()
.unwrap(),
Some("test".to_string())
);
})
.await;
}


@@ -0,0 +1,29 @@
[package]
name = "kordophone"
version = "1.0.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
async-trait = "0.1.80"
base64 = "0.22.1"
bytes = "1.10.1"
chrono = { version = "0.4.38", features = ["serde"] }
ctor = "0.2.8"
env_logger = "0.11.5"
futures-util = "0.3.31"
hyper = { version = "0.14", features = ["full"] }
hyper-tls = "0.5.0"
log = { version = "0.4.21", features = [] }
serde = { version = "1.0.152", features = ["derive"] }
serde_json = "1.0.91"
serde_plain = "1.0.2"
time = { version = "0.3.17", features = ["parsing", "serde"] }
tokio = { version = "1.37.0", features = ["full"] }
tokio-tungstenite = { version = "0.26.2", features = ["rustls-tls-webpki-roots"] }
tokio-util = { version = "0.7.15", features = ["futures-util"] }
tungstenite = { version = "0.26.2", features = ["rustls-tls-webpki-roots"] }
urlencoding = "2.1.3"
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics"] }
rustls = { version = "0.23", default-features = false, features = ["ring"] }


@@ -0,0 +1,45 @@
use crate::api::Credentials;
use crate::api::JwtToken;
use async_trait::async_trait;
#[async_trait]
pub trait AuthenticationStore {
async fn get_credentials(&mut self) -> Option<Credentials>;
async fn get_token(&mut self) -> Option<String>;
async fn set_token(&mut self, token: String);
}
pub struct InMemoryAuthenticationStore {
credentials: Option<Credentials>,
token: Option<JwtToken>,
}
impl Default for InMemoryAuthenticationStore {
fn default() -> Self {
Self::new(None)
}
}
impl InMemoryAuthenticationStore {
pub fn new(credentials: Option<Credentials>) -> Self {
Self {
credentials,
token: None,
}
}
}
#[async_trait]
impl AuthenticationStore for InMemoryAuthenticationStore {
async fn get_credentials(&mut self) -> Option<Credentials> {
self.credentials.clone()
}
async fn get_token(&mut self) -> Option<String> {
self.token.clone().map(|token| token.to_string())
}
async fn set_token(&mut self, token: String) {
self.token = Some(JwtToken::new(&token).unwrap());
}
}


@@ -0,0 +1,38 @@
use crate::model::event::Event;
use crate::model::update::UpdateItem;
use async_trait::async_trait;
use futures_util::stream::Stream;
use futures_util::Sink;
#[derive(Debug, Eq, PartialEq, Clone)]
pub enum SinkMessage {
Ping,
}
pub enum SocketUpdate {
Update(Vec<UpdateItem>),
Pong,
}
pub enum SocketEvent {
Update(Event),
Pong,
}
#[async_trait]
pub trait EventSocket {
type Error;
type EventStream: Stream<Item = Result<SocketEvent, Self::Error>>;
type UpdateStream: Stream<Item = Result<SocketUpdate, Self::Error>>;
/// Modern event pipeline
async fn events(
self,
) -> (
Self::EventStream,
impl Sink<SinkMessage, Error = Self::Error>,
);
/// Raw update items from the v1 API.
async fn raw_updates(self) -> Self::UpdateStream;
}


@@ -0,0 +1,710 @@
extern crate hyper;
extern crate serde;
use std::{path::PathBuf, pin::Pin, str, task::Poll};
use crate::api::event_socket::{EventSocket, SinkMessage, SocketEvent, SocketUpdate};
use crate::api::AuthenticationStore;
use bytes::Bytes;
use hyper::{Body, Client, Method, Request, Uri};
use hyper_tls::HttpsConnector;
use async_trait::async_trait;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use tokio::net::TcpStream;
use futures_util::stream::{BoxStream, Stream};
use futures_util::task::Context;
use futures_util::{Sink, SinkExt, StreamExt, TryStreamExt};
use tokio_tungstenite::connect_async;
use tokio_tungstenite::{MaybeTlsStream, WebSocketStream};
use crate::{
model::{
Conversation, ConversationID, Event, JwtToken, Message, MessageID, OutgoingMessage,
UpdateItem,
},
APIInterface,
};
type HttpClient = Client<HttpsConnector<hyper::client::HttpConnector>>;
pub struct HTTPAPIClient<K: AuthenticationStore + Send + Sync> {
pub base_url: Uri,
pub auth_store: K,
client: HttpClient,
}
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct Credentials {
pub username: String,
pub password: String,
}
#[derive(Debug)]
pub enum Error {
ClientError(String),
HTTPError(hyper::Error),
SerdeError(serde_json::Error),
DecodeError(String),
PongError(tungstenite::Error),
URLError,
Unauthorized,
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match self {
Error::HTTPError(ref err) => Some(err),
_ => None,
}
}
}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self)
}
}
impl From<hyper::Error> for Error {
fn from(err: hyper::Error) -> Error {
Error::HTTPError(err)
}
}
impl From<serde_json::Error> for Error {
fn from(err: serde_json::Error) -> Error {
Error::SerdeError(err)
}
}
impl From<tungstenite::Error> for Error {
fn from(err: tungstenite::Error) -> Error {
Error::ClientError(err.to_string())
}
}
trait AuthBuilder {
fn with_auth_string(self, token: &Option<String>) -> Self;
}
impl AuthBuilder for hyper::http::request::Builder {
fn with_auth_string(self, token: &Option<String>) -> Self {
if let Some(token) = &token {
self.header("Authorization", format!("Bearer: {}", token))
} else {
self
}
}
}
#[cfg(test)]
#[allow(dead_code)]
trait AuthSetting {
fn authenticate(&mut self, token: &Option<JwtToken>);
}
#[cfg(test)]
impl<B> AuthSetting for hyper::http::Request<B> {
fn authenticate(&mut self, token: &Option<JwtToken>) {
if let Some(token) = &token {
self.headers_mut()
.insert("Authorization", token.to_header_value());
}
}
}
type WebsocketSink = futures_util::stream::SplitSink<
WebSocketStream<tokio_tungstenite::MaybeTlsStream<tokio::net::TcpStream>>,
tungstenite::Message,
>;
type WebsocketStream =
futures_util::stream::SplitStream<WebSocketStream<MaybeTlsStream<TcpStream>>>;
pub struct WebsocketEventSocket {
sink: Option<WebsocketSink>,
stream: WebsocketStream,
}
impl WebsocketEventSocket {
pub fn new(socket: WebSocketStream<MaybeTlsStream<TcpStream>>) -> Self {
let (sink, stream) = socket.split();
Self {
sink: Some(sink),
stream,
}
}
}
impl WebsocketEventSocket {
fn raw_update_stream(self) -> impl Stream<Item = Result<SocketUpdate, Error>> {
self.stream
.map_err(Error::from)
.try_filter_map(|msg| async move {
match msg {
tungstenite::Message::Text(text) => {
match serde_json::from_str::<Vec<UpdateItem>>(&text) {
Ok(updates) => Ok(Some(SocketUpdate::Update(updates))),
Err(e) => {
log::error!("Error parsing update: {:?}", e);
Err(Error::from(e))
}
}
}
tungstenite::Message::Ping(_) => {
// We don't expect the server to send us pings.
Ok(None)
}
tungstenite::Message::Pong(_) => Ok(Some(SocketUpdate::Pong)),
tungstenite::Message::Close(_) => {
// Connection was closed cleanly
Err(Error::ClientError("WebSocket connection closed".into()))
}
_ => Ok(None),
}
})
}
}
#[async_trait]
impl EventSocket for WebsocketEventSocket {
type Error = Error;
type EventStream = BoxStream<'static, Result<SocketEvent, Error>>;
type UpdateStream = BoxStream<'static, Result<SocketUpdate, Error>>;
async fn events(
mut self,
) -> (
Self::EventStream,
impl Sink<SinkMessage, Error = Self::Error>,
) {
use futures_util::stream::iter;
let sink = self.sink.take().unwrap().with(|f| match f {
SinkMessage::Ping => futures_util::future::ready(Ok::<tungstenite::Message, Error>(
tungstenite::Message::Ping(Bytes::new()),
)),
});
let stream = self
.raw_update_stream()
.map_ok(
|updates| -> BoxStream<'static, Result<SocketEvent, Error>> {
match updates {
SocketUpdate::Update(updates) => {
let iter_stream = iter(
updates
.into_iter()
.map(|u| Ok(SocketEvent::Update(Event::from(u)))),
);
iter_stream.boxed()
}
SocketUpdate::Pong => iter(std::iter::once(Ok(SocketEvent::Pong))).boxed(),
}
},
)
.try_flatten()
.boxed();
(stream, sink)
}
async fn raw_updates(self) -> Self::UpdateStream {
self.raw_update_stream().boxed()
}
}
pub struct ResponseStream {
body: hyper::Body,
}
impl Stream for ResponseStream {
type Item = Result<Bytes, Error>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
self.body.poll_next_unpin(cx).map_err(Error::HTTPError)
}
}
impl From<hyper::Body> for ResponseStream {
fn from(value: hyper::Body) -> Self {
ResponseStream { body: value }
}
}
#[async_trait]
impl<K: AuthenticationStore + Send + Sync> APIInterface for HTTPAPIClient<K> {
type Error = Error;
type ResponseStream = ResponseStream;
async fn get_version(&mut self) -> Result<String, Self::Error> {
let version: String = self.deserialized_response("version", Method::GET).await?;
Ok(version)
}
async fn get_conversations(&mut self) -> Result<Vec<Conversation>, Self::Error> {
let conversations: Vec<Conversation> = self
.deserialized_response("conversations", Method::GET)
.await?;
Ok(conversations)
}
async fn authenticate(&mut self, credentials: Credentials) -> Result<JwtToken, Self::Error> {
#[derive(Deserialize, Debug)]
struct AuthResponse {
jwt: String,
}
log::debug!("Authenticating with username: {:?}", credentials.username);
let body = || -> Body { serde_json::to_string(&credentials).unwrap().into() };
let token: AuthResponse = self
.deserialized_response_with_body_retry("authenticate", Method::POST, body, false)
.await?;
let token = JwtToken::new(&token.jwt).map_err(|e| Error::DecodeError(e.to_string()))?;
log::debug!("Saving token: {:?}", token);
self.auth_store.set_token(token.to_string()).await;
Ok(token)
}
async fn mark_conversation_as_read(
&mut self,
conversation_id: &ConversationID,
) -> Result<(), Self::Error> {
// SERVER JANK: This should be POST, but it's GET for some reason.
let endpoint = format!("markConversation?guid={}", conversation_id);
self.response_with_body_retry(&endpoint, Method::GET, Body::empty, true)
.await?;
Ok(())
}
async fn get_messages(
&mut self,
conversation_id: &ConversationID,
limit: Option<u32>,
before: Option<MessageID>,
after: Option<MessageID>,
) -> Result<Vec<Message>, Self::Error> {
let mut endpoint = format!("messages?guid={}", conversation_id);
if let Some(limit_val) = limit {
endpoint.push_str(&format!("&limit={}", limit_val));
}
if let Some(before_id) = before {
endpoint.push_str(&format!("&beforeMessageGUID={}", before_id));
}
if let Some(after_id) = after {
endpoint.push_str(&format!("&afterMessageGUID={}", after_id));
}
let messages: Vec<Message> = self.deserialized_response(&endpoint, Method::GET).await?;
Ok(messages)
}
async fn send_message(
&mut self,
outgoing_message: &OutgoingMessage,
) -> Result<Message, Self::Error> {
let message: Message = self
.deserialized_response_with_body("sendMessage", Method::POST, || {
serde_json::to_string(&outgoing_message).unwrap().into()
})
.await?;
Ok(message)
}
async fn fetch_attachment_data(
&mut self,
guid: &str,
preview: bool,
) -> Result<ResponseStream, Self::Error> {
let endpoint = format!("attachment?guid={}&preview={}", guid, preview);
self.response_with_body_retry(&endpoint, Method::GET, Body::empty, true)
.await
.map(hyper::Response::into_body)
.map(ResponseStream::from)
}
async fn upload_attachment<R>(
&mut self,
mut data: tokio::io::BufReader<R>,
filename: &str,
_size: u64,
) -> Result<String, Self::Error>
where
R: tokio::io::AsyncRead + Unpin + Send + Sync + 'static,
{
use tokio::io::AsyncReadExt;
#[derive(Deserialize, Debug)]
struct UploadAttachmentResponse {
#[serde(rename = "fileTransferGUID")]
guid: String,
}
// TODO: We can still use Body::wrap_stream here, but we need to make sure to plumb the CONTENT_LENGTH header,
// otherwise CocoaHTTPServer will crash because of a bug.
//
// See ff03e73758f30c081a9319a8c04025cba69b8393 for what this was like before.
let mut bytes = Vec::new();
data.read_to_end(&mut bytes)
.await
.map_err(|e| Error::ClientError(e.to_string()))?;
let encoded_filename = urlencoding::encode(filename);
let endpoint = format!("uploadAttachment?filename={}", encoded_filename);
let mut bytes_opt = Some(bytes);
let response: UploadAttachmentResponse = self
.deserialized_response_with_body_retry(
&endpoint,
Method::POST,
move || {
Body::from(
bytes_opt
.take()
.expect("Body already consumed during retry"),
)
},
false,
)
.await?;
Ok(response.guid)
}
async fn open_event_socket(
&mut self,
update_seq: Option<u64>,
) -> Result<WebsocketEventSocket, Self::Error> {
use tungstenite::handshake::client::generate_key;
use tungstenite::handshake::client::Request as TungsteniteRequest;
let endpoint = match update_seq {
Some(seq) => format!("updates?seq={}", seq),
None => "updates".to_string(),
};
let uri = self
.uri_for_endpoint(&endpoint, Some(self.websocket_scheme()))?;
log::debug!("Connecting to websocket: {:?}", uri);
let auth = self.auth_store.get_token().await;
let host = uri.authority().unwrap().host();
let mut request = TungsteniteRequest::builder()
.header("Host", host)
.header("Connection", "Upgrade")
.header("Upgrade", "websocket")
.header("Sec-WebSocket-Version", "13")
.header("Sec-WebSocket-Key", generate_key())
.uri(uri.to_string())
.body(())
.expect("Unable to build websocket request");
match &auth {
Some(token) => {
request.headers_mut().insert(
"Authorization",
format!("Bearer: {}", token).parse().unwrap(),
);
}
None => {
log::warn!(target: "websocket", "Proceeding without auth token.");
}
}
log::debug!("Websocket request: {:?}", request);
match connect_async(request).await.map_err(Error::from) {
Ok((socket, response)) => {
log::debug!("Websocket connected: {:?}", response.status());
Ok(WebsocketEventSocket::new(socket))
}
Err(e) => match &e {
Error::ClientError(ce) => match ce.as_str() {
"HTTP error: 401 Unauthorized" | "Unauthorized" => {
// Try to authenticate
if let Some(credentials) = &self.auth_store.get_credentials().await {
log::warn!("Websocket connection failed, attempting to authenticate");
let new_token = self.authenticate(credentials.clone()).await?;
self.auth_store.set_token(new_token.to_string()).await;
// Return Unauthorized so the caller retries with the refreshed token on the next attempt.
return Err(Error::Unauthorized);
} else {
log::error!("Websocket unauthorized, no credentials provided");
return Err(Error::ClientError(
"Unauthorized, no credentials provided".into(),
));
}
}
_ => Err(e),
},
_ => Err(e),
},
}
}
}
impl<K: AuthenticationStore + Send + Sync> HTTPAPIClient<K> {
pub fn new(base_url: Uri, auth_store: K) -> HTTPAPIClient<K> {
let https = HttpsConnector::new();
let client = Client::builder().build::<_, Body>(https);
HTTPAPIClient { base_url, auth_store, client }
}
fn uri_for_endpoint(&self, endpoint: &str, scheme: Option<&str>) -> Result<Uri, Error> {
let mut parts = self.base_url.clone().into_parts();
let root_path: PathBuf = parts
.path_and_query
.ok_or(Error::URLError)?
.path()
.into();
let path = root_path.join(endpoint);
let path_str = path.to_str().ok_or(Error::URLError)?;
parts.path_and_query = Some(path_str.parse().map_err(|_| Error::URLError)?);
if let Some(scheme) = scheme {
parts.scheme = Some(scheme.parse().map_err(|_| Error::URLError)?);
}
Uri::try_from(parts).map_err(|_| Error::URLError)
}
fn websocket_scheme(&self) -> &str {
if self.base_url.scheme().unwrap() == "https" {
"wss"
} else {
"ws"
}
}
async fn deserialized_response<T: DeserializeOwned>(
&mut self,
endpoint: &str,
method: Method,
) -> Result<T, Error> {
self.deserialized_response_with_body(endpoint, method, Body::empty)
.await
}
async fn deserialized_response_with_body<T>(
&mut self,
endpoint: &str,
method: Method,
body_fn: impl FnMut() -> Body,
) -> Result<T, Error>
where
T: DeserializeOwned,
{
self.deserialized_response_with_body_retry(endpoint, method, body_fn, true)
.await
}
async fn deserialized_response_with_body_retry<T>(
&mut self,
endpoint: &str,
method: Method,
body_fn: impl FnMut() -> Body,
retry_auth: bool,
) -> Result<T, Error>
where
T: DeserializeOwned,
{
let response = self
.response_with_body_retry(endpoint, method, body_fn, retry_auth)
.await?;
// Read and parse response body
let body = hyper::body::to_bytes(response.into_body()).await?;
let parsed: T = match serde_json::from_slice(&body) {
Ok(result) => Ok(result),
Err(json_err) => {
log::error!("Error deserializing JSON: {:?}", json_err);
log::error!("Body: {:?}", String::from_utf8_lossy(&body));
// If JSON deserialization fails, try to interpret it as plain text
// Unfortunately the server does return things like this...
let s = str::from_utf8(&body).map_err(|e| Error::DecodeError(e.to_string()))?;
serde_plain::from_str(s).map_err(|_| json_err)
}
}?;
Ok(parsed)
}
async fn response_with_body_retry(
&mut self,
endpoint: &str,
method: Method,
mut body_fn: impl FnMut() -> Body,
retry_auth: bool,
) -> Result<hyper::Response<Body>, Error> {
use hyper::StatusCode;
let uri = self.uri_for_endpoint(endpoint, None)?;
log::debug!("Requesting {:?} {:?}", method, uri);
let mut build_request = |auth: &Option<String>| {
let body = body_fn();
Request::builder()
.method(&method)
.uri(uri.clone())
.with_auth_string(auth)
.body(body)
.expect("Unable to build request")
};
log::trace!("Obtaining token from auth store");
let token = self.auth_store.get_token().await;
log::trace!("Token: {:?}", token);
let request = build_request(&token);
log::trace!("Request: {:?}. Sending request...", request);
let mut response = self.client.request(request).await?;
log::debug!("-> Response: {:}", response.status());
match response.status() {
StatusCode::OK => { /* cool */ }
// 401: Unauthorized. Token may have expired or is invalid. Attempt to renew.
StatusCode::UNAUTHORIZED => {
if !retry_auth {
return Err(Error::ClientError("Unauthorized".into()));
}
if let Some(credentials) = &self.auth_store.get_credentials().await {
log::debug!(
"Renewing token using credentials: u: {:?}",
credentials.username
);
let new_token = self.authenticate(credentials.clone()).await?;
let request = build_request(&Some(new_token.to_string()));
response = self.client.request(request).await?;
} else {
return Err(Error::ClientError(
"Unauthorized, no credentials provided".into(),
));
}
}
// Other errors: bubble up.
_ => {
let status = response.status();
let body_str = hyper::body::to_bytes(response.into_body()).await?;
let message = format!(
"Request failed ({:}). Response body: {:?}",
status,
String::from_utf8_lossy(&body_str)
);
return Err(Error::ClientError(message));
}
}
Ok(response)
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::api::InMemoryAuthenticationStore;
#[cfg(test)]
fn local_mock_client() -> HTTPAPIClient<InMemoryAuthenticationStore> {
let base_url = "http://localhost:5738".parse().unwrap();
let credentials = Credentials {
username: "test".to_string(),
password: "test".to_string(),
};
HTTPAPIClient::new(
base_url,
InMemoryAuthenticationStore::new(Some(credentials)),
)
}
#[cfg(test)]
async fn mock_client_is_reachable() -> bool {
let mut client = local_mock_client();
let version = client.get_version().await;
match version {
Ok(_) => true,
Err(e) => {
log::error!("Mock client error: {:?}", e);
false
}
}
}
#[tokio::test]
async fn test_version() {
if !mock_client_is_reachable().await {
log::warn!("Skipping http_client tests (mock server not reachable)");
return;
}
let mut client = local_mock_client();
let version = client.get_version().await.unwrap();
assert!(version.starts_with("KordophoneMock-"));
}
#[tokio::test]
async fn test_conversations() {
if !mock_client_is_reachable().await {
log::warn!("Skipping http_client tests (mock server not reachable)");
return;
}
let mut client = local_mock_client();
let conversations = client.get_conversations().await.unwrap();
assert!(!conversations.is_empty());
}
#[tokio::test]
async fn test_messages() {
if !mock_client_is_reachable().await {
log::warn!("Skipping http_client tests (mock server not reachable)");
return;
}
let mut client = local_mock_client();
let conversations = client.get_conversations().await.unwrap();
let conversation = conversations.first().unwrap();
let messages = client
.get_messages(&conversation.guid, None, None, None)
.await
.unwrap();
assert!(!messages.is_empty());
}
#[tokio::test]
async fn test_updates() {
if !mock_client_is_reachable().await {
log::warn!("Skipping http_client tests (mock server not reachable)");
return;
}
let mut client = local_mock_client();
// We just want to see that the connection is established; we won't wait for any events.
let _ = client.open_event_socket(None).await.unwrap();
assert!(true);
}
}

View File

@@ -0,0 +1,78 @@
pub use crate::model::{Conversation, ConversationID, Message, MessageID, OutgoingMessage};
use async_trait::async_trait;
use bytes::Bytes;
use futures_util::Stream;
pub mod auth;
pub use crate::api::auth::{AuthenticationStore, InMemoryAuthenticationStore};
use crate::model::JwtToken;
pub mod http_client;
pub use http_client::HTTPAPIClient;
pub mod event_socket;
pub use event_socket::EventSocket;
use self::http_client::Credentials;
use std::fmt::Debug;
#[async_trait]
pub trait APIInterface {
type Error: Debug;
type ResponseStream: Stream<Item = Result<Bytes, Self::Error>>;
// (GET) /version
async fn get_version(&mut self) -> Result<String, Self::Error>;
// (GET) /conversations
async fn get_conversations(&mut self) -> Result<Vec<Conversation>, Self::Error>;
// (GET) /messages
async fn get_messages(
&mut self,
conversation_id: &ConversationID,
limit: Option<u32>,
before: Option<MessageID>,
after: Option<MessageID>,
) -> Result<Vec<Message>, Self::Error>;
// (POST) /sendMessage
async fn send_message(
&mut self,
outgoing_message: &OutgoingMessage,
) -> Result<Message, Self::Error>;
// (GET) /attachment
async fn fetch_attachment_data(
&mut self,
guid: &str,
preview: bool,
) -> Result<Self::ResponseStream, Self::Error>;
// (POST) /uploadAttachment
async fn upload_attachment<R>(
&mut self,
data: tokio::io::BufReader<R>,
filename: &str,
size: u64,
) -> Result<String, Self::Error>
where
R: tokio::io::AsyncRead + Unpin + Send + Sync + 'static;
// (POST) /authenticate
async fn authenticate(&mut self, credentials: Credentials) -> Result<JwtToken, Self::Error>;
// (GET) /markConversation
async fn mark_conversation_as_read(
&mut self,
conversation_id: &ConversationID,
) -> Result<(), Self::Error>;
// (WS) /updates
async fn open_event_socket(
&mut self,
update_seq: Option<u64>,
) -> Result<impl EventSocket, Self::Error>;
}
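
For orientation, here is a minimal sketch (not part of the crate) of how a caller might drive this trait generically; the helper name `print_conversations` is hypothetical and the output format is arbitrary.

```rust
// Hypothetical helper, shown only to illustrate the trait's surface.
async fn print_conversations<A: APIInterface>(api: &mut A) -> Result<(), A::Error> {
    let version = api.get_version().await?;
    println!("server version: {}", version);
    for conversation in api.get_conversations().await? {
        println!("{} ({} unread)", conversation.guid, conversation.unread_count);
    }
    Ok(())
}
```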

View File

@@ -0,0 +1,20 @@
pub mod api;
pub mod model;
pub use self::api::APIInterface;
#[cfg(test)]
pub mod tests;
// Ensure a process-level rustls CryptoProvider is installed for TLS (wss).
// Rustls 0.23 requires an explicit provider installation (e.g., ring or aws-lc).
// We depend on rustls with feature "ring" and install it once at startup.
#[ctor::ctor]
fn install_rustls_crypto_provider() {
// If already installed, this is a no-op. Ignore the result.
#[allow(unused_must_use)]
{
use rustls::crypto::ring;
ring::default_provider().install_default();
}
}

View File

@@ -0,0 +1,112 @@
use serde::Deserialize;
use time::OffsetDateTime;
use uuid::Uuid;
use super::Identifiable;
use crate::model::message::Message;
pub type ConversationID = <Conversation as Identifiable>::ID;
#[derive(Debug, Clone, Deserialize)]
pub struct Conversation {
pub guid: String,
#[serde(with = "time::serde::iso8601")]
pub date: OffsetDateTime,
#[serde(rename = "unreadCount")]
pub unread_count: i32,
#[serde(rename = "lastMessagePreview")]
pub last_message_preview: Option<String>,
#[serde(rename = "participantDisplayNames")]
pub participant_display_names: Vec<String>,
#[serde(rename = "displayName")]
pub display_name: Option<String>,
#[serde(rename = "lastMessage")]
pub last_message: Option<Message>,
}
impl Conversation {
pub fn builder() -> ConversationBuilder {
ConversationBuilder::new()
}
}
impl Identifiable for Conversation {
type ID = String;
fn id(&self) -> &Self::ID {
&self.guid
}
}
#[derive(Default)]
pub struct ConversationBuilder {
guid: Option<String>,
date: Option<OffsetDateTime>,
unread_count: Option<i32>,
last_message_preview: Option<String>,
participant_display_names: Option<Vec<String>>,
display_name: Option<String>,
last_message: Option<Message>,
}
impl ConversationBuilder {
pub fn new() -> Self {
Self::default()
}
pub fn guid(mut self, guid: String) -> Self {
self.guid = Some(guid);
self
}
pub fn date(mut self, date: OffsetDateTime) -> Self {
self.date = Some(date);
self
}
pub fn unread_count(mut self, unread_count: i32) -> Self {
self.unread_count = Some(unread_count);
self
}
pub fn last_message_preview(mut self, last_message_preview: String) -> Self {
self.last_message_preview = Some(last_message_preview);
self
}
pub fn participant_display_names(mut self, participant_display_names: Vec<String>) -> Self {
self.participant_display_names = Some(participant_display_names);
self
}
pub fn display_name<T>(mut self, display_name: T) -> Self
where
T: Into<String>,
{
self.display_name = Some(display_name.into());
self
}
pub fn last_message(mut self, last_message: Message) -> Self {
self.last_message = Some(last_message);
self
}
pub fn build(self) -> Conversation {
Conversation {
guid: self.guid.unwrap_or(Uuid::new_v4().to_string()),
date: self.date.unwrap_or(OffsetDateTime::now_utc()),
unread_count: self.unread_count.unwrap_or(0),
last_message_preview: self.last_message_preview,
participant_display_names: self.participant_display_names.unwrap_or_default(),
display_name: self.display_name,
last_message: self.last_message,
}
}
}
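
A minimal usage sketch of the builder above; `example_conversation` and its values are placeholders, and the `Message` builder in the sibling module follows the same pattern.

```rust
// Placeholder values; guid and date fall back to a fresh UUID and the current time.
fn example_conversation() -> Conversation {
    Conversation::builder()
        .display_name("Team chat")
        .unread_count(2)
        .last_message_preview("See you tomorrow".to_string())
        .build()
}
```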

View File

@@ -0,0 +1,39 @@
use crate::model::{Conversation, Message, UpdateItem};
#[derive(Debug, Clone)]
pub struct Event {
pub data: EventData,
pub update_seq: u64,
}
#[derive(Debug, Clone)]
pub enum EventData {
ConversationChanged(Conversation),
MessageReceived(Conversation, Message),
}
impl From<UpdateItem> for Event {
fn from(update: UpdateItem) -> Self {
match update {
UpdateItem {
conversation: Some(conversation),
message: None,
..
} => Event {
data: EventData::ConversationChanged(conversation),
update_seq: update.seq,
},
UpdateItem {
conversation: Some(conversation),
message: Some(message),
..
} => Event {
data: EventData::MessageReceived(conversation, message),
update_seq: update.seq,
},
_ => panic!("Invalid update item: {:?}", update),
}
}
}

View File

@@ -0,0 +1,146 @@
use std::error::Error;
use base64::{
engine::{self, general_purpose},
Engine,
};
use chrono::{DateTime, Utc};
use hyper::http::HeaderValue;
use serde::{Deserialize, Serialize};
#[derive(Deserialize, Serialize, Debug, Clone)]
#[allow(dead_code)]
struct JwtHeader {
alg: String,
typ: String,
}
#[derive(Deserialize, Serialize, Debug, Clone)]
#[allow(dead_code)]
enum ExpValue {
Integer(i64),
String(String),
}
#[derive(Deserialize, Serialize, Debug, Clone)]
#[allow(dead_code)]
struct JwtPayload {
#[serde(deserialize_with = "deserialize_exp")]
exp: i64,
iss: Option<String>,
user: Option<String>,
}
fn deserialize_exp<'de, D>(deserializer: D) -> Result<i64, D::Error>
where
D: serde::Deserializer<'de>,
{
use serde::de::Error;
#[derive(Deserialize)]
#[serde(untagged)]
enum ExpValue {
String(String),
Number(i64),
}
match ExpValue::deserialize(deserializer)? {
ExpValue::String(s) => s.parse().map_err(D::Error::custom),
ExpValue::Number(n) => Ok(n),
}
}
#[derive(Deserialize, Serialize, Debug, Clone)]
#[allow(dead_code)]
pub struct JwtToken {
header: JwtHeader,
payload: JwtPayload,
signature: Vec<u8>,
expiration_date: DateTime<Utc>,
token: String,
}
impl JwtToken {
fn decode_token_using_engine(
token: &str,
engine: engine::GeneralPurpose,
) -> Result<Self, Box<dyn Error + Send + Sync>> {
let mut parts = token.split('.');
let header = parts.next().unwrap();
let payload = parts.next().unwrap();
let signature = parts.next().unwrap();
let header = engine.decode(header)?;
let payload = engine.decode(payload)?;
let signature = engine.decode(signature)?;
// Parse jwt header
let header: JwtHeader = serde_json::from_slice(&header)?;
// Parse jwt payload
let payload: JwtPayload = serde_json::from_slice(&payload)?;
// Parse jwt expiration date
let timestamp = DateTime::from_timestamp(payload.exp, 0)
.unwrap()
.naive_utc();
let expiration_date = DateTime::from_naive_utc_and_offset(timestamp, Utc);
Ok(JwtToken {
header,
payload,
signature,
expiration_date,
token: token.to_string(),
})
}
pub fn new(token: &str) -> Result<Self, Box<dyn Error + Send + Sync>> {
// STUPID: My mock server uses a different encoding than the real server, so we have to
// try both encodings here.
log::debug!("Attempting to decode JWT token: {}", token);
let result = Self::decode_token_using_engine(token, general_purpose::STANDARD)
    .or_else(|_| Self::decode_token_using_engine(token, general_purpose::URL_SAFE_NO_PAD));
if let Err(ref e) = result {
log::error!("Failed to decode JWT token: {}", e);
log::error!("Token length: {}", token.len());
log::error!("Token parts: {:?}", token.split('.').collect::<Vec<_>>());
}
result
}
pub fn dummy() -> Self {
JwtToken {
header: JwtHeader {
alg: "none".to_string(),
typ: "JWT".to_string(),
},
payload: JwtPayload {
exp: 0,
iss: None,
user: None,
},
signature: vec![],
expiration_date: Utc::now(),
token: "".to_string(),
}
}
pub fn is_valid(&self) -> bool {
self.expiration_date > Utc::now()
}
pub fn to_header_value(&self) -> HeaderValue {
format!("Bearer {}", self.token).parse().unwrap()
}
pub fn to_string(&self) -> String {
self.token.clone()
}
}
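
A small sketch of the decode-and-check flow using the constructors above; `check_token` is a hypothetical helper name.

```rust
// Illustrative only: decode a raw token string and report whether it is still usable.
fn check_token(raw: &str) -> bool {
    match JwtToken::new(raw) {
        Ok(token) => token.is_valid(),
        Err(e) => {
            log::warn!("could not decode token: {}", e);
            false
        }
    }
}
```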

View File

@@ -0,0 +1,121 @@
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use time::OffsetDateTime;
use uuid::Uuid;
use super::Identifiable;
pub type MessageID = <Message as Identifiable>::ID;
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AttributionInfo {
/// Picture width
#[serde(rename = "pgensw")]
pub width: Option<u32>,
/// Picture height
#[serde(rename = "pgensh")]
pub height: Option<u32>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AttachmentMetadata {
#[serde(rename = "attributionInfo")]
pub attribution_info: Option<AttributionInfo>,
}
#[derive(Debug, Clone, Deserialize)]
pub struct Message {
pub guid: String,
#[serde(rename = "text")]
pub text: String,
#[serde(rename = "sender")]
pub sender: Option<String>,
#[serde(with = "time::serde::iso8601")]
pub date: OffsetDateTime,
/// Array of file transfer GUIDs for attachments
#[serde(rename = "fileTransferGUIDs", default)]
pub file_transfer_guids: Vec<String>,
/// Optional attachment metadata, keyed by attachment GUID
#[serde(rename = "attachmentMetadata")]
pub attachment_metadata: Option<HashMap<String, AttachmentMetadata>>,
}
impl Message {
pub fn builder() -> MessageBuilder {
MessageBuilder::new()
}
}
impl Identifiable for Message {
type ID = String;
fn id(&self) -> &Self::ID {
&self.guid
}
}
#[derive(Default)]
pub struct MessageBuilder {
guid: Option<String>,
text: Option<String>,
sender: Option<String>,
date: Option<OffsetDateTime>,
file_transfer_guids: Option<Vec<String>>,
attachment_metadata: Option<HashMap<String, AttachmentMetadata>>,
}
impl MessageBuilder {
pub fn new() -> Self {
Self::default()
}
pub fn guid(mut self, guid: String) -> Self {
self.guid = Some(guid);
self
}
pub fn text(mut self, text: String) -> Self {
self.text = Some(text);
self
}
pub fn sender(mut self, sender: String) -> Self {
self.sender = Some(sender);
self
}
pub fn date(mut self, date: OffsetDateTime) -> Self {
self.date = Some(date);
self
}
pub fn file_transfer_guids(mut self, file_transfer_guids: Vec<String>) -> Self {
self.file_transfer_guids = Some(file_transfer_guids);
self
}
pub fn attachment_metadata(
mut self,
attachment_metadata: HashMap<String, AttachmentMetadata>,
) -> Self {
self.attachment_metadata = Some(attachment_metadata);
self
}
pub fn build(self) -> Message {
Message {
guid: self.guid.unwrap_or(Uuid::new_v4().to_string()),
text: self.text.unwrap_or("".to_string()),
sender: self.sender,
date: self.date.unwrap_or(OffsetDateTime::now_utc()),
file_transfer_guids: self.file_transfer_guids.unwrap_or_default(),
attachment_metadata: self.attachment_metadata,
}
}
}

View File

@@ -0,0 +1,26 @@
pub mod conversation;
pub mod event;
pub mod message;
pub mod outgoing_message;
pub mod update;
pub use conversation::Conversation;
pub use conversation::ConversationID;
pub use message::Message;
pub use message::MessageID;
pub use outgoing_message::OutgoingMessage;
pub use outgoing_message::OutgoingMessageBuilder;
pub use update::UpdateItem;
pub use event::Event;
pub mod jwt;
pub use jwt::JwtToken;
pub trait Identifiable {
type ID;
fn id(&self) -> &Self::ID;
}

View File

@@ -0,0 +1,72 @@
use super::conversation::ConversationID;
use chrono::NaiveDateTime;
use serde::Serialize;
use uuid::Uuid;
#[derive(Debug, Clone, Serialize)]
pub struct OutgoingMessage {
#[serde(skip)]
pub guid: Uuid,
#[serde(skip)]
pub date: NaiveDateTime,
#[serde(rename = "body")]
pub text: String,
#[serde(rename = "guid")]
pub conversation_id: ConversationID,
#[serde(rename = "fileTransferGUIDs")]
pub file_transfer_guids: Vec<String>,
}
impl OutgoingMessage {
pub fn builder() -> OutgoingMessageBuilder {
OutgoingMessageBuilder::new()
}
}
#[derive(Default)]
pub struct OutgoingMessageBuilder {
guid: Option<Uuid>,
text: Option<String>,
conversation_id: Option<ConversationID>,
file_transfer_guids: Option<Vec<String>>,
}
impl OutgoingMessageBuilder {
pub fn new() -> Self {
Self::default()
}
pub fn guid(mut self, guid: Uuid) -> Self {
self.guid = Some(guid);
self
}
pub fn text(mut self, text: String) -> Self {
self.text = Some(text);
self
}
pub fn conversation_id(mut self, conversation_id: ConversationID) -> Self {
self.conversation_id = Some(conversation_id);
self
}
pub fn file_transfer_guids(mut self, file_transfer_guids: Vec<String>) -> Self {
self.file_transfer_guids = Some(file_transfer_guids);
self
}
pub fn build(self) -> OutgoingMessage {
OutgoingMessage {
guid: self.guid.unwrap_or_else(Uuid::new_v4),
text: self.text.unwrap(),
conversation_id: self.conversation_id.unwrap(),
file_transfer_guids: self.file_transfer_guids.unwrap_or_default(),
date: chrono::Utc::now().naive_utc(),
}
}
}
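
Note that `build()` unwraps `text` and `conversation_id`, so both must be set before building. A minimal sketch (with a hypothetical helper name and placeholder text):

```rust
// text and conversation_id are required; build() panics if either is missing.
fn example_outgoing(conversation_id: ConversationID) -> OutgoingMessage {
    OutgoingMessage::builder()
        .conversation_id(conversation_id)
        .text("Hello from kordophone".to_string())
        .build()
}
```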

View File

@@ -0,0 +1,18 @@
use super::conversation::Conversation;
use super::message::Message;
use serde::Deserialize;
#[derive(Debug, Clone, Deserialize, Default)]
pub struct UpdateItem {
#[serde(rename = "messageSequenceNumber")]
pub seq: u64,
#[serde(rename = "conversation")]
pub conversation: Option<Conversation>,
#[serde(rename = "message")]
pub message: Option<Message>,
#[serde(default)]
pub pong: bool,
}

View File

@@ -0,0 +1,31 @@
mod test_client;
use self::test_client::TestClient;
use crate::APIInterface;
pub mod api_interface {
use crate::model::Conversation;
use super::*;
#[tokio::test]
async fn test_version() {
let mut client = TestClient::new();
let version = client.get_version().await.unwrap();
assert_eq!(version, client.version);
}
#[tokio::test]
async fn test_conversations() {
let mut client = TestClient::new();
let test_convo = Conversation::builder()
.display_name("Test Conversation")
.build();
client.conversations.push(test_convo.clone());
let conversations = client.get_conversations().await.unwrap();
assert_eq!(conversations.len(), 1);
assert_eq!(conversations[0].display_name, test_convo.display_name);
}
}

View File

@@ -0,0 +1,158 @@
use async_trait::async_trait;
use std::collections::HashMap;
use time::OffsetDateTime;
use uuid::Uuid;
pub use crate::APIInterface;
use crate::{
api::event_socket::{EventSocket, SinkMessage, SocketEvent, SocketUpdate},
api::http_client::Credentials,
model::{
Conversation, ConversationID, Event, JwtToken, Message, MessageID, OutgoingMessage,
UpdateItem,
},
};
use bytes::Bytes;
use futures_util::stream::BoxStream;
use futures_util::Sink;
use futures_util::StreamExt;
pub struct TestClient {
pub version: &'static str,
pub conversations: Vec<Conversation>,
pub messages: HashMap<ConversationID, Vec<Message>>,
}
#[derive(Debug)]
pub enum TestError {
ConversationNotFound,
}
impl TestClient {
pub fn new() -> TestClient {
TestClient {
version: "KordophoneTest-1.0",
conversations: vec![],
messages: HashMap::<ConversationID, Vec<Message>>::new(),
}
}
}
pub struct TestEventSocket {
pub events: Vec<Event>,
}
impl TestEventSocket {
pub fn new() -> Self {
Self { events: vec![] }
}
}
#[async_trait]
impl EventSocket for TestEventSocket {
type Error = TestError;
type EventStream = BoxStream<'static, Result<SocketEvent, TestError>>;
type UpdateStream = BoxStream<'static, Result<SocketUpdate, TestError>>;
async fn events(
self,
) -> (
Self::EventStream,
impl Sink<SinkMessage, Error = Self::Error>,
) {
use futures_util::SinkExt;
(
    futures_util::stream::iter(self.events.into_iter().map(|e| Ok(SocketEvent::Update(e)))).boxed(),
    // drain() never errors; the mapping only pins the sink's error type to TestError.
    futures_util::sink::drain::<SinkMessage>().sink_map_err(|_| TestError::ConversationNotFound),
)
}
async fn raw_updates(self) -> Self::UpdateStream {
let results: Vec<Result<SocketUpdate, TestError>> = vec![];
futures_util::stream::iter(results.into_iter()).boxed()
}
}
#[async_trait]
impl APIInterface for TestClient {
type Error = TestError;
type ResponseStream = BoxStream<'static, Result<Bytes, TestError>>;
async fn authenticate(&mut self, _credentials: Credentials) -> Result<JwtToken, Self::Error> {
Ok(JwtToken::dummy())
}
async fn get_version(&mut self) -> Result<String, Self::Error> {
Ok(self.version.to_string())
}
async fn get_conversations(&mut self) -> Result<Vec<Conversation>, Self::Error> {
Ok(self.conversations.clone())
}
async fn get_messages(
&mut self,
conversation_id: &ConversationID,
_limit: Option<u32>,
_before: Option<MessageID>,
_after: Option<MessageID>,
) -> Result<Vec<Message>, Self::Error> {
if let Some(messages) = self.messages.get(conversation_id) {
return Ok(messages.clone());
}
Err(TestError::ConversationNotFound)
}
async fn send_message(
&mut self,
outgoing_message: &OutgoingMessage,
) -> Result<Message, Self::Error> {
let message = Message::builder()
.guid(Uuid::new_v4().to_string())
.text(outgoing_message.text.clone())
.date(OffsetDateTime::now_utc())
.build();
self.messages
.entry(outgoing_message.conversation_id.clone())
.or_insert(vec![])
.push(message.clone());
Ok(message)
}
async fn open_event_socket(
&mut self,
_update_seq: Option<u64>,
) -> Result<impl EventSocket, Self::Error> {
Ok(TestEventSocket::new())
}
async fn fetch_attachment_data(
&mut self,
_guid: &str,
_preview: bool,
) -> Result<Self::ResponseStream, Self::Error> {
Ok(futures_util::stream::iter(vec![Ok(Bytes::from_static(b"test"))]).boxed())
}
async fn upload_attachment<R>(
&mut self,
_data: tokio::io::BufReader<R>,
_filename: &str,
_size: u64,
) -> Result<String, Self::Error>
where
R: tokio::io::AsyncRead + Unpin + Send + Sync + 'static,
{
Ok(String::from("test"))
}
async fn mark_conversation_as_read(
&mut self,
_conversation_id: &ConversationID,
) -> Result<(), Self::Error> {
Ok(())
}
}

View File

@@ -0,0 +1,52 @@
[package]
name = "kordophoned"
version = "1.0.1"
edition = "2021"
license = "GPL-3.0"
description = "Client daemon for the Kordophone chat protocol"
[dependencies]
anyhow = "1.0.98"
async-trait = "0.1.88"
chrono = "0.4.38"
directories = "6.0.0"
env_logger = "0.11.6"
futures-util = "0.3.31"
keyring = { version = "3.6.3", features = ["apple-native", "sync-secret-service"] }
kordophone = { path = "../kordophone" }
kordophone-db = { path = "../kordophone-db" }
log = "0.4.25"
serde_json = "1.0"
thiserror = "2.0.12"
tokio = { version = "1", features = ["full"] }
tokio-condvar = "0.3.0"
uuid = "1.16.0"
once_cell = "1.19.0"
# D-Bus dependencies only on Linux
[target.'cfg(target_os = "linux")'.dependencies]
dbus = { version = "0.9.7", features = ["futures"] }
dbus-crossroads = "0.5.2"
dbus-tokio = "0.7.6"
dbus-tree = "0.9.2"
# D-Bus codegen only on Linux
[target.'cfg(target_os = "linux")'.build-dependencies]
dbus-codegen = "0.10.0"
dbus-crossroads = "0.5.1"
# XPC (libxpc) interface for macOS IPC
[target.'cfg(target_os = "macos")'.dependencies]
block = "0.1.6"
futures = "0.3.31"
xpc-connection = { git = "https://github.com/dfrankland/xpc-connection-rs.git", rev = "cd4fb3d", package = "xpc-connection" }
xpc-connection-sys = { git = "https://github.com/dfrankland/xpc-connection-rs.git", rev = "cd4fb3d", package = "xpc-connection-sys" }
serde = { version = "1.0", features = ["derive"] }
[package.metadata.generate-rpm]
assets = [
{ source = "../target/release/kordophoned", dest = "/usr/libexec/kordophoned", mode = "755" },
{ source = "../target/release/kpcli", dest = "/usr/bin/kpcli", mode = "755" },
{ source = "include/net.buzzert.kordophonecd.service", dest = "/usr/share/dbus-1/services/net.buzzert.kordophonecd.service", mode = "644" },
]

View File

@@ -0,0 +1,37 @@
# kordophoned
This is the Kordophone client daemon. It exposes a D-Bus interface for accessing the caching layer, handles the update cycle, and so on.
# Building RPM
Make sure cargo-generate-rpm is installed: `cargo install cargo-generate-rpm`.
Then:
```
cargo build --release
strip -s target/release/kordophoned
cargo generate-rpm
```
## Running on macOS
Before any client can talk to the kordophone daemon on macOS, the XPC service needs to be manually registered with launchd.
- Register using `launchctl load net.buzzert.kordophonecd.plist`
The plan is to embed this into the app executable so it no longer needs to be registered manually; the app would register the agent with the following Swift code:
```swift
try? SMAppService.agent(plistName: "net.buzzert.kordophonecd.plist").register()
```
and the following in Info.plist:
```xml
<key>Label</key><string>net.buzzert.kordophonecd</string>
<key>BundleProgram</key><string>Contents/MacOS/kordophoned</string>
<key>MachServices</key><dict><key>net.buzzert.kordophonecd</key><true/></dict>
<key>KeepAlive</key><true/>
```

29
core/kordophoned/build.rs Normal file
View File

@@ -0,0 +1,29 @@
const KORDOPHONE_XML: &str = "include/net.buzzert.kordophonecd.Server.xml";
#[cfg(not(target_os = "linux"))]
fn main() {
// No D-Bus code generation on non-Linux platforms
}
#[cfg(target_os = "linux")]
fn main() {
// Generate D-Bus code
let out_dir = std::env::var("OUT_DIR").unwrap();
let out_path = std::path::Path::new(&out_dir).join("kordophone-server.rs");
let opts = dbus_codegen::GenOpts {
connectiontype: dbus_codegen::ConnectionType::Nonblock,
methodtype: None, // Set to None for crossroads
crossroads: true,
..Default::default()
};
let xml = std::fs::read_to_string(KORDOPHONE_XML).expect("Error reading server dbus interface");
let output =
dbus_codegen::generate(&xml, &opts).expect("Error generating server dbus interface");
std::fs::write(out_path, output).expect("Error writing server dbus code");
println!("cargo:rerun-if-changed={}", KORDOPHONE_XML);
}

View File

@@ -0,0 +1,190 @@
<!DOCTYPE node PUBLIC "-//freedesktop//DTD D-BUS Object Introspection 1.0//EN"
"http://www.freedesktop.org/standards/dbus/1.0/introspect.dtd">
<node>
<interface name="net.buzzert.kordophone.Repository">
<method name="GetVersion">
<arg type="s" name="version" direction="out" />
<annotation name="org.freedesktop.DBus.DocString"
value="Returns the version of the client daemon."/>
</method>
<!-- Conversations -->
<method name="GetConversations">
<arg type="i" name="limit" direction="in"/>
<arg type="i" name="offset" direction="in"/>
<arg type="aa{sv}" direction="out" name="conversations">
<annotation name="org.freedesktop.DBus.DocString"
value="Array of dictionaries. Each dictionary has keys:
'id' (string): Unique identifier
'display_name' (string): Display name
'last_message_preview' (string): Preview text
'is_unread' (boolean): Unread status
'date' (int64): Date of last message
'participants' (array of strings): List of participants
'unread_count' (int32): Number of unread messages"/>
</arg>
</method>
<method name="SyncConversationList">
<annotation name="org.freedesktop.DBus.DocString"
value="Initiates a background sync of the conversation list with the server."/>
</method>
<method name="SyncAllConversations">
<annotation name="org.freedesktop.DBus.DocString"
value="Initiates a background sync of all conversations with the server."/>
</method>
<method name="SyncConversation">
<arg type="s" name="conversation_id" direction="in"/>
<annotation name="org.freedesktop.DBus.DocString"
value="Initiates a background sync of a single conversation with the server."/>
</method>
<method name="MarkConversationAsRead">
<arg type="s" name="conversation_id" direction="in"/>
<annotation name="org.freedesktop.DBus.DocString"
value="Marks a conversation as read."/>
</method>
<signal name="ConversationsUpdated">
<annotation name="org.freedesktop.DBus.DocString"
value="Emitted when the list of conversations is updated."/>
</signal>
<method name="DeleteAllConversations">
<annotation name="org.freedesktop.DBus.DocString"
value="Deletes all conversations from the database."/>
</method>
<!-- Messages -->
<method name="GetMessages">
<arg type="s" name="conversation_id" direction="in"/>
<arg type="s" name="last_message_id" direction="in"/>
<arg type="aa{sv}" direction="out" name="messages">
<annotation name="org.freedesktop.DBus.DocString"
value="Array of dictionaries. Each dictionary has keys:
'id' (string): Unique message identifier
'text' (string): Message body text
'date' (int64): Message timestamp
'sender' (string): Sender display name
'attachments' (array of dictionaries): List of attachments
'guid' (string): Attachment GUID
'path' (string): Attachment path
'preview_path' (string): Preview attachment path
'downloaded' (boolean): Whether the attachment is downloaded
'preview_downloaded' (boolean): Whether the preview is downloaded
'metadata' (dictionary, optional): Attachment metadata
'attribution_info' (dictionary, optional): Attribution info
'width' (int32, optional): Width
'height' (int32, optional): Height"/>
</arg>
</method>
<method name="SendMessage">
<arg type="s" name="conversation_id" direction="in"/>
<arg type="s" name="text" direction="in"/>
<arg type="as" name="attachment_guids" direction="in"/>
<arg type="s" name="outgoing_message_id" direction="out"/>
<annotation name="org.freedesktop.DBus.DocString"
value="Sends a message to the server. Returns the outgoing message ID.
Arguments:
- conversation_id: The ID of the conversation to send the message to.
- text: The text of the message to send.
- attachment_guids: The GUIDs of the attachments to send.
Returns:
- outgoing_message_id: The ID of the outgoing message.
"/>
</method>
<signal name="MessagesUpdated">
<arg type="s" name="conversation_id" direction="in"/>
<annotation name="org.freedesktop.DBus.DocString"
value="Emitted when the list of messages is updated."/>
</signal>
<signal name="UpdateStreamReconnected">
<annotation name="org.freedesktop.DBus.DocString"
value="Emitted when the update stream is reconnected after a timeout or configuration change."/>
</signal>
<!-- Attachments -->
<method name="GetAttachmentInfo">
<arg type="s" name="attachment_id" direction="in"/>
<arg type="(ssbb)" name="attachment_info" direction="out"/>
<annotation name="org.freedesktop.DBus.DocString"
value="Returns attachment info:
- path: string
- preview_path: string
- downloaded: boolean
- preview_downloaded: boolean
"/>
</method>
<method name="DownloadAttachment">
<arg type="s" name="attachment_id" direction="in"/>
<arg type="b" name="preview" direction="in"/>
<annotation name="org.freedesktop.DBus.DocString"
value="Initiates download of the specified attachment if not already downloaded.
Arguments:
attachment_id: the attachment GUID
preview: whether to download the preview (true) or full attachment (false)
"/>
</method>
<method name="UploadAttachment">
<arg type="s" name="path" direction="in"/>
<arg type="s" name="upload_guid" direction="out"/>
</method>
<signal name="AttachmentDownloadCompleted">
<arg type="s" name="attachment_id"/>
<annotation name="org.freedesktop.DBus.DocString"
value="Emitted when an attachment download completes successfully."/>
</signal>
<signal name="AttachmentDownloadFailed">
<arg type="s" name="attachment_id"/>
<arg type="s" name="error_message"/>
<annotation name="org.freedesktop.DBus.DocString"
value="Emitted when an attachment download fails."/>
</signal>
<signal name="AttachmentUploadCompleted">
<arg type="s" name="upload_guid"/>
<arg type="s" name="attachment_guid"/>
<annotation name="org.freedesktop.DBus.DocString"
value="Emitted when an attachment upload completes successfully.
Returns:
- upload_guid: The GUID of the upload.
- attachment_guid: The GUID of the attachment on the server.
"/>
</signal>
</interface>
<interface name="net.buzzert.kordophone.Settings">
<!-- editable properties -->
<property name="ServerURL" type="s" access="readwrite"/>
<property name="Username" type="s" access="readwrite"/>
<!-- helpers for atomic updates -->
<method name="SetServer">
<arg name="url" type="s" direction="in"/>
<arg name="user" type="s" direction="in"/>
</method>
<!-- emitted when anything changes -->
<signal name="ConfigChanged"/>
</interface>
</node>
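
As a rough client-side sketch, a Rust program could call the Repository interface through the blocking `dbus` crate (the same crate used elsewhere in this repo). The bus and interface names below come from this interface file and the D-Bus service file; the object path `/net/buzzert/kordophonecd` is an assumed placeholder for illustration.

```rust
use std::time::Duration;
use dbus::blocking::Connection;

// Sketch only: fetch the daemon version over D-Bus.
fn get_daemon_version() -> Result<String, dbus::Error> {
    let conn = Connection::new_session()?;
    let proxy = conn.with_proxy(
        "net.buzzert.kordophonecd",   // bus name from the D-Bus service file
        "/net/buzzert/kordophonecd",  // assumed object path (illustrative)
        Duration::from_secs(2),
    );
    let (version,): (String,) =
        proxy.method_call("net.buzzert.kordophone.Repository", "GetVersion", ())?;
    Ok(version)
}
```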

View File

@@ -0,0 +1,35 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>net.buzzert.kordophonecd</string>
<key>ProgramArguments</key>
<array>
<string>/Users/buzzert/src/kordophone/kordophone-rs/target/debug/kordophoned</string>
</array>
<key>EnvironmentVariables</key>
<dict>
<key>RUST_LOG</key>
<string>info</string>
</dict>
<key>MachServices</key>
<dict>
<key>net.buzzert.kordophonecd</key>
<true/>
</dict>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
<key>StandardOutPath</key>
<string>/tmp/kordophoned.out.log</string>
<key>StandardErrorPath</key>
<string>/tmp/kordophoned.err.log</string>
</dict>
</plist>

View File

@@ -0,0 +1,4 @@
[D-BUS Service]
Name=net.buzzert.kordophonecd
Exec=/usr/libexec/kordophoned

View File

@@ -0,0 +1,280 @@
use std::{
io::{BufWriter, Write},
path::PathBuf,
};
use anyhow::Result;
use futures_util::StreamExt;
use kordophone::APIInterface;
use thiserror::Error;
use kordophone_db::database::Database;
use crate::daemon::events::Event as DaemonEvent;
use crate::daemon::events::Reply;
use crate::daemon::models::Attachment;
use crate::daemon::Daemon;
use std::sync::Arc;
use tokio::sync::mpsc::{Receiver, Sender};
use tokio::sync::Mutex;
use uuid::Uuid;
mod target {
pub static ATTACHMENTS: &str = "attachments";
}
#[derive(Debug)]
pub enum AttachmentStoreEvent {
// Get the attachment info for a given attachment guid.
// Args: attachment guid, reply channel.
GetAttachmentInfo(String, Reply<Attachment>),
// Queue a download for a given attachment guid.
// Args:
// - attachment guid
// - preview: whether to download the preview (true) or full attachment (false)
QueueDownloadAttachment(String, bool),
// Queue an upload for a given attachment file.
// Args:
// - path: the path to the attachment file
// - reply: a reply channel to send the pending upload guid to
QueueUploadAttachment(PathBuf, Reply<String>),
}
#[derive(Debug, Error)]
enum AttachmentStoreError {
#[error("attachment has already been downloaded")]
AttachmentAlreadyDownloaded,
#[error("temporary file already exists, assuming download is in progress")]
DownloadAlreadyInProgress,
#[error("Client error: {0}")]
APIClientError(String),
}
pub struct AttachmentStore {
store_path: PathBuf,
database: Arc<Mutex<Database>>,
daemon_event_sink: Sender<DaemonEvent>,
event_source: Receiver<AttachmentStoreEvent>,
event_sink: Option<Sender<AttachmentStoreEvent>>,
}
impl AttachmentStore {
pub fn get_default_store_path() -> PathBuf {
let data_dir = Daemon::get_data_dir().expect("Unable to get data path");
data_dir.join("attachments")
}
pub fn new(
database: Arc<Mutex<Database>>,
daemon_event_sink: Sender<DaemonEvent>,
) -> AttachmentStore {
let store_path = Self::get_default_store_path();
log::info!(target: target::ATTACHMENTS, "Attachment store path: {}", store_path.display());
// Create the attachment store if it doesn't exist
std::fs::create_dir_all(&store_path)
.expect("Wasn't able to create the attachment store path");
let (event_sink, event_source) = tokio::sync::mpsc::channel(100);
AttachmentStore {
store_path,
database,
daemon_event_sink,
event_source,
event_sink: Some(event_sink),
}
}
pub fn get_event_sink(&mut self) -> Sender<AttachmentStoreEvent> {
self.event_sink.take().unwrap()
}
fn get_attachment(&self, guid: &String) -> Attachment {
Self::get_attachment_impl(&self.store_path, guid)
}
pub fn get_attachment_impl(store_path: &PathBuf, guid: &String) -> Attachment {
let base_path = store_path.join(guid);
Attachment {
guid: guid.to_owned(),
base_path,
metadata: None,
}
}
async fn download_attachment_impl(
store_path: &PathBuf,
database: &mut Arc<Mutex<Database>>,
daemon_event_sink: &Sender<DaemonEvent>,
guid: &String,
preview: bool,
) -> Result<()> {
let attachment = Self::get_attachment_impl(store_path, guid);
if attachment.is_downloaded(preview) {
log::debug!(target: target::ATTACHMENTS, "Attachment already downloaded: {}", attachment.guid);
return Err(AttachmentStoreError::AttachmentAlreadyDownloaded.into());
}
let temporary_path = attachment.get_path_for_preview_scratch(preview, true);
if std::fs::exists(&temporary_path).unwrap_or(false) {
log::warn!(target: target::ATTACHMENTS, "Temporary file already exists: {}, assuming download is in progress", temporary_path.display());
return Err(AttachmentStoreError::DownloadAlreadyInProgress.into());
}
log::debug!(target: target::ATTACHMENTS, "Starting download for attachment: {}", attachment.guid);
let file = std::fs::File::create(&temporary_path)?;
let mut writer = BufWriter::new(&file);
let mut client = Daemon::get_client_impl(database).await?;
let mut stream = client
.fetch_attachment_data(&attachment.guid, preview)
.await
.map_err(|e| AttachmentStoreError::APIClientError(format!("{:?}", e)))?;
log::trace!(target: target::ATTACHMENTS, "Writing attachment {:?} data to temporary file {:?}", &attachment.guid, &temporary_path);
while let Some(Ok(data)) = stream.next().await {
writer.write_all(data.as_ref())?;
}
// Flush and sync the temporary file before moving
writer.flush()?;
file.sync_all()?;
// Atomically move the temporary file to the final location
std::fs::rename(
&temporary_path,
&attachment.get_path_for_preview_scratch(preview, false),
)?;
log::debug!(target: target::ATTACHMENTS, "Completed download for attachment: {}", attachment.guid);
// Send a signal to the daemon that the attachment has been downloaded.
let event = DaemonEvent::AttachmentDownloaded(attachment.guid.clone());
daemon_event_sink.send(event).await.unwrap();
Ok(())
}
async fn upload_attachment_impl(
store_path: &PathBuf,
incoming_path: &PathBuf,
upload_guid: &String,
database: &mut Arc<Mutex<Database>>,
daemon_event_sink: &Sender<DaemonEvent>,
) -> Result<String> {
use tokio::fs::File;
use tokio::io::BufReader;
// Create uploads directory if it doesn't exist.
let uploads_path = store_path.join("uploads");
std::fs::create_dir_all(&uploads_path)?;
// First, copy the file to the store path, under /uploads/.
log::trace!(target: target::ATTACHMENTS, "Copying attachment to uploads directory: {}", uploads_path.display());
let temporary_path = uploads_path.join(incoming_path.file_name().unwrap());
std::fs::copy(incoming_path, &temporary_path)?;
// Open file handle to the temporary file,
log::trace!(target: target::ATTACHMENTS, "Opening stream to temporary file: {}", temporary_path.display());
let file = File::open(&temporary_path).await?;
let reader: BufReader<File> = BufReader::new(file);
// Upload the file to the server.
let filename = incoming_path.file_name().unwrap().to_str().unwrap();
log::trace!(target: target::ATTACHMENTS, "Uploading attachment to server: {}", &filename);
let mut client = Daemon::get_client_impl(database).await?;
let metadata = std::fs::metadata(&temporary_path)?;
let size = metadata.len();
let guid = client.upload_attachment(reader, filename, size).await?;
// Delete the temporary file.
log::debug!(target: target::ATTACHMENTS, "Upload completed with guid {}, deleting temporary file: {}", guid, temporary_path.display());
std::fs::remove_file(&temporary_path)?;
// Send a signal to the daemon that the attachment has been uploaded.
let event = DaemonEvent::AttachmentUploaded(upload_guid.clone(), guid.clone());
daemon_event_sink.send(event).await.unwrap();
Ok(guid)
}
pub async fn run(&mut self) {
loop {
tokio::select! {
Some(event) = self.event_source.recv() => {
log::debug!(target: target::ATTACHMENTS, "Received attachment store event: {:?}", event);
match event {
AttachmentStoreEvent::QueueDownloadAttachment(guid, preview) => {
let attachment = self.get_attachment(&guid);
if !attachment.is_downloaded(preview) {
let store_path = self.store_path.clone();
let mut database = self.database.clone();
let daemon_event_sink = self.daemon_event_sink.clone();
let _guid = guid.clone();
// Spawn a new task here so we don't block incoming queue events.
tokio::spawn(async move {
let result = Self::download_attachment_impl(
&store_path,
&mut database,
&daemon_event_sink,
&_guid,
preview,
).await;
if let Err(e) = result {
log::error!(target: target::ATTACHMENTS, "Error downloading attachment {}: {}", &_guid, e);
}
});
log::debug!(target: target::ATTACHMENTS, "Queued download for attachment: {}", &guid);
} else {
log::debug!(target: target::ATTACHMENTS, "Attachment already downloaded: {}", guid);
}
}
AttachmentStoreEvent::GetAttachmentInfo(guid, reply) => {
let attachment = self.get_attachment(&guid);
reply.send(attachment).unwrap();
}
AttachmentStoreEvent::QueueUploadAttachment(path, reply) => {
let upload_guid = Uuid::new_v4().to_string();
let store_path = self.store_path.clone();
let mut database = self.database.clone();
let daemon_event_sink = self.daemon_event_sink.clone();
let _upload_guid = upload_guid.clone();
tokio::spawn(async move {
let result = Self::upload_attachment_impl(
&store_path,
&path,
&_upload_guid,
&mut database,
&daemon_event_sink,
).await;
if let Err(e) = result {
log::error!(target: target::ATTACHMENTS, "Error uploading attachment {}: {}", &_upload_guid, e);
}
});
reply.send(upload_guid).unwrap();
}
}
}
}
}
}
}
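
A short sketch of how another task might queue work through the channel returned by `get_event_sink()`; the helper name `queue_preview_download` is hypothetical.

```rust
// Sketch: queue a preview download from another task via the store's event channel.
async fn queue_preview_download(
    sink: tokio::sync::mpsc::Sender<AttachmentStoreEvent>,
    guid: String,
) {
    if let Err(e) = sink.send(AttachmentStoreEvent::QueueDownloadAttachment(guid, true)).await {
        log::warn!("attachment store is no longer running: {}", e);
    }
}
```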

View File

@@ -0,0 +1,87 @@
use crate::daemon::SettingsKey;
use keyring::{Entry, Result};
use std::sync::Arc;
use tokio::sync::Mutex;
use kordophone::api::{http_client::Credentials, AuthenticationStore};
use kordophone_db::database::{Database, DatabaseAccess};
use async_trait::async_trait;
pub struct DatabaseAuthenticationStore {
database: Arc<Mutex<Database>>,
}
impl DatabaseAuthenticationStore {
pub fn new(database: Arc<Mutex<Database>>) -> Self {
Self { database }
}
}
#[async_trait]
impl AuthenticationStore for DatabaseAuthenticationStore {
async fn get_credentials(&mut self) -> Option<Credentials> {
self.database
.lock()
.await
.with_settings(|settings| {
let username: Option<String> = settings
.get::<String>(SettingsKey::USERNAME)
.unwrap_or_else(|e| {
log::warn!("error getting username from database: {}", e);
None
});
match username {
Some(username) => {
let credential_res = Entry::new("net.buzzert.kordophonecd", &username);
let password: Result<String> = match credential_res {
Ok(credential) => credential.get_password(),
Err(e) => {
log::error!("error creating keyring credential: {}", e);
return None;
}
};
match password {
Ok(password) => Some(Credentials { username, password }),
Err(e) => {
log::error!("error getting password from keyring: {}", e);
None
}
}
}
None => None,
}
})
.await
}
async fn get_token(&mut self) -> Option<String> {
self.database
.lock()
.await
.with_settings(
|settings| match settings.get::<String>(SettingsKey::TOKEN) {
Ok(token) => token,
Err(e) => {
log::warn!("Failed to get token from settings: {}", e);
None
}
},
)
.await
}
async fn set_token(&mut self, token: String) {
self.database
.lock()
.await
.with_settings(|settings| settings.put(SettingsKey::TOKEN, &token))
.await
.unwrap_or_else(|e| {
log::error!("Failed to set token: {}", e);
});
}
}

View File

@@ -0,0 +1,294 @@
use super::ContactResolverBackend;
use dbus::arg::{RefArg, Variant};
use dbus::blocking::Connection;
use once_cell::sync::OnceCell;
use std::collections::HashMap;
use std::sync::Mutex;
use std::time::Duration;
#[derive(Clone)]
pub struct EDSContactResolverBackend;
// Cache the UID of the default local address book so we do not have to scan
// all sources over and over again. Discovering the address book requires a
// D-Bus round-trip that we would rather avoid on every lookup.
static ADDRESS_BOOK_SOURCE_UID: OnceCell<String> = OnceCell::new();
/// Holds a D-Bus connection and the identifiers needed to create an address-book proxy.
struct AddressBookHandle {
connection: Connection,
object_path: String,
bus_name: String,
}
impl AddressBookHandle {
fn new() -> anyhow::Result<Self> {
let connection = new_session_connection()?;
let source_uid = ensure_address_book_uid(&connection)?;
let (object_path, bus_name) = open_address_book(&connection, &source_uid)?;
Ok(Self {
connection,
object_path,
bus_name,
})
}
}
/// Obtain the global address-book handle, initialising it on the first call.
static ADDRESS_BOOK_HANDLE: OnceCell<Mutex<AddressBookHandle>> = OnceCell::new();
/// Check whether a given well-known name currently has an owner on the bus.
fn name_has_owner(conn: &Connection, name: &str) -> bool {
let proxy = conn.with_proxy(
"org.freedesktop.DBus",
"/org/freedesktop/DBus",
Duration::from_secs(2),
);
let result: Result<(bool,), _> =
proxy.method_call("org.freedesktop.DBus", "NameHasOwner", (name.to_string(),));
result.map(|(b,)| b).unwrap_or(false)
}
/// Returns a fresh handle, ensuring the cached one is still valid. If the backend owning the
/// address-book disappeared, the cache is cleared and we try to create a new handle.
fn obtain_handle() -> Option<std::sync::MutexGuard<'static, AddressBookHandle>> {
// Initialize cell if necessary.
let cell = ADDRESS_BOOK_HANDLE
.get_or_try_init(|| AddressBookHandle::new().map(Mutex::new))
.ok()?;
// Validate existing handle.
{
let mut guard = cell.lock().ok()?;
if !name_has_owner(&guard.connection, &guard.bus_name) {
// Try to refresh the handle in-place.
match AddressBookHandle::new() {
Ok(new_h) => {
*guard = new_h;
}
Err(e) => {
log::debug!("EDS resolver: failed to refresh address book handle: {}", e);
// keep the stale handle but report failure
return None;
}
}
}
// Return guard after ensuring validity.
return Some(guard);
}
}
/// Helper that returns a blocking D-Bus session connection. Creating the
/// connection is cheap (<1 ms) but we still keep it around because the
/// underlying socket is re-used by the dbus crate.
fn new_session_connection() -> Result<Connection, dbus::Error> {
Connection::new_session()
}
/// Scan Evolution-Data-Server sources to find a suitable address-book source
/// UID. The implementation mirrors what `gdbus introspect` reveals for the
/// EDS interfaces. We search all `org.gnome.evolution.dataserver.Source`
/// objects and pick the first one that advertises the `[Address Book]` section
/// with a `BackendName=` entry in its INI-style `Data` property.
fn ensure_address_book_uid(conn: &Connection) -> anyhow::Result<String> {
if let Some(uid) = ADDRESS_BOOK_SOURCE_UID.get() {
return Ok(uid.clone());
}
let source_manager_proxy = conn.with_proxy(
"org.gnome.evolution.dataserver.Sources5",
"/org/gnome/evolution/dataserver/SourceManager",
Duration::from_secs(5),
);
// The GetManagedObjects reply is the usual ObjectManager map.
let (managed_objects,): (
HashMap<dbus::Path<'static>, HashMap<String, HashMap<String, Variant<Box<dyn RefArg>>>>>,
) = source_manager_proxy.method_call(
"org.freedesktop.DBus.ObjectManager",
"GetManagedObjects",
(),
)?;
let uid = managed_objects
.values()
.filter_map(|ifaces| ifaces.get("org.gnome.evolution.dataserver.Source"))
.filter_map(|props| {
let uid = props.get("UID")?.as_str()?;
if uid == "system-address-book" {
// Decoy.
return None;
}
let data = props.get("Data")?.as_str()?;
if data_contains_address_book_backend(data) {
Some(uid.to_owned())
} else {
None
}
})
.next()
.ok_or_else(|| anyhow::anyhow!("No address book source found"))?;
// Remember for future look-ups.
log::debug!("EDS resolver: found address book source UID: {}", uid);
let _ = ADDRESS_BOOK_SOURCE_UID.set(uid.clone());
Ok(uid)
}
fn data_contains_address_book_backend(data: &str) -> bool {
let mut in_address_book_section = false;
for line in data.lines() {
let trimmed = line.trim();
if trimmed.starts_with('[') && trimmed.ends_with(']') {
in_address_book_section = trimmed == "[Address Book]";
continue;
}
if in_address_book_section && trimmed.starts_with("BackendName=") {
return true;
}
}
false
}
/// Open the Evolution address book referenced by `source_uid` and return the
/// pair `(object_path, bus_name)` that identifies the newly created D-Bus
/// proxy.
fn open_address_book(conn: &Connection, source_uid: &str) -> anyhow::Result<(String, String)> {
let factory_proxy = conn.with_proxy(
"org.gnome.evolution.dataserver.AddressBook10",
"/org/gnome/evolution/dataserver/AddressBookFactory",
Duration::from_secs(60),
);
let (object_path, bus_name): (String, String) = factory_proxy.method_call(
"org.gnome.evolution.dataserver.AddressBookFactory",
"OpenAddressBook",
(source_uid.to_owned(),),
)?;
Ok((object_path, bus_name))
}
/// Ensure that the backend for the given address-book proxy is opened.
/// Evolution-Data-Server returns "Backend is not opened yet" until someone
/// calls the `Open` method once per process. We ignore any error here
/// because the backend might already be open.
fn ensure_address_book_open(proxy: &dbus::blocking::Proxy<&Connection>) {
let _: Result<(), _> =
proxy.method_call("org.gnome.evolution.dataserver.AddressBook", "Open", ());
}
impl ContactResolverBackend for EDSContactResolverBackend {
type ContactID = String;
fn resolve_contact_id(&self, address: &str) -> Option<Self::ContactID> {
let handle = match obtain_handle() {
Some(h) => h,
None => return None,
};
let address_book_proxy = handle.connection.with_proxy(
&handle.bus_name,
&handle.object_path,
Duration::from_secs(60),
);
ensure_address_book_open(&address_book_proxy);
let filter = if address.contains('@') {
format!("(is \"email\" \"{}\")", address)
} else {
let mut filters: Vec<String> = Vec::new();
filters.push(format!("(is \"phone\" \"{}\")", address));
let normalized_address = address
.chars()
.filter(|c| c.is_numeric())
.collect::<String>();
filters.push(format!("(is \"phone\" \"{}\")", normalized_address));
let local_address = address
.replace('+', "")
.chars()
.skip_while(|c| c.is_numeric() || *c == '(' || *c == ')')
.collect::<String>()
.chars()
.filter(|c| c.is_numeric())
.collect::<String>();
if !local_address.is_empty() {
filters.push(format!("(is \"phone\" \"{}\")", local_address));
}
format!("(or {})", filters.join(" "))
};
log::trace!(
"EDS resolver: GetContactListUids filter: {}, address: {}",
filter,
address
);
let uids_result: Result<(Vec<String>,), _> = address_book_proxy.method_call(
"org.gnome.evolution.dataserver.AddressBook",
"GetContactListUids",
(filter,),
);
let (uids,) = match uids_result {
Ok(v) => v,
Err(e) => {
log::debug!("EDS resolver: GetContactListUids failed: {}", e);
return None;
}
};
uids.into_iter().next()
}
fn get_contact_display_name(&self, contact_id: &Self::ContactID) -> Option<String> {
let handle = match obtain_handle() {
Some(h) => h,
None => return None,
};
let address_book_proxy = handle.connection.with_proxy(
&handle.bus_name,
&handle.object_path,
Duration::from_secs(60),
);
ensure_address_book_open(&address_book_proxy);
let vcard_result: Result<(String,), _> = address_book_proxy.method_call(
"org.gnome.evolution.dataserver.AddressBook",
"GetContact",
(contact_id.clone(),),
);
let (vcard,) = match vcard_result {
Ok(v) => v,
Err(e) => {
log::debug!("EDS resolver: GetContact failed: {}", e);
return None;
}
};
for line in vcard.lines() {
if let Some(rest) = line.strip_prefix("FN:") {
return Some(rest.to_string());
}
}
None
}
}
impl Default for EDSContactResolverBackend {
fn default() -> Self {
Self
}
}

View File

@@ -0,0 +1,16 @@
use super::ContactResolverBackend;
#[derive(Clone, Default)]
pub struct GenericContactResolverBackend;
impl ContactResolverBackend for GenericContactResolverBackend {
type ContactID = String;
fn resolve_contact_id(&self, address: &str) -> Option<Self::ContactID> {
None
}
fn get_contact_display_name(&self, contact_id: &Self::ContactID) -> Option<String> {
None
}
}

View File

@@ -0,0 +1,107 @@
#[cfg(target_os = "linux")]
pub mod eds;
pub mod generic;
// Convenient alias for the platform's default backend
#[cfg(target_os = "linux")]
pub type DefaultContactResolverBackend = eds::EDSContactResolverBackend;
#[cfg(not(target_os = "linux"))]
pub type DefaultContactResolverBackend = generic::GenericContactResolverBackend;
#[cfg(not(target_os = "linux"))]
#[derive(Clone)]
pub struct EDSContactResolverBackend;
#[cfg(not(target_os = "linux"))]
impl Default for EDSContactResolverBackend {
fn default() -> Self {
EDSContactResolverBackend
}
}
#[cfg(not(target_os = "linux"))]
impl ContactResolverBackend for EDSContactResolverBackend {
type ContactID = String;
fn resolve_contact_id(&self, _address: &str) -> Option<Self::ContactID> {
None
}
fn get_contact_display_name(&self, _contact_id: &Self::ContactID) -> Option<String> {
None
}
}
use std::collections::HashMap;
pub trait ContactResolverBackend {
type ContactID;
fn resolve_contact_id(&self, address: &str) -> Option<Self::ContactID>;
fn get_contact_display_name(&self, contact_id: &Self::ContactID) -> Option<String>;
}
pub type AnyContactID = String;
#[derive(Clone)]
pub struct ContactResolver<T: ContactResolverBackend> {
backend: T,
display_name_cache: HashMap<AnyContactID, String>,
contact_id_cache: HashMap<String, AnyContactID>,
}
impl<T: ContactResolverBackend> ContactResolver<T>
where
T::ContactID: From<AnyContactID>,
T::ContactID: Into<AnyContactID>,
T: Default,
{
pub fn new(backend: T) -> Self {
Self {
backend,
display_name_cache: HashMap::new(),
contact_id_cache: HashMap::new(),
}
}
pub fn resolve_contact_id(&mut self, address: &str) -> Option<AnyContactID> {
if let Some(id) = self.contact_id_cache.get(address) {
return Some(id.clone());
}
let id = self.backend.resolve_contact_id(address).map(|id| id.into());
if let Some(ref id) = id {
self.contact_id_cache
.insert(address.to_string(), id.clone());
}
id
}
pub fn get_contact_display_name(&mut self, contact_id: &AnyContactID) -> Option<String> {
if let Some(display_name) = self.display_name_cache.get(contact_id) {
return Some(display_name.clone());
}
let backend_contact_id: T::ContactID = T::ContactID::from((*contact_id).clone());
let display_name = self.backend.get_contact_display_name(&backend_contact_id);
if let Some(ref display_name) = display_name {
self.display_name_cache
.insert(contact_id.to_string(), display_name.clone());
}
display_name
}
}
impl<T: ContactResolverBackend> Default for ContactResolver<T>
where
T::ContactID: From<AnyContactID>,
T::ContactID: Into<AnyContactID>,
T: Default,
{
fn default() -> Self {
Self::new(T::default())
}
}
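// Illustrative only (not from the original source): a small test sketch showing how a
// backend plugs into `ContactResolver` and that results are cached after the first lookup.
#[cfg(test)]
mod tests {
use super::*;
// Stub backend that "resolves" every address by prefixing it.
#[derive(Clone, Default)]
struct StubBackend;
impl ContactResolverBackend for StubBackend {
type ContactID = String;
fn resolve_contact_id(&self, address: &str) -> Option<Self::ContactID> {
Some(format!("stub:{}", address))
}
fn get_contact_display_name(&self, contact_id: &Self::ContactID) -> Option<String> {
Some(contact_id.trim_start_matches("stub:").to_string())
}
}
#[test]
fn resolver_caches_lookups() {
let mut resolver = ContactResolver::new(StubBackend);
let id = resolver
.resolve_contact_id("alice@example.com")
.expect("stub always resolves");
assert_eq!(id, "stub:alice@example.com");
assert_eq!(
resolver.get_contact_display_name(&id).as_deref(),
Some("alice@example.com")
);
// The second lookup is served from the in-memory caches.
assert_eq!(
resolver.resolve_contact_id("alice@example.com").as_deref(),
Some("stub:alice@example.com")
);
}
}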

View File

@@ -0,0 +1,103 @@
use tokio::sync::oneshot;
use uuid::Uuid;
use kordophone::model::ConversationID;
use kordophone::model::OutgoingMessage;
use kordophone_db::models::Conversation;
use crate::daemon::settings::Settings;
use crate::daemon::{Attachment, Message};
pub type Reply<T> = oneshot::Sender<T>;
use std::path::PathBuf;
#[derive(Debug)]
pub enum Event {
/// Get the version of the daemon.
GetVersion(Reply<String>),
/// Asynchronous event for syncing the conversation list with the server.
SyncConversationList(Reply<()>),
/// Asynchronous event for syncing all conversations with the server.
SyncAllConversations(Reply<()>),
/// Asynchronous event for syncing a single conversation with the server.
SyncConversation(String, Reply<()>),
/// Asynchronous event for marking a conversation as read.
MarkConversationAsRead(String, Reply<()>),
/// Asynchronous event for updating the metadata for a conversation.
UpdateConversationMetadata(Conversation, Reply<()>),
/// Sent when the update stream is reconnected after a timeout or configuration change.
UpdateStreamReconnected,
/// Returns all known conversations from the database.
/// Parameters:
/// - limit: The maximum number of conversations to return. (-1 for no limit)
/// - offset: The offset into the conversation list to start returning conversations from.
GetAllConversations(i32, i32, Reply<Vec<Conversation>>),
/// Returns all known settings from the database.
GetAllSettings(Reply<Settings>),
/// Update settings in the database.
UpdateSettings(Settings, Reply<()>),
/// Returns all messages for a conversation from the database.
/// Parameters:
/// - conversation_id: The ID of the conversation to get messages for.
/// - last_message_id: (optional) The ID of the last message to get. If None, all messages are returned.
GetMessages(String, Option<String>, Reply<Vec<Message>>),
/// Enqueues a message to be sent to the server.
/// Parameters:
/// - conversation_id: The ID of the conversation to send the message to.
/// - text: The text of the message to send.
/// - attachment_guids: The GUIDs of the attachments to send.
/// - reply: The outgoing message ID (not the server-assigned message ID).
SendMessage(String, String, Vec<String>, Reply<Uuid>),
/// Notifies the daemon that a message has been sent.
/// Parameters:
/// - message: The message that was sent.
/// - outgoing_message: The outgoing message that was sent.
/// - conversation_id: The ID of the conversation that the message was sent to.
MessageSent(Message, OutgoingMessage, ConversationID),
/// Gets an attachment object from the attachment store.
/// Parameters:
/// - guid: The attachment guid
/// - reply: Reply of the attachment object, if known.
GetAttachment(String, Reply<Attachment>),
/// Downloads an attachment from the server.
/// Parameters:
/// - attachment_id: The attachment ID to download
/// - preview: Whether to download the preview (true) or full attachment (false)
/// - reply: Reply indicating success or failure
DownloadAttachment(String, bool, Reply<()>),
/// Delete all conversations from the database.
DeleteAllConversations(Reply<()>),
/// Notifies the daemon that an attachment has been downloaded.
/// Parameters:
/// - attachment_id: The attachment ID that was downloaded.
AttachmentDownloaded(String),
/// Upload an attachment to the server.
/// Parameters:
/// - path: The path to the attachment file
/// - reply: Reply indicating the upload GUID
UploadAttachment(PathBuf, Reply<String>),
/// Notifies the daemon that an attachment has been uploaded.
/// Parameters:
/// - upload_id: The upload ID that was uploaded.
/// - attachment_id: The attachment ID that was uploaded.
AttachmentUploaded(String, String),
}
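// Usage sketch (not part of the original source): every request variant carries a oneshot
// `Reply` channel; callers await the receiver for the result. Roughly:
//
//     let (tx, rx) = tokio::sync::oneshot::channel();
//     event_sender.send(Event::GetVersion(tx)).await?;
//     let version: String = rx.await?;
//
// where `event_sender` is the daemon's `Sender<Event>` handle.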

View File

@@ -0,0 +1,745 @@
pub mod settings;
use settings::keys as SettingsKey;
use settings::Settings;
pub mod events;
use events::*;
pub mod signals;
use signals::*;
use anyhow::Result;
use directories::ProjectDirs;
use std::collections::HashMap;
use std::error::Error;
use std::path::PathBuf;
use std::sync::Arc;
use thiserror::Error;
use tokio::sync::mpsc::{Receiver, Sender};
use tokio::sync::Mutex;
use uuid::Uuid;
use kordophone_db::{
database::{Database, DatabaseAccess},
models::Conversation,
};
use kordophone::api::http_client::HTTPAPIClient;
use kordophone::api::APIInterface;
use kordophone::model::outgoing_message::OutgoingMessage;
use kordophone::model::{ConversationID, MessageID};
mod update_monitor;
use update_monitor::{UpdateMonitor, UpdateMonitorCommand};
mod auth_store;
use auth_store::DatabaseAuthenticationStore;
mod post_office;
use post_office::Event as PostOfficeEvent;
use post_office::PostOffice;
mod models;
pub use models::Attachment;
pub use models::Message;
mod attachment_store;
pub use attachment_store::AttachmentStore;
pub use attachment_store::AttachmentStoreEvent;
pub mod contact_resolver;
use contact_resolver::ContactResolver;
use contact_resolver::DefaultContactResolverBackend;
use kordophone_db::models::participant::Participant as DbParticipant;
#[derive(Debug, Error)]
pub enum DaemonError {
#[error("Client Not Configured")]
ClientNotConfigured,
}
pub type DaemonResult<T> = Result<T, Box<dyn Error + Send + Sync>>;
pub mod target {
pub static SYNC: &str = "sync";
pub static EVENT: &str = "event";
pub static SETTINGS: &str = "settings";
pub static UPDATES: &str = "updates";
pub static ATTACHMENTS: &str = "attachments";
pub static DAEMON: &str = "daemon";
}
pub struct Daemon {
pub event_sender: Sender<Event>,
event_receiver: Receiver<Event>,
signal_receiver: Option<Receiver<Signal>>,
signal_sender: Sender<Signal>,
post_office_sink: Sender<PostOfficeEvent>,
post_office_source: Option<Receiver<PostOfficeEvent>>,
outgoing_messages: HashMap<ConversationID, Vec<OutgoingMessage>>,
attachment_store_sink: Option<Sender<AttachmentStoreEvent>>,
update_monitor_command_tx: Option<Sender<UpdateMonitorCommand>>,
version: String,
database: Arc<Mutex<Database>>,
runtime: tokio::runtime::Runtime,
}
impl Daemon {
pub fn new() -> Result<Self> {
let database_path = Self::get_database_path();
log::info!("Database path: {}", database_path.display());
// Create the database directory if it doesn't exist
let database_dir = database_path.parent().unwrap();
std::fs::create_dir_all(database_dir)?;
// Create event channels
let (event_sender, event_receiver) = tokio::sync::mpsc::channel(100);
let (signal_sender, signal_receiver) = tokio::sync::mpsc::channel(100);
let (post_office_sink, post_office_source) = tokio::sync::mpsc::channel(100);
// Create background task runtime
let runtime = tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.unwrap();
let database_impl = Database::new(&database_path.to_string_lossy())?;
let database = Arc::new(Mutex::new(database_impl));
Ok(Self {
version: env!("CARGO_PKG_VERSION").to_string(),
database,
event_receiver,
event_sender,
signal_receiver: Some(signal_receiver),
signal_sender,
post_office_sink,
post_office_source: Some(post_office_source),
outgoing_messages: HashMap::new(),
attachment_store_sink: None,
update_monitor_command_tx: None,
runtime,
})
}
pub async fn run(&mut self) {
log::info!("Starting daemon version {}", self.version);
log::debug!("Debug logging enabled.");
// Update monitor
let mut update_monitor =
UpdateMonitor::new(self.database.clone(), self.event_sender.clone());
self.update_monitor_command_tx = Some(update_monitor.take_command_channel());
tokio::spawn(async move {
update_monitor.run().await; // should run indefinitely
});
// Post office
{
let mut database = self.database.clone();
let event_sender = self.event_sender.clone();
let post_office_source = self.post_office_source.take().unwrap();
tokio::spawn(async move {
let mut post_office =
PostOffice::new(post_office_source, event_sender, async move || {
Self::get_client_impl(&mut database).await
});
post_office.run().await;
});
}
// Attachment store
let mut attachment_store =
AttachmentStore::new(self.database.clone(), self.event_sender.clone());
self.attachment_store_sink = Some(attachment_store.get_event_sink());
tokio::spawn(async move {
attachment_store.run().await;
});
while let Some(event) = self.event_receiver.recv().await {
log::debug!(target: target::EVENT, "Received event: {:?}", event);
self.handle_event(event).await;
}
}
fn spawn_conversation_list_sync(&mut self) {
let mut db_clone = self.database.clone();
let signal_sender = self.signal_sender.clone();
self.runtime.spawn(async move {
let result = Self::sync_conversation_list(&mut db_clone, &signal_sender).await;
if let Err(e) = result {
log::error!(target: target::SYNC, "Error handling sync event: {}", e);
}
});
}
async fn handle_event(&mut self, event: Event) {
match event {
Event::GetVersion(reply) => {
reply.send(self.version.clone()).unwrap();
}
Event::SyncConversationList(reply) => {
self.spawn_conversation_list_sync();
// This is a background operation, so return right away.
reply.send(()).unwrap();
}
Event::SyncAllConversations(reply) => {
let mut db_clone = self.database.clone();
let signal_sender = self.signal_sender.clone();
self.runtime.spawn(async move {
let result =
Self::sync_all_conversations_impl(&mut db_clone, &signal_sender).await;
if let Err(e) = result {
log::error!(target: target::SYNC, "Error handling sync event: {}", e);
}
});
// This is a background operation, so return right away.
reply.send(()).unwrap();
}
Event::SyncConversation(conversation_id, reply) => {
let mut db_clone = self.database.clone();
let signal_sender = self.signal_sender.clone();
self.runtime.spawn(async move {
let result = Self::sync_conversation_impl(
&mut db_clone,
&signal_sender,
conversation_id,
)
.await;
if let Err(e) = result {
log::error!(target: target::SYNC, "Error handling sync event: {}", e);
}
});
reply.send(()).unwrap();
}
Event::MarkConversationAsRead(conversation_id, reply) => {
let mut db_clone = self.database.clone();
self.runtime.spawn(async move {
let result = Self::mark_conversation_as_read_impl(&mut db_clone, conversation_id).await;
if let Err(e) = result {
log::error!(target: target::DAEMON, "Error handling mark conversation as read event: {}", e);
}
});
reply.send(()).unwrap();
}
Event::UpdateConversationMetadata(conversation, reply) => {
let mut db_clone = self.database.clone();
let signal_sender = self.signal_sender.clone();
self.runtime.spawn(async move {
let result = Self::update_conversation_metadata_impl(&mut db_clone, conversation, &signal_sender).await;
if let Err(e) = result {
log::error!(target: target::DAEMON, "Error handling update conversation metadata event: {}", e);
}
});
reply.send(()).unwrap();
}
Event::UpdateStreamReconnected => {
log::info!(target: target::UPDATES, "Update stream reconnected");
// UI clients may react in their own way, but the daemon should almost certainly re-sync the conversation list in response.
self.spawn_conversation_list_sync();
// Send signal to the client that the update stream has been reconnected.
self.signal_sender
.send(Signal::UpdateStreamReconnected)
.await
.unwrap();
}
Event::GetAllConversations(limit, offset, reply) => {
let conversations = self.get_conversations_limit_offset(limit, offset).await;
reply.send(conversations).unwrap();
}
Event::GetAllSettings(reply) => {
let settings = self.get_settings().await.unwrap_or_else(|e| {
log::error!(target: target::SETTINGS, "Failed to get settings: {:#?}", e);
Settings::default()
});
reply.send(settings).unwrap();
}
Event::UpdateSettings(settings, reply) => {
let previous_settings = self.get_settings().await.unwrap_or_default();
let previous_server_url = previous_settings.server_url;
self.update_settings(&settings).await.unwrap_or_else(|e| {
log::error!(target: target::SETTINGS, "Failed to update settings: {}", e);
});
// Only trigger re-sync if both URLs are Some and different, or if one is Some and other is None
if previous_server_url.as_deref() != settings.server_url.as_deref() {
// If the server url has changed, we'll need to do a full re-sync.
self.delete_all_conversations().await.unwrap_or_else(|e| {
log::error!(target: target::SYNC, "Failed to delete all conversations: {}", e);
});
// Do a sync-list to get the new conversations.
self.spawn_conversation_list_sync();
// Also restart the update monitor.
if let Err(e) = self
.update_monitor_command_tx
.as_ref()
.unwrap()
.try_send(UpdateMonitorCommand::Restart)
{
log::warn!(target: target::UPDATES, "Failed to send restart command to update monitor: {}", e);
}
}
reply.send(()).unwrap();
}
Event::GetMessages(conversation_id, last_message_id, reply) => {
let messages = self.get_messages(conversation_id, last_message_id).await;
reply.send(messages).unwrap();
}
Event::DeleteAllConversations(reply) => {
self.delete_all_conversations().await.unwrap_or_else(|e| {
log::error!(target: target::SYNC, "Failed to delete all conversations: {}", e);
});
reply.send(()).unwrap();
}
Event::SendMessage(conversation_id, text, attachment_guids, reply) => {
let conversation_id = conversation_id.clone();
let uuid = self
.enqueue_outgoing_message(text, conversation_id.clone(), attachment_guids)
.await;
reply.send(uuid).unwrap();
// Send a messages-updated signal; clients will see the placeholder outgoing message we just recorded.
self.signal_sender
.send(Signal::MessagesUpdated(conversation_id.clone()))
.await
.unwrap();
}
Event::MessageSent(message, outgoing_message, conversation_id) => {
log::info!(target: target::EVENT, "Daemon: message sent: {}", message.id);
// Insert the message into the database.
log::debug!(target: target::EVENT, "inserting sent message into database: {}", message.id);
self.database
.lock()
.await
.with_repository(|r| r.insert_message(&conversation_id, message.into()))
.await
.unwrap();
// Remove from outgoing messages.
log::debug!(target: target::EVENT, "Removing message from outgoing messages: {}", outgoing_message.guid);
if let Some(messages) = self.outgoing_messages.get_mut(&conversation_id) {
messages.retain(|m| m.guid != outgoing_message.guid);
}
// Send message updated signal.
self.signal_sender
.send(Signal::MessagesUpdated(conversation_id))
.await
.unwrap();
}
Event::GetAttachment(guid, reply) => {
self.attachment_store_sink
.as_ref()
.unwrap()
.send(AttachmentStoreEvent::GetAttachmentInfo(guid, reply))
.await
.unwrap();
}
Event::DownloadAttachment(attachment_id, preview, reply) => {
log::debug!(target: target::ATTACHMENTS, "Download requested for attachment: {}, preview: {}", &attachment_id, preview);
self.attachment_store_sink
.as_ref()
.unwrap()
.send(AttachmentStoreEvent::QueueDownloadAttachment(
attachment_id,
preview,
))
.await
.unwrap();
reply.send(()).unwrap();
}
Event::AttachmentDownloaded(attachment_id) => {
log::debug!(target: target::ATTACHMENTS, "Daemon: attachment downloaded: {}, sending signal", attachment_id);
// Send signal to the client that the attachment has been downloaded.
self.signal_sender
.send(Signal::AttachmentDownloaded(attachment_id))
.await
.unwrap();
}
Event::UploadAttachment(path, reply) => {
self.attachment_store_sink
.as_ref()
.unwrap()
.send(AttachmentStoreEvent::QueueUploadAttachment(path, reply))
.await
.unwrap();
}
Event::AttachmentUploaded(upload_guid, attachment_guid) => {
log::info!(target: target::ATTACHMENTS, "Daemon: attachment uploaded: {}, {}", upload_guid, attachment_guid);
self.signal_sender
.send(Signal::AttachmentUploaded(upload_guid, attachment_guid))
.await
.unwrap();
}
}
}
/// Panics if the signal receiver has already been taken.
pub fn obtain_signal_receiver(&mut self) -> Receiver<Signal> {
self.signal_receiver.take().unwrap()
}
async fn get_conversations_limit_offset(
&mut self,
limit: i32,
offset: i32,
) -> Vec<Conversation> {
self.database
.lock()
.await
.with_repository(|r| r.all_conversations(limit, offset).unwrap())
.await
}
async fn get_messages(
&mut self,
conversation_id: String,
_last_message_id: Option<MessageID>,
) -> Vec<Message> {
// Get outgoing messages for this conversation.
let empty_vec: Vec<OutgoingMessage> = vec![];
let outgoing_messages: &Vec<OutgoingMessage> = self
.outgoing_messages
.get(&conversation_id)
.unwrap_or(&empty_vec);
self.database
.lock()
.await
.with_repository(|r| {
r.get_messages_for_conversation(&conversation_id)
.unwrap()
.into_iter()
.map(|m| m.into()) // Convert db::Message to daemon::Message
.chain(outgoing_messages.into_iter().map(|m| m.into()))
.collect()
})
.await
}
async fn enqueue_outgoing_message(
&mut self,
text: String,
conversation_id: String,
attachment_guids: Vec<String>,
) -> Uuid {
let conversation_id = conversation_id.clone();
let outgoing_message = OutgoingMessage::builder()
.text(text)
.conversation_id(conversation_id.clone())
.file_transfer_guids(attachment_guids)
.build();
// Keep a record of this so we can provide a consistent model to the client.
self.outgoing_messages
.entry(conversation_id)
.or_insert(vec![])
.push(outgoing_message.clone());
let guid = outgoing_message.guid.clone();
self.post_office_sink
.send(PostOfficeEvent::EnqueueOutgoingMessage(outgoing_message))
.await
.unwrap();
guid
}
async fn sync_conversation_list(
database: &mut Arc<Mutex<Database>>,
signal_sender: &Sender<Signal>,
) -> Result<()> {
log::info!(target: target::SYNC, "Starting list conversation sync");
let mut client = Self::get_client_impl(database).await?;
// Fetch conversations from server
let fetched_conversations = client.get_conversations().await?;
let db_conversations: Vec<kordophone_db::models::Conversation> = fetched_conversations
.into_iter()
.map(kordophone_db::models::Conversation::from)
.collect();
// Insert each conversation
let num_conversations = db_conversations.len();
let mut contact_resolver = ContactResolver::new(DefaultContactResolverBackend::default());
for conversation in db_conversations {
// Insert or update conversation and its participants
database
.with_repository(|r| r.insert_conversation(conversation.clone()))
.await?;
// Resolve any new participants via the contact resolver and store their contact_id
log::trace!(target: target::SYNC, "Resolving participants for conversation: {}", conversation.guid);
let guid = conversation.guid.clone();
if let Some(saved) = database
.with_repository(|r| r.get_conversation_by_guid(&guid))
.await?
{
for p in &saved.participants {
if let DbParticipant::Remote {
handle,
contact_id: None,
} = p
{
log::trace!(target: target::SYNC, "Resolving contact id for participant: {}", handle);
if let Some(contact) = contact_resolver.resolve_contact_id(handle) {
log::trace!(target: target::SYNC, "Resolved contact id for participant: {}", contact);
let _ = database
.with_repository(|r| {
r.update_participant_contact(&handle, &contact)
})
.await;
} else {
log::trace!(target: target::SYNC, "No contact id found for participant: {}", handle);
}
}
}
}
}
// Send conversations updated signal
signal_sender.send(Signal::ConversationsUpdated).await?;
log::info!(target: target::SYNC, "List synchronized: {} conversations", num_conversations);
Ok(())
}
async fn sync_all_conversations_impl(
database: &mut Arc<Mutex<Database>>,
signal_sender: &Sender<Signal>,
) -> Result<()> {
log::info!(target: target::SYNC, "Starting full conversation sync");
let mut client = Self::get_client_impl(database).await?;
// Fetch conversations from server
let fetched_conversations = client.get_conversations().await?;
let db_conversations: Vec<kordophone_db::models::Conversation> = fetched_conversations
.into_iter()
.map(kordophone_db::models::Conversation::from)
.collect();
// Process each conversation
let num_conversations = db_conversations.len();
for conversation in db_conversations {
let conversation_id = conversation.guid.clone();
// Insert the conversation
database
.with_repository(|r| r.insert_conversation(conversation))
.await?;
// Sync individual conversation.
Self::sync_conversation_impl(database, signal_sender, conversation_id).await?;
}
// Send conversations updated signal.
signal_sender.send(Signal::ConversationsUpdated).await?;
log::info!(target: target::SYNC, "Full sync complete, {} conversations processed", num_conversations);
Ok(())
}
async fn sync_conversation_impl(
database: &mut Arc<Mutex<Database>>,
signal_sender: &Sender<Signal>,
conversation_id: String,
) -> Result<()> {
log::debug!(target: target::SYNC, "Starting conversation sync for {}", conversation_id);
let mut client = Self::get_client_impl(database).await?;
// Check if conversation exists in database.
let conversation = database
.with_repository(|r| r.get_conversation_by_guid(&conversation_id))
.await?;
if conversation.is_none() {
// If the conversation doesn't exist, first do a conversation list sync.
log::warn!(target: target::SYNC, "Conversation {} not found, performing list sync", conversation_id);
Self::sync_conversation_list(database, signal_sender).await?;
}
// Fetch and sync messages for this conversation
let last_message_id = database
.with_repository(|r| -> Option<String> {
r.get_last_message_for_conversation(&conversation_id)
.unwrap_or(None)
.map(|m| m.id)
})
.await;
log::debug!(target: target::SYNC, "Fetching messages for conversation {}", &conversation_id);
log::debug!(target: target::SYNC, "Last message id: {:?}", last_message_id);
let messages = client
.get_messages(&conversation_id, None, None, last_message_id)
.await?;
// Filter messages that have an empty body, or a body that is just whitespace.
// This is a workaround for a server bug where messages with an empty body are returned; these are usually
// typing indicators or similar non-message items. In the future, we should move to ChatItems instead of Messages.
let insertable_messages: Vec<kordophone::model::Message> = messages
.into_iter()
.filter(|m| {
(!m.text.is_empty() && !m.text.trim().is_empty())
|| !m.file_transfer_guids.is_empty()
})
.collect();
let db_messages: Vec<kordophone_db::models::Message> = insertable_messages
.into_iter()
.map(kordophone_db::models::Message::from)
.collect();
// Insert each message
let num_messages = db_messages.len();
log::debug!(target: target::SYNC, "Inserting {} messages for conversation {}", num_messages, &conversation_id);
database
.with_repository(|r| r.insert_messages(&conversation_id, db_messages))
.await?;
// Send messages updated signal, if we actually inserted any messages.
if num_messages > 0 {
signal_sender
.send(Signal::MessagesUpdated(conversation_id.clone()))
.await?;
}
log::debug!(target: target::SYNC, "Synchronized {} messages for conversation {}", num_messages, &conversation_id);
Ok(())
}
async fn mark_conversation_as_read_impl(
database: &mut Arc<Mutex<Database>>,
conversation_id: String,
) -> Result<()> {
log::debug!(target: target::DAEMON, "Marking conversation as read: {}", conversation_id);
let mut client = Self::get_client_impl(database).await?;
client.mark_conversation_as_read(&conversation_id).await?;
Ok(())
}
async fn update_conversation_metadata_impl(
database: &mut Arc<Mutex<Database>>,
conversation: Conversation,
signal_sender: &Sender<Signal>,
) -> Result<()> {
log::debug!(target: target::DAEMON, "Updating conversation metadata: {}", conversation.guid);
let updated = database
.with_repository(|r| r.merge_conversation_metadata(conversation))
.await?;
if updated {
signal_sender.send(Signal::ConversationsUpdated).await?;
}
Ok(())
}
async fn get_settings(&mut self) -> Result<Settings> {
let settings = self.database.with_settings(Settings::from_db).await?;
Ok(settings)
}
async fn update_settings(&mut self, settings: &Settings) -> Result<()> {
self.database.with_settings(|s| settings.save(s)).await
}
async fn get_client_impl(
database: &mut Arc<Mutex<Database>>,
) -> Result<HTTPAPIClient<DatabaseAuthenticationStore>> {
let settings = database.with_settings(Settings::from_db).await?;
let server_url = settings
.server_url
.ok_or(DaemonError::ClientNotConfigured)?;
let client = HTTPAPIClient::new(
match server_url.parse() {
Ok(url) => url,
Err(_) => {
log::error!(target: target::DAEMON, "Invalid server URL: {}", server_url);
return Err(DaemonError::ClientNotConfigured.into());
}
},
DatabaseAuthenticationStore::new(database.clone()),
);
Ok(client)
}
async fn delete_all_conversations(&mut self) -> Result<()> {
self.database
.with_repository(|r| -> Result<()> {
r.delete_all_conversations()?;
r.delete_all_messages()?;
Ok(())
})
.await?;
self.signal_sender
.send(Signal::ConversationsUpdated)
.await?;
Ok(())
}
fn get_data_dir() -> Option<PathBuf> {
ProjectDirs::from("net", "buzzert", "kordophonecd").map(|p| PathBuf::from(p.data_dir()))
}
fn get_database_path() -> PathBuf {
if let Some(data_dir) = Self::get_data_dir() {
data_dir.join("database.db")
} else {
// Fallback to a local path if we can't get the system directories
PathBuf::from("database.db")
}
}
}

View File

@@ -0,0 +1,83 @@
use std::path::PathBuf;
#[derive(Debug, Clone)]
pub struct AttachmentMetadata {
pub attribution_info: Option<AttributionInfo>,
}
#[derive(Debug, Clone)]
pub struct AttributionInfo {
pub width: Option<u32>,
pub height: Option<u32>,
}
#[derive(Debug, Clone)]
pub struct Attachment {
pub guid: String,
pub base_path: PathBuf,
pub metadata: Option<AttachmentMetadata>,
}
impl Attachment {
pub fn get_path(&self) -> PathBuf {
self.get_path_for_preview_scratch(false, false)
}
pub fn get_path_for_preview(&self, preview: bool) -> PathBuf {
self.get_path_for_preview_scratch(preview, false)
}
pub fn get_path_for_preview_scratch(&self, preview: bool, scratch: bool) -> PathBuf {
let extension = if preview { "preview" } else { "full" };
if scratch {
self.base_path
.with_extension(format!("{}.download", extension))
} else {
self.base_path.with_extension(extension)
}
}
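// Resulting on-disk layout for a hypothetical base path "store/ABC" (the extension is
// replaced, not appended):
//   full download:     store/ABC.full
//   preview:           store/ABC.preview
//   in-progress files: store/ABC.full.download / store/ABC.preview.download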
pub fn is_downloaded(&self, preview: bool) -> bool {
let path = self.get_path_for_preview(preview);
std::fs::exists(&path).unwrap_or_else(|e| {
panic!(
"Wasn't able to check for the existence of an attachment file path at {}: {}",
path.display(),
e
)
})
}
}
impl From<kordophone::model::message::AttachmentMetadata> for AttachmentMetadata {
fn from(metadata: kordophone::model::message::AttachmentMetadata) -> Self {
Self {
attribution_info: metadata.attribution_info.map(|info| info.into()),
}
}
}
impl From<kordophone::model::message::AttributionInfo> for AttributionInfo {
fn from(info: kordophone::model::message::AttributionInfo) -> Self {
Self {
width: info.width,
height: info.height,
}
}
}
impl From<AttachmentMetadata> for kordophone::model::message::AttachmentMetadata {
fn from(metadata: AttachmentMetadata) -> Self {
Self {
attribution_info: metadata.attribution_info.map(|info| info.into()),
}
}
}
impl From<AttributionInfo> for kordophone::model::message::AttributionInfo {
fn from(info: AttributionInfo) -> Self {
Self {
width: info.width,
height: info.height,
}
}
}

View File

@@ -0,0 +1,186 @@
use chrono::DateTime;
use chrono::NaiveDateTime;
use crate::daemon::attachment_store::AttachmentStore;
use crate::daemon::models::Attachment;
use kordophone::model::message::AttachmentMetadata;
use kordophone::model::outgoing_message::OutgoingMessage;
use kordophone_db::models::participant::Participant as DbParticipant;
use std::collections::HashMap;
#[derive(Clone, Debug)]
pub enum Participant {
Me,
Remote {
handle: String,
contact_id: Option<String>,
},
}
impl From<String> for Participant {
fn from(handle: String) -> Self {
Participant::Remote {
handle,
contact_id: None,
}
}
}
impl From<&str> for Participant {
fn from(handle: &str) -> Self {
Participant::Remote {
handle: handle.to_string(),
contact_id: None,
}
}
}
impl From<kordophone_db::models::Participant> for Participant {
fn from(participant: kordophone_db::models::Participant) -> Self {
match participant {
kordophone_db::models::Participant::Me => Participant::Me,
kordophone_db::models::Participant::Remote { handle, contact_id } => {
Participant::Remote { handle, contact_id }
}
}
}
}
impl Participant {
pub fn display_name(&self) -> String {
match self {
Participant::Me => "(Me)".to_string(),
Participant::Remote { handle, .. } => handle.clone(),
}
}
}
#[derive(Clone, Debug)]
pub struct Message {
pub id: String,
pub sender: Participant,
pub text: String,
pub date: NaiveDateTime,
pub attachments: Vec<Attachment>,
}
fn attachments_from(
file_transfer_guids: &Vec<String>,
attachment_metadata: &Option<HashMap<String, AttachmentMetadata>>,
) -> Vec<Attachment> {
file_transfer_guids
.iter()
.map(|guid| {
let mut attachment = AttachmentStore::get_attachment_impl(
&AttachmentStore::get_default_store_path(),
guid,
);
attachment.metadata = match attachment_metadata {
Some(attachment_metadata) => attachment_metadata
.get(guid)
.cloned()
.map(|metadata| metadata.into()),
None => None,
};
attachment
})
.collect()
}
impl From<kordophone_db::models::Message> for Message {
fn from(message: kordophone_db::models::Message) -> Self {
let attachments =
attachments_from(&message.file_transfer_guids, &message.attachment_metadata);
Self {
id: message.id,
sender: message.sender.into(),
text: message.text,
date: message.date,
attachments,
}
}
}
impl From<Message> for kordophone_db::models::Message {
fn from(message: Message) -> Self {
Self {
id: message.id,
sender: match message.sender {
Participant::Me => kordophone_db::models::Participant::Me,
Participant::Remote { handle, contact_id } => {
kordophone_db::models::Participant::Remote { handle, contact_id }
}
},
text: message.text,
date: message.date,
file_transfer_guids: message.attachments.iter().map(|a| a.guid.clone()).collect(),
attachment_metadata: {
let metadata_map: HashMap<String, kordophone::model::message::AttachmentMetadata> =
message
.attachments
.iter()
.filter_map(|a| {
a.metadata
.as_ref()
.map(|m| (a.guid.clone(), m.clone().into()))
})
.collect();
if metadata_map.is_empty() {
None
} else {
Some(metadata_map)
}
},
}
}
}
impl From<kordophone::model::Message> for Message {
fn from(message: kordophone::model::Message) -> Self {
let attachments =
attachments_from(&message.file_transfer_guids, &message.attachment_metadata);
Self {
id: message.guid,
sender: match message.sender {
Some(sender) => Participant::Remote {
handle: sender,
contact_id: None,
},
None => Participant::Me,
},
text: message.text,
date: DateTime::from_timestamp(
message.date.unix_timestamp(),
// Only the sub-second nanoseconds are valid here; passing the full nanosecond
// count overflows u32 and silently falls back to 0.
(message.date.unix_timestamp_nanos().rem_euclid(1_000_000_000)) as u32,
)
.unwrap()
.naive_local(),
attachments,
}
}
}
impl From<&OutgoingMessage> for Message {
fn from(value: &OutgoingMessage) -> Self {
Self {
id: value.guid.to_string(),
sender: Participant::Me,
text: value.text.clone(),
date: value.date,
attachments: Vec::new(), // Outgoing messages don't have attachments initially
}
}
}
impl From<Participant> for DbParticipant {
fn from(participant: Participant) -> Self {
match participant {
Participant::Me => DbParticipant::Me,
Participant::Remote { handle, contact_id } => DbParticipant::Remote {
handle,
contact_id: contact_id.clone(),
},
}
}
}

View File

@@ -0,0 +1,5 @@
pub mod attachment;
pub mod message;
pub use attachment::Attachment;
pub use message::Message;

View File

@@ -0,0 +1,130 @@
use std::collections::VecDeque;
use std::time::Duration;
use tokio::sync::mpsc::{Receiver, Sender};
use tokio::sync::Mutex;
use tokio_condvar::Condvar;
use crate::daemon::events::Event as DaemonEvent;
use kordophone::api::APIInterface;
use kordophone::model::outgoing_message::OutgoingMessage;
use anyhow::Result;
mod target {
pub static POST_OFFICE: &str = "post_office";
}
#[derive(Debug)]
pub enum Event {
EnqueueOutgoingMessage(OutgoingMessage),
}
pub struct PostOffice<C: APIInterface, F: AsyncFnMut() -> Result<C>> {
event_source: Receiver<Event>,
event_sink: Sender<DaemonEvent>,
make_client: F,
message_queue: Mutex<VecDeque<OutgoingMessage>>,
message_available: Condvar,
}
impl<C: APIInterface, F: AsyncFnMut() -> Result<C>> PostOffice<C, F> {
pub fn new(
event_source: Receiver<Event>,
event_sink: Sender<DaemonEvent>,
make_client: F,
) -> Self {
Self {
event_source,
event_sink,
make_client,
message_queue: Mutex::new(VecDeque::new()),
message_available: Condvar::new(),
}
}
pub async fn queue_message(&mut self, message: &OutgoingMessage) {
self.message_queue.lock().await.push_back(message.clone());
self.message_available.notify_one();
}
pub async fn run(&mut self) {
log::info!(target: target::POST_OFFICE, "Starting post office");
loop {
let mut retry_messages = Vec::new();
tokio::select! {
// Incoming events
Some(event) = self.event_source.recv() => {
match event {
Event::EnqueueOutgoingMessage(message) => {
log::debug!(target: target::POST_OFFICE, "Received enqueue outgoing message event");
self.message_queue.lock().await.push_back(message);
self.message_available.notify_one();
}
}
}
// Message queue
mut lock = self.message_available.wait(self.message_queue.lock().await) => {
log::debug!(target: target::POST_OFFICE, "Message available in queue");
// Get the next message to send, if any
let message = lock.pop_front();
drop(lock); // Release the lock before sending; we don't want to hold it while sending.
if let Some(message) = message {
retry_messages = Self::try_send_message(&mut self.make_client, &self.event_sink, message).await;
}
}
}
if !retry_messages.is_empty() {
log::debug!(target: target::POST_OFFICE, "Queueing {} messages for retry", retry_messages.len());
for message in retry_messages {
self.queue_message(&message).await;
}
}
}
}
async fn try_send_message(
make_client: &mut F,
event_sink: &Sender<DaemonEvent>,
message: OutgoingMessage,
) -> Vec<OutgoingMessage> {
let mut retry_messages = Vec::new();
match (make_client)().await {
Ok(mut client) => {
log::debug!(target: target::POST_OFFICE, "Obtained client, sending message.");
match client.send_message(&message).await {
Ok(sent_message) => {
log::info!(target: target::POST_OFFICE, "Message sent successfully: {}", message.guid);
let conversation_id = message.conversation_id.clone();
let event =
DaemonEvent::MessageSent(sent_message.into(), message, conversation_id);
event_sink.send(event).await.unwrap();
}
Err(e) => {
log::error!(target: target::POST_OFFICE, "Error sending message: {:?}", e);
log::warn!(target: target::POST_OFFICE, "Retrying in 5 seconds");
tokio::time::sleep(Duration::from_secs(5)).await;
retry_messages.push(message);
}
}
}
Err(e) => {
log::error!(target: target::POST_OFFICE, "Error creating client: {:?}", e);
log::warn!(target: target::POST_OFFICE, "Retrying in 5 seconds");
tokio::time::sleep(Duration::from_secs(5)).await;
retry_messages.push(message);
}
}
retry_messages
}
}

View File

@@ -0,0 +1,48 @@
use anyhow::Result;
use kordophone_db::settings::Settings as DbSettings;
pub mod keys {
pub static SERVER_URL: &str = "ServerURL";
pub static USERNAME: &str = "Username";
pub static TOKEN: &str = "Token";
}
#[derive(Debug, Default)]
pub struct Settings {
pub server_url: Option<String>,
pub username: Option<String>,
pub token: Option<String>,
}
impl Settings {
pub fn from_db(db_settings: &mut DbSettings) -> Result<Self> {
let server_url = db_settings.get(keys::SERVER_URL)?;
let username = db_settings.get(keys::USERNAME)?;
let token = db_settings.get(keys::TOKEN)?;
// Create the settings struct with the results
let settings = Self {
server_url,
username,
token,
};
// Load bearing.
// Note: this logs the full settings, including any stored token, at debug level.
log::debug!("Loaded settings: {:?}", settings);
Ok(settings)
}
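/// Persists only the fields that are `Some`; keys whose field is `None` are left
/// untouched in the database, so callers can apply partial updates (for example,
/// changing the username without clearing the stored server URL or token).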
pub fn save(&self, db_settings: &mut DbSettings) -> Result<()> {
if let Some(server_url) = &self.server_url {
db_settings.put(keys::SERVER_URL, &server_url)?;
}
if let Some(username) = &self.username {
db_settings.put(keys::USERNAME, &username)?;
}
if let Some(token) = &self.token {
db_settings.put(keys::TOKEN, &token)?;
}
Ok(())
}
}

View File

@@ -0,0 +1,24 @@
#[derive(Debug, Clone)]
pub enum Signal {
/// Emitted when the list of conversations is updated.
ConversationsUpdated,
/// Emitted when the list of messages for a conversation is updated.
/// Parameters:
/// - conversation_id: The ID of the conversation that was updated.
MessagesUpdated(String),
/// Emitted when an attachment has been downloaded.
/// Parameters:
/// - attachment_id: The ID of the attachment that was downloaded.
AttachmentDownloaded(String),
/// Emitted when an attachment has been uploaded.
/// Parameters:
/// - upload_guid: The GUID of the upload.
/// - attachment_guid: The GUID of the attachment on the server.
AttachmentUploaded(String, String),
/// Emitted when the update stream is reconnected after a timeout or configuration change.
UpdateStreamReconnected,
}
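// These are forwarded to the corresponding D-Bus signals (ConversationsUpdated,
// MessagesUpdated, AttachmentDownloadCompleted, AttachmentUploadCompleted,
// UpdateStreamReconnected) by the daemon's D-Bus agent.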

View File

@@ -0,0 +1,241 @@
use crate::daemon::{
events::{Event, Reply},
target, Daemon, DaemonResult,
};
use futures_util::SinkExt;
use kordophone::api::event_socket::{EventSocket, SinkMessage};
use kordophone::model::event::Event as UpdateEvent;
use kordophone::model::event::EventData as UpdateEventData;
use kordophone::APIInterface;
use kordophone_db::database::Database;
use kordophone_db::database::DatabaseAccess;
use std::collections::HashMap;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::mpsc::{Receiver, Sender};
use tokio::sync::Mutex;
pub enum UpdateMonitorCommand {
Restart,
}
pub struct UpdateMonitor {
command_tx: Option<Sender<UpdateMonitorCommand>>,
command_rx: Receiver<UpdateMonitorCommand>,
database: Arc<Mutex<Database>>,
event_sender: Sender<Event>,
last_sync_times: HashMap<String, Instant>,
update_seq: Option<u64>,
first_connection: bool,
}
impl UpdateMonitor {
pub fn new(database: Arc<Mutex<Database>>, event_sender: Sender<Event>) -> Self {
let (command_tx, command_rx) = tokio::sync::mpsc::channel(100);
Self {
database,
event_sender,
last_sync_times: HashMap::new(),
update_seq: None,
first_connection: false, // false at startup: the very first connection is not announced as a reconnect.
command_tx: Some(command_tx),
command_rx,
}
}
pub fn take_command_channel(&mut self) -> Sender<UpdateMonitorCommand> {
self.command_tx.take().unwrap()
}
async fn send_event<T>(&self, make_event: impl FnOnce(Reply<T>) -> Event) -> DaemonResult<T> {
let (reply_tx, reply_rx) = tokio::sync::oneshot::channel();
self.event_sender
.send(make_event(reply_tx))
.await
.map_err(|_| "Failed to send event")?;
reply_rx.await.map_err(|_| "Failed to receive reply".into())
}
async fn handle_update(&mut self, update: UpdateEvent) {
match update.data {
UpdateEventData::ConversationChanged(conversation) => {
log::info!(target: target::UPDATES, "Conversation changed: {}", conversation.guid);
// Explicitly update the unread count; we assume this is fresh from the notification.
let db_conversation: kordophone_db::models::Conversation =
conversation.clone().into();
self.send_event(|r| Event::UpdateConversationMetadata(db_conversation, r))
.await
.unwrap_or_else(|e| {
log::error!("Failed to send daemon event: {}", e);
});
// Check whether we've synced this conversation very recently (within the last second).
// This is currently a hack/workaround to prevent an infinite loop of sync events, because for some reason
// imagent will post a conversation changed notification when we call getMessages.
if let Some(last_sync) = self.last_sync_times.get(&conversation.guid) {
if last_sync.elapsed() < Duration::from_secs(1) {
log::warn!(target: target::UPDATES, "Skipping sync for conversation id: {}. Last sync was {} seconds ago.",
conversation.guid, last_sync.elapsed().as_secs_f64());
return;
}
}
// This is the non-hacky path once we can reason about chat items with associatedMessageGUIDs (e.g., reactions).
let last_message = self
.database
.with_repository(|r| r.get_last_message_for_conversation(&conversation.guid))
.await
.unwrap_or_default();
match (&last_message, &conversation.last_message) {
(Some(message), Some(conversation_message)) => {
if message.id == conversation_message.guid {
log::warn!(target: target::UPDATES, "Skipping sync for conversation id: {}. We already have this message.", &conversation.guid);
return;
}
}
_ => {}
};
// Update the last sync time and proceed with sync
self.last_sync_times
.insert(conversation.guid.clone(), Instant::now());
log::info!(target: target::UPDATES, "Syncing new messages for conversation id: {}", conversation.guid);
self.send_event(|r| Event::SyncConversation(conversation.guid, r))
.await
.unwrap_or_else(|e| {
log::error!("Failed to send daemon event: {}", e);
});
}
UpdateEventData::MessageReceived(conversation, message) => {
log::info!(target: target::UPDATES, "Message received: msgid:{:?}, convid:{:?}", message.guid, conversation.guid);
log::info!(target: target::UPDATES, "Triggering message sync for conversation id: {}", conversation.guid);
self.send_event(|r| Event::SyncConversation(conversation.guid, r))
.await
.unwrap_or_else(|e| {
log::error!("Failed to send daemon event: {}", e);
});
}
}
}
pub async fn run(&mut self) {
use futures_util::stream::StreamExt;
log::info!(target: target::UPDATES, "Starting update monitor");
loop {
log::debug!(target: target::UPDATES, "Creating client");
let mut client = match Daemon::get_client_impl(&mut self.database).await {
Ok(client) => client,
Err(e) => {
log::error!("Failed to get client: {}", e);
log::warn!("Retrying in 5 seconds...");
tokio::time::sleep(std::time::Duration::from_secs(5)).await;
continue;
}
};
log::debug!(target: target::UPDATES, "Opening event socket");
let socket = match client.open_event_socket(self.update_seq).await {
Ok(events) => events,
Err(e) => {
log::warn!("Failed to open event socket: {}", e);
log::warn!("Retrying in 5 seconds...");
tokio::time::sleep(std::time::Duration::from_secs(5)).await;
continue;
}
};
log::debug!(target: target::UPDATES, "Starting event stream");
let (mut event_stream, mut sink) = socket.events().await;
// We won't know the websocket is dead until we try to write to it, so if no message
// arrives within the interval below, send a ping to probe the connection.
let mut timeout = tokio::time::interval(Duration::from_secs(10));
timeout.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
// First tick will happen immediately
timeout.tick().await;
// Track when the last ping was sent so we know when to give up
// waiting for the corresponding pong.
let mut ping_sent_at: Option<Instant> = None;
loop {
tokio::select! {
Some(result) = event_stream.next() => {
match result {
Ok(socket_event) => {
match socket_event {
kordophone::api::event_socket::SocketEvent::Update(event) => {
self.handle_update(event).await;
}
kordophone::api::event_socket::SocketEvent::Pong => {
log::debug!(target: target::UPDATES, "Received websocket pong");
}
}
if self.first_connection {
self.event_sender.send(Event::UpdateStreamReconnected).await.unwrap();
self.first_connection = false;
}
// Any successfully handled message (update or pong) keeps the connection alive.
ping_sent_at = None;
timeout.reset();
}
Err(e) => {
log::error!("Error in event stream: {}", e);
self.first_connection = true;
break; // Break inner loop to reconnect
}
}
}
_ = timeout.tick() => {
// If we previously sent a ping and haven't heard back since the timeout, we'll assume the connection is dead.
if ping_sent_at.is_some() {
log::error!(target: target::UPDATES, "Ping timed out. Restarting stream.");
self.first_connection = true;
break;
}
log::debug!("Sending websocket ping on timer");
match sink.send(SinkMessage::Ping).await {
Ok(_) => {
ping_sent_at = Some(Instant::now());
}
Err(e) => {
log::error!(target: target::UPDATES, "Error writing ping to event socket: {}, restarting stream.", e);
self.first_connection = true;
break;
}
}
}
Some(command) = self.command_rx.recv() => {
match command {
UpdateMonitorCommand::Restart => {
log::info!(target: target::UPDATES, "Restarting update monitor");
self.first_connection = true;
break;
}
}
}
}
}
// Add a small delay before reconnecting to avoid tight reconnection loops
tokio::time::sleep(Duration::from_secs(1)).await;
}
}
}

View File

@@ -0,0 +1,512 @@
use dbus::arg;
use dbus_tree::MethodErr;
use std::sync::Arc;
use std::{future::Future, thread};
use tokio::sync::{mpsc, oneshot, Mutex};
use kordophoned::daemon::{
contact_resolver::{ContactResolver, DefaultContactResolverBackend},
events::{Event, Reply},
settings::Settings,
signals::Signal,
DaemonResult,
};
use kordophone_db::models::participant::Participant;
use crate::dbus::endpoint::DbusRegistry;
use crate::dbus::interface;
use crate::dbus::interface::signals as DbusSignals;
use dbus_tokio::connection;
#[derive(Clone)]
pub struct DBusAgent {
event_sink: mpsc::Sender<Event>,
signal_receiver: Arc<Mutex<Option<mpsc::Receiver<Signal>>>>,
contact_resolver: ContactResolver<DefaultContactResolverBackend>,
}
impl DBusAgent {
pub fn new(event_sink: mpsc::Sender<Event>, signal_receiver: mpsc::Receiver<Signal>) -> Self {
Self {
event_sink,
signal_receiver: Arc::new(Mutex::new(Some(signal_receiver))),
contact_resolver: ContactResolver::new(DefaultContactResolverBackend::default()),
}
}
pub async fn run(self) {
// Establish a session bus connection.
let (resource, connection) =
connection::new_session_sync().expect("Failed to connect to session bus");
// Ensure the D-Bus resource is polled.
tokio::spawn(async move {
let err = resource.await;
panic!("Lost connection to D-Bus: {:?}", err);
});
// Claim well-known bus name.
connection
.request_name(interface::NAME, false, true, false)
.await
.expect("Unable to acquire D-Bus name");
// Registry for objects & signals.
let dbus_registry = DbusRegistry::new(connection.clone());
// Register our object implementation.
let implementation = self.clone();
dbus_registry.register_object(interface::OBJECT_PATH, implementation, |cr| {
vec![
interface::register_net_buzzert_kordophone_repository(cr),
interface::register_net_buzzert_kordophone_settings(cr),
]
});
// Spawn task that forwards daemon signals to D-Bus.
{
let registry = dbus_registry.clone();
let receiver_arc = self.signal_receiver.clone();
tokio::spawn(async move {
let mut receiver = receiver_arc
.lock()
.await
.take()
.expect("Signal receiver already taken");
while let Some(signal) = receiver.recv().await {
match signal {
Signal::ConversationsUpdated => {
log::debug!("Sending signal: ConversationsUpdated");
registry
.send_signal(
interface::OBJECT_PATH,
DbusSignals::ConversationsUpdated {},
)
.unwrap_or_else(|_| {
log::error!("Failed to send signal");
0
});
}
Signal::MessagesUpdated(conversation_id) => {
log::debug!(
"Sending signal: MessagesUpdated for conversation {}",
conversation_id
);
registry
.send_signal(
interface::OBJECT_PATH,
DbusSignals::MessagesUpdated { conversation_id },
)
.unwrap_or_else(|_| {
log::error!("Failed to send signal");
0
});
}
Signal::AttachmentDownloaded(attachment_id) => {
log::debug!(
"Sending signal: AttachmentDownloaded for attachment {}",
attachment_id
);
registry
.send_signal(
interface::OBJECT_PATH,
DbusSignals::AttachmentDownloadCompleted { attachment_id },
)
.unwrap_or_else(|_| {
log::error!("Failed to send signal");
0
});
}
Signal::AttachmentUploaded(upload_guid, attachment_guid) => {
log::debug!(
"Sending signal: AttachmentUploaded for upload {}, attachment {}",
upload_guid,
attachment_guid
);
registry
.send_signal(
interface::OBJECT_PATH,
DbusSignals::AttachmentUploadCompleted {
upload_guid,
attachment_guid,
},
)
.unwrap_or_else(|_| {
log::error!("Failed to send signal");
0
});
}
Signal::UpdateStreamReconnected => {
log::debug!("Sending signal: UpdateStreamReconnected");
registry
.send_signal(
interface::OBJECT_PATH,
DbusSignals::UpdateStreamReconnected {},
)
.unwrap_or_else(|_| {
log::error!("Failed to send signal");
0
});
}
}
}
});
}
// Keep running forever.
std::future::pending::<()>().await;
}
pub async fn send_event<T>(
&self,
make_event: impl FnOnce(Reply<T>) -> Event,
) -> DaemonResult<T> {
let (reply_tx, reply_rx) = oneshot::channel();
self.event_sink
.send(make_event(reply_tx))
.await
.map_err(|_| "Failed to send event")?;
reply_rx.await.map_err(|_| "Failed to receive reply".into())
}
pub fn send_event_sync<T: Send>(
&self,
make_event: impl FnOnce(Reply<T>) -> Event + Send,
) -> Result<T, MethodErr> {
run_sync_future(self.send_event(make_event))
.unwrap()
.map_err(|e| MethodErr::failed(&format!("Daemon error: {}", e)))
}
fn resolve_participant_display_name(&mut self, participant: &Participant) -> String {
match participant {
// Me (we should use a special string here...)
Participant::Me => "(Me)".to_string(),
// Remote participant with a resolved contact_id
Participant::Remote {
handle,
contact_id: Some(contact_id),
..
} => self
.contact_resolver
.get_contact_display_name(contact_id)
.unwrap_or_else(|| handle.clone()),
// Remote participant without a resolved contact_id
Participant::Remote { handle, .. } => handle.clone(),
}
}
}
//
// D-Bus repository interface implementation
//
use crate::dbus::interface::NetBuzzertKordophoneRepository as DbusRepository;
use crate::dbus::interface::NetBuzzertKordophoneSettings as DbusSettings;
impl DbusRepository for DBusAgent {
fn get_version(&mut self) -> Result<String, MethodErr> {
self.send_event_sync(Event::GetVersion)
}
fn get_conversations(
&mut self,
limit: i32,
offset: i32,
) -> Result<Vec<arg::PropMap>, MethodErr> {
self.send_event_sync(|r| Event::GetAllConversations(limit, offset, r))
.map(|conversations| {
conversations
.into_iter()
.map(|conv| {
let mut map = arg::PropMap::new();
map.insert("guid".into(), arg::Variant(Box::new(conv.guid)));
map.insert(
"display_name".into(),
arg::Variant(Box::new(conv.display_name.unwrap_or_default())),
);
map.insert(
"unread_count".into(),
arg::Variant(Box::new(conv.unread_count as i32)),
);
map.insert(
"last_message_preview".into(),
arg::Variant(Box::new(conv.last_message_preview.unwrap_or_default())),
);
map.insert(
"participants".into(),
arg::Variant(Box::new(
conv.participants
.into_iter()
.map(|p| self.resolve_participant_display_name(&p))
.collect::<Vec<String>>(),
)),
);
map.insert(
"date".into(),
arg::Variant(Box::new(conv.date.and_utc().timestamp())),
);
map
})
.collect()
})
}
fn sync_conversation_list(&mut self) -> Result<(), MethodErr> {
self.send_event_sync(Event::SyncConversationList)
}
fn sync_all_conversations(&mut self) -> Result<(), MethodErr> {
self.send_event_sync(Event::SyncAllConversations)
}
fn sync_conversation(&mut self, conversation_id: String) -> Result<(), MethodErr> {
self.send_event_sync(|r| Event::SyncConversation(conversation_id, r))
}
fn mark_conversation_as_read(&mut self, conversation_id: String) -> Result<(), MethodErr> {
self.send_event_sync(|r| Event::MarkConversationAsRead(conversation_id, r))
}
fn get_messages(
&mut self,
conversation_id: String,
last_message_id: String,
) -> Result<Vec<arg::PropMap>, MethodErr> {
let last_message_id_opt = if last_message_id.is_empty() {
None
} else {
Some(last_message_id)
};
self.send_event_sync(|r| Event::GetMessages(conversation_id, last_message_id_opt, r))
.map(|messages| {
messages
.into_iter()
.map(|msg| {
let mut map = arg::PropMap::new();
map.insert("id".into(), arg::Variant(Box::new(msg.id)));
// Strip the object replacement character (U+FFFC) used as an inline attachment placeholder.
let text = msg.text.replace("\u{FFFC}", "");
map.insert("text".into(), arg::Variant(Box::new(text)));
map.insert(
"date".into(),
arg::Variant(Box::new(msg.date.and_utc().timestamp())),
);
map.insert(
"sender".into(),
arg::Variant(Box::new(
self.resolve_participant_display_name(&msg.sender.into()),
)),
);
// Attachments array
let attachments: Vec<arg::PropMap> = msg
.attachments
.into_iter()
.map(|attachment| {
let mut attachment_map = arg::PropMap::new();
attachment_map.insert(
"guid".into(),
arg::Variant(Box::new(attachment.guid.clone())),
);
// Paths and download status
let path = attachment.get_path_for_preview(false);
let preview_path = attachment.get_path_for_preview(true);
let downloaded = attachment.is_downloaded(false);
let preview_downloaded = attachment.is_downloaded(true);
attachment_map.insert(
"path".into(),
arg::Variant(Box::new(path.to_string_lossy().to_string())),
);
attachment_map.insert(
"preview_path".into(),
arg::Variant(Box::new(
preview_path.to_string_lossy().to_string(),
)),
);
attachment_map.insert(
"downloaded".into(),
arg::Variant(Box::new(downloaded)),
);
attachment_map.insert(
"preview_downloaded".into(),
arg::Variant(Box::new(preview_downloaded)),
);
// Metadata
if let Some(ref metadata) = attachment.metadata {
let mut metadata_map = arg::PropMap::new();
if let Some(ref attribution_info) = metadata.attribution_info {
let mut attribution_map = arg::PropMap::new();
if let Some(width) = attribution_info.width {
attribution_map.insert(
"width".into(),
arg::Variant(Box::new(width as i32)),
);
}
if let Some(height) = attribution_info.height {
attribution_map.insert(
"height".into(),
arg::Variant(Box::new(height as i32)),
);
}
metadata_map.insert(
"attribution_info".into(),
arg::Variant(Box::new(attribution_map)),
);
}
attachment_map.insert(
"metadata".into(),
arg::Variant(Box::new(metadata_map)),
);
}
attachment_map
})
.collect();
map.insert("attachments".into(), arg::Variant(Box::new(attachments)));
map
})
.collect()
})
}
fn delete_all_conversations(&mut self) -> Result<(), MethodErr> {
self.send_event_sync(Event::DeleteAllConversations)
}
fn send_message(
&mut self,
conversation_id: String,
text: String,
attachment_guids: Vec<String>,
) -> Result<String, MethodErr> {
self.send_event_sync(|r| Event::SendMessage(conversation_id, text, attachment_guids, r))
.map(|uuid| uuid.to_string())
}
fn get_attachment_info(
&mut self,
attachment_id: String,
) -> Result<(String, String, bool, bool), MethodErr> {
self.send_event_sync(|r| Event::GetAttachment(attachment_id, r))
.map(|attachment| {
let path = attachment.get_path_for_preview(false);
let downloaded = attachment.is_downloaded(false);
let preview_path = attachment.get_path_for_preview(true);
let preview_downloaded = attachment.is_downloaded(true);
(
path.to_string_lossy().to_string(),
preview_path.to_string_lossy().to_string(),
downloaded,
preview_downloaded,
)
})
}
fn download_attachment(
&mut self,
attachment_id: String,
preview: bool,
) -> Result<(), MethodErr> {
self.send_event_sync(|r| Event::DownloadAttachment(attachment_id, preview, r))
}
fn upload_attachment(&mut self, path: String) -> Result<String, MethodErr> {
use std::path::PathBuf;
let path = PathBuf::from(path);
self.send_event_sync(|r| Event::UploadAttachment(path, r))
}
}
//
// D-Bus settings interface implementation.
//
impl DbusSettings for DBusAgent {
fn set_server(&mut self, url: String, user: String) -> Result<(), MethodErr> {
self.send_event_sync(|r| {
Event::UpdateSettings(
Settings {
server_url: Some(url),
username: Some(user),
token: None,
},
r,
)
})
}
fn server_url(&self) -> Result<String, MethodErr> {
self.send_event_sync(Event::GetAllSettings)
.map(|settings| settings.server_url.unwrap_or_default())
}
fn set_server_url(&self, value: String) -> Result<(), MethodErr> {
self.send_event_sync(|r| {
Event::UpdateSettings(
Settings {
server_url: Some(value),
username: None,
token: None,
},
r,
)
})
}
fn username(&self) -> Result<String, MethodErr> {
self.send_event_sync(Event::GetAllSettings)
.map(|settings| settings.username.unwrap_or_default())
}
fn set_username(&self, value: String) -> Result<(), MethodErr> {
self.send_event_sync(|r| {
Event::UpdateSettings(
Settings {
server_url: None,
username: Some(value),
token: None,
},
r,
)
})
}
}
//
// Helper utilities.
//
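/// Runs `f` to completion on a dedicated thread with its own single-threaded Tokio
/// runtime, mapping runtime-creation failures to a D-Bus `MethodErr`.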
fn run_sync_future<F, T>(f: F) -> Result<T, MethodErr>
where
T: Send,
F: Future<Output = T> + Send,
{
thread::scope(move |s| {
s.spawn(move || {
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.map_err(|_| MethodErr::failed("Unable to create tokio runtime"))?;
let result = rt.block_on(f);
Ok(result)
})
.join()
})
.expect("Error joining runtime thread")
}

View File

@@ -0,0 +1,75 @@
use log::info;
use std::sync::{Arc, Mutex};
use dbus::{
channel::{MatchingReceiver, Sender},
message::MatchRule,
nonblock::SyncConnection,
Path,
};
use dbus_crossroads::Crossroads;
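/// Owns the shared D-Bus connection and a `Crossroads` dispatcher so that multiple
/// objects can be registered behind a single message handler.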
#[derive(Clone)]
pub struct DbusRegistry {
connection: Arc<SyncConnection>,
crossroads: Arc<Mutex<Crossroads>>,
message_handler_started: Arc<Mutex<bool>>,
}
impl DbusRegistry {
pub fn new(connection: Arc<SyncConnection>) -> Self {
let mut cr = Crossroads::new();
// Enable async support for the crossroads instance.
// (Currently irrelevant since dbus generates sync code)
cr.set_async_support(Some((
connection.clone(),
Box::new(|x| {
tokio::spawn(x);
}),
)));
Self {
connection,
crossroads: Arc::new(Mutex::new(cr)),
message_handler_started: Arc::new(Mutex::new(false)),
}
}
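    /// Registers `implementation` at `path` with the interfaces produced by `register_fn`,
    /// lazily starting the shared method-call handler on first registration.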
pub fn register_object<T, F, R>(&self, path: &str, implementation: T, register_fn: F)
where
T: Send + Clone + 'static,
F: Fn(&mut Crossroads) -> R,
R: IntoIterator<Item = dbus_crossroads::IfaceToken<T>>,
{
let dbus_path = String::from(path);
let mut cr = self.crossroads.lock().unwrap();
let tokens: Vec<_> = register_fn(&mut cr).into_iter().collect();
cr.insert(dbus_path, &tokens, implementation);
// Start message handler if not already started
let mut handler_started = self.message_handler_started.lock().unwrap();
if !*handler_started {
let crossroads_clone = self.crossroads.clone();
self.connection.start_receive(
MatchRule::new_method_call(),
Box::new(move |msg, conn| {
let mut cr = crossroads_clone.lock().unwrap();
cr.handle_message(msg, conn).is_ok()
}),
);
*handler_started = true;
info!(target: "dbus", "Started D-Bus message handler");
}
info!(target: "dbus", "Registered object at {} with {} interfaces", path, tokens.len());
}
pub fn send_signal<S>(&self, path: &str, signal: S) -> Result<u32, ()>
where
S: dbus::message::SignalArgs + dbus::arg::AppendAll,
{
let message = signal.to_emit_message(&Path::new(path).unwrap());
self.connection.send(message)
}
}

View File

@@ -0,0 +1,19 @@
pub mod agent;
pub mod endpoint;
pub mod interface {
#![allow(unused)]
pub const NAME: &str = "net.buzzert.kordophonecd";
pub const OBJECT_PATH: &str = "/net/buzzert/kordophonecd/daemon";
include!(concat!(env!("OUT_DIR"), "/kordophone-server.rs"));
pub mod signals {
pub use super::NetBuzzertKordophoneRepositoryAttachmentDownloadCompleted as AttachmentDownloadCompleted;
pub use super::NetBuzzertKordophoneRepositoryAttachmentUploadCompleted as AttachmentUploadCompleted;
pub use super::NetBuzzertKordophoneRepositoryConversationsUpdated as ConversationsUpdated;
pub use super::NetBuzzertKordophoneRepositoryMessagesUpdated as MessagesUpdated;
pub use super::NetBuzzertKordophoneRepositoryUpdateStreamReconnected as UpdateStreamReconnected;
}
}

View File

@@ -0,0 +1 @@
pub mod daemon;

View File

@@ -0,0 +1,75 @@
#[cfg(target_os = "linux")]
mod dbus;
#[cfg(target_os = "macos")]
mod xpc;
use log::LevelFilter;
use std::future;
use kordophoned::daemon::Daemon;
fn initialize_logging() {
    // Parse RUST_LOG into a LevelFilter, falling back to Info when unset or invalid.
let log_level = std::env::var("RUST_LOG")
.map(|s| s.parse::<LevelFilter>().unwrap_or(LevelFilter::Info))
.unwrap_or(LevelFilter::Info);
env_logger::Builder::from_default_env()
.format_timestamp_millis()
.filter_level(log_level)
.init();
}
#[cfg(target_os = "linux")]
async fn start_ipc_agent(daemon: &mut Daemon) {
use dbus::agent::DBusAgent;
// Start the D-Bus agent (events in, signals out).
let agent = DBusAgent::new(daemon.event_sender.clone(), daemon.obtain_signal_receiver());
tokio::spawn(async move {
agent.run().await;
});
}
#[cfg(target_os = "macos")]
async fn start_ipc_agent(daemon: &mut Daemon) {
// Start the macOS XPC agent (events in, signals out) on a dedicated thread.
let agent =
xpc::agent::XpcAgent::new(daemon.event_sender.clone(), daemon.obtain_signal_receiver());
std::thread::spawn(move || {
// Use a single-threaded Tokio runtime for the XPC agent.
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.expect("Unable to create tokio runtime for XPC agent");
rt.block_on(agent.run());
});
}
#[cfg(not(any(target_os = "linux", target_os = "macos")))]
async fn start_ipc_agent(daemon: &mut Daemon) {
panic!("Unsupported IPC platform");
}
#[tokio::main]
async fn main() {
initialize_logging();
// Create the daemon
let mut daemon = Daemon::new()
.map_err(|e| {
log::error!("Failed to initialize daemon: {}", e);
std::process::exit(1);
})
.unwrap();
// Start the IPC agent.
start_ipc_agent(&mut daemon).await;
// Run the main daemon loop.
daemon.run().await;
    // Keep the process alive indefinitely so spawned background tasks continue running.
future::pending::<()>().await;
}

View File

@@ -0,0 +1,190 @@
use crate::xpc::interface::SERVICE_NAME;
use kordophoned::daemon::{events::Event, signals::Signal, DaemonResult};
use std::ffi::CString;
use std::os::raw::c_char;
use std::ptr;
use std::sync::Arc;
use tokio::sync::{mpsc, oneshot, Mutex};
use xpc_connection::{message_to_xpc_object, xpc_object_to_message, Message, MessageError};
use xpc_connection_sys as xpc_sys;
pub(super) static LOG_TARGET: &str = "xpc";
/// Wrapper around a raw XPC connection pointer so it can be shared across threads.
/// Safety: libxpc connections are reference-counted and may be used to send messages from other threads.
#[derive(Copy, Clone)]
pub(crate) struct XpcConn(pub xpc_sys::xpc_connection_t);
unsafe impl Send for XpcConn {}
unsafe impl Sync for XpcConn {}
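/// Clients that have subscribed (via `SubscribeSignals`) to receive broadcast signals.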
type Subscribers = Arc<std::sync::Mutex<Vec<XpcConn>>>;
#[derive(Clone)]
pub struct XpcAgent {
event_sink: mpsc::Sender<Event>,
signal_receiver: Arc<Mutex<Option<mpsc::Receiver<Signal>>>>,
}
impl XpcAgent {
pub fn new(event_sink: mpsc::Sender<Event>, signal_receiver: mpsc::Receiver<Signal>) -> Self {
Self {
event_sink,
signal_receiver: Arc::new(Mutex::new(Some(signal_receiver))),
}
}
pub async fn run(self) {
use block::ConcreteBlock;
use std::ops::Deref;
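        // Flow: spawn a task that forwards daemon signals to every subscribed client,
        // then listen on the Mach service and handle each incoming connection's requests.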
// Construct the Mach service name without a trailing NUL for CString.
let service_name = SERVICE_NAME.trim_end_matches('\0');
let mach_port_name = match CString::new(service_name) {
Ok(c) => c,
Err(e) => {
log::error!(target: LOG_TARGET, "Invalid XPC service name: {e}");
return;
}
};
log::info!(
target: LOG_TARGET,
"Waiting for XPC connections on {}",
service_name
);
let rt = match tokio::runtime::Runtime::new() {
Ok(rt) => Arc::new(rt),
Err(e) => {
log::error!(target: LOG_TARGET, "Failed to create Tokio runtime: {}", e);
return;
}
};
let connections: Subscribers = Arc::new(std::sync::Mutex::new(Vec::new()));
{
let receiver_arc = self.signal_receiver.clone();
let conns = connections.clone();
rt.spawn(async move {
let mut receiver = receiver_arc
.lock()
.await
.take()
.expect("Signal receiver already taken");
while let Some(signal) = receiver.recv().await {
log::trace!(target: LOG_TARGET, "Broadcasting signal: {:?}", signal);
let msg = super::util::signal_to_message(signal);
let xobj = message_to_xpc_object(msg);
let list = conns.lock().unwrap();
log::trace!(target: LOG_TARGET, "Active XPC clients: {}", list.len());
for c in list.iter() {
log::trace!(target: LOG_TARGET, "Sending signal to client");
unsafe { xpc_sys::xpc_connection_send_message(c.0, xobj) };
}
unsafe { xpc_sys::xpc_release(xobj) };
}
});
}
let service = unsafe {
xpc_sys::xpc_connection_create_mach_service(
mach_port_name.as_ptr(),
ptr::null_mut(),
xpc_sys::XPC_CONNECTION_MACH_SERVICE_LISTENER as u64,
)
};
let agent = self.clone();
let rt_accept = rt.clone();
let conns_accept = connections.clone();
let service_handler = ConcreteBlock::new(move |event: xpc_sys::xpc_object_t| {
unsafe {
let client = event as xpc_sys::xpc_connection_t;
log::trace!(target: LOG_TARGET, "New XPC connection accepted");
let agent_conn = agent.clone();
let rt_conn = rt_accept.clone();
let conns_for_handler = conns_accept.clone();
let conn_handler = ConcreteBlock::new(move |msg: xpc_sys::xpc_object_t| {
match xpc_object_to_message(msg) {
Message::Dictionary(map) => {
let method = super::util::dict_get_str(&map, "method").or_else(|| super::util::dict_get_str(&map, "type")).unwrap_or_else(|| "<unknown>".to_string());
log::trace!(target: LOG_TARGET, "XPC request received: {}", method);
let result = rt_conn.block_on(super::rpc::dispatch(&agent_conn, &conns_for_handler, client, &map));
let reply = xpc_sys::xpc_dictionary_create_reply(msg);
if !reply.is_null() {
let payload = message_to_xpc_object(result.message);
let apply_block = ConcreteBlock::new(move |key: *const c_char, value: xpc_sys::xpc_object_t| {
xpc_sys::xpc_dictionary_set_value(reply, key, value);
})
.copy();
xpc_sys::xpc_dictionary_apply(payload, apply_block.deref() as *const _ as *mut _);
xpc_sys::xpc_connection_send_message(client, reply);
xpc_sys::xpc_release(payload);
xpc_sys::xpc_release(reply);
// Drop any cleanup resource now that payload is constructed and sent.
drop(result.cleanup);
log::trace!(target: LOG_TARGET, "XPC reply sent for method: {}", method);
} else {
log::warn!(target: LOG_TARGET, "No reply port for method: {}", method);
}
}
Message::Error(e) => {
match e {
MessageError::ConnectionInvalid => {
let mut list = conns_for_handler.lock().unwrap();
let before = list.len();
list.retain(|c| c.0 != client);
let after = list.len();
if after < before {
log::trace!(target: LOG_TARGET, "Removed closed XPC client from subscribers ({} -> {})", before, after);
} else {
log::debug!(target: LOG_TARGET, "XPC connection closed (no subscription)");
}
}
other => {
log::warn!(target: LOG_TARGET, "XPC error event: {:?}", other);
}
}
}
_ => {}
}
})
.copy();
xpc_sys::xpc_connection_set_event_handler(
client,
conn_handler.deref() as *const _ as *mut _,
);
xpc_sys::xpc_connection_resume(client);
}
})
.copy();
unsafe {
xpc_sys::xpc_connection_set_event_handler(
service,
service_handler.deref() as *const _ as *mut _,
);
xpc_sys::xpc_connection_resume(service);
}
futures_util::future::pending::<()>().await;
}
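    /// Forwards an event to the daemon and awaits its reply on a oneshot channel.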
pub async fn send_event<T>(
&self,
make_event: impl FnOnce(kordophoned::daemon::events::Reply<T>) -> Event,
) -> DaemonResult<T> {
let (tx, rx) = oneshot::channel();
self.event_sink
.send(make_event(tx))
.await
.map_err(|_| "Failed to send event")?;
rx.await.map_err(|_| "Failed to receive reply".into())
}
}

View File

@@ -0,0 +1,5 @@
#![cfg(target_os = "macos")]
//! XPC interface definitions for macOS IPC
/// Mach service name for the XPC interface (must include trailing NUL).
pub const SERVICE_NAME: &str = "net.buzzert.kordophonecd\0";

View File

@@ -0,0 +1,24 @@
pub mod agent;
pub mod interface;
pub mod rpc;
pub mod util;
use std::any::Any;
use xpc_connection::Message;
/// Result of dispatching an XPC request: the message to send plus an optional
/// resource to keep alive until after the XPC payload is constructed.
pub struct DispatchResult {
pub message: Message,
pub cleanup: Option<Box<dyn Any + Send>>,
}
impl DispatchResult {
pub fn new(message: Message) -> Self {
Self { message, cleanup: None }
}
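    /// Builds a result whose `cleanup` value is kept alive until after the reply payload
    /// has been constructed and sent (e.g. a `File` whose raw fd is embedded in the reply).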
pub fn with_cleanup<T: Any + Send + 'static>(message: Message, cleanup: T) -> Self {
Self { message, cleanup: Some(Box::new(cleanup)) }
}
}

View File

@@ -0,0 +1,450 @@
use super::agent::{XpcAgent, XpcConn, LOG_TARGET};
use kordophoned::daemon::events::Event;
use kordophoned::daemon::settings::Settings;
use std::collections::HashMap;
use std::ffi::CString;
use xpc_connection::Message;
use xpc_connection_sys as xpc_sys;
use super::util::*;
use super::DispatchResult;
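/// Decodes a single XPC request dictionary, routes it to the daemon, and builds the
/// reply, echoing back the caller's `request_id` when present.
///
/// Requests are dictionaries of the (illustrative) form:
/// `{ "method": "GetMessages", "request_id": "…", "arguments": { "conversation_id": "…" } }`.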
pub async fn dispatch(
agent: &XpcAgent,
subscribers: &std::sync::Mutex<Vec<XpcConn>>,
current_client: xpc_sys::xpc_connection_t,
root: &HashMap<CString, Message>,
) -> DispatchResult {
let request_id = dict_get_str(root, "request_id");
let method = match dict_get_str(root, "method").or_else(|| dict_get_str(root, "type")) {
Some(m) => m,
None => {
return DispatchResult::new(attach_request_id(
make_error_reply("InvalidRequest", "Missing method/type"),
request_id,
))
}
};
let _arguments = get_dictionary_field(root, "arguments");
let mut response = match method.as_str() {
// GetVersion
"GetVersion" => match agent.send_event(Event::GetVersion).await {
Ok(version) => {
let mut reply: XpcMap = HashMap::new();
dict_put_str(&mut reply, "type", "GetVersionResponse");
dict_put_str(&mut reply, "version", &version);
DispatchResult::new(Message::Dictionary(reply))
}
Err(e) => DispatchResult::new(make_error_reply("DaemonError", &format!("{}", e))),
},
// GetConversations
"GetConversations" => {
let mut limit: i32 = 100;
let mut offset: i32 = 0;
if let Some(args) = get_dictionary_field(root, "arguments") {
if let Some(v) = dict_get_i64_from_str(args, "limit") {
limit = v as i32;
}
if let Some(v) = dict_get_i64_from_str(args, "offset") {
offset = v as i32;
}
}
match agent
.send_event(|r| Event::GetAllConversations(limit, offset, r))
.await
{
Ok(conversations) => {
let mut items: Vec<Message> = Vec::with_capacity(conversations.len());
for conv in conversations {
let mut m: XpcMap = HashMap::new();
dict_put_str(&mut m, "guid", &conv.guid);
dict_put_str(
&mut m,
"display_name",
&conv.display_name.unwrap_or_default(),
);
dict_put_i64_as_str(&mut m, "unread_count", conv.unread_count as i64);
dict_put_str(
&mut m,
"last_message_preview",
&conv.last_message_preview.unwrap_or_default(),
);
let participant_names: Vec<String> = conv
.participants
.into_iter()
.map(|p| p.display_name())
.collect();
m.insert(cstr("participants"), array_from_strs(participant_names));
dict_put_i64_as_str(&mut m, "date", conv.date.and_utc().timestamp());
items.push(Message::Dictionary(m));
}
let mut reply: XpcMap = HashMap::new();
dict_put_str(&mut reply, "type", "GetConversationsResponse");
reply.insert(cstr("conversations"), Message::Array(items));
DispatchResult::new(Message::Dictionary(reply))
}
Err(e) => DispatchResult::new(make_error_reply("DaemonError", &format!("{}", e))),
}
}
// Sync ops
"SyncConversationList" => match agent.send_event(Event::SyncConversationList).await {
Ok(()) => DispatchResult::new(make_ok_reply()),
Err(e) => DispatchResult::new(make_error_reply("DaemonError", &format!("{}", e))),
},
"SyncAllConversations" => match agent.send_event(Event::SyncAllConversations).await {
Ok(()) => DispatchResult::new(make_ok_reply()),
Err(e) => DispatchResult::new(make_error_reply("DaemonError", &format!("{}", e))),
},
"SyncConversation" => {
let conversation_id = match get_dictionary_field(root, "arguments")
.and_then(|m| dict_get_str(m, "conversation_id"))
{
Some(id) => id,
None => return DispatchResult::new(make_error_reply("InvalidRequest", "Missing conversation_id")),
};
match agent
.send_event(|r| Event::SyncConversation(conversation_id, r))
.await
{
Ok(()) => DispatchResult::new(make_ok_reply()),
Err(e) => DispatchResult::new(make_error_reply("DaemonError", &format!("{}", e))),
}
}
// Mark as read
"MarkConversationAsRead" => {
let conversation_id = match get_dictionary_field(root, "arguments")
.and_then(|m| dict_get_str(m, "conversation_id"))
{
Some(id) => id,
None => return DispatchResult::new(make_error_reply("InvalidRequest", "Missing conversation_id")),
};
match agent
.send_event(|r| Event::MarkConversationAsRead(conversation_id, r))
.await
{
Ok(()) => DispatchResult::new(make_ok_reply()),
Err(e) => DispatchResult::new(make_error_reply("DaemonError", &format!("{}", e))),
}
}
// GetMessages
"GetMessages" => {
let args = match get_dictionary_field(root, "arguments") {
Some(a) => a,
None => return DispatchResult::new(make_error_reply("InvalidRequest", "Missing arguments")),
};
let conversation_id = match dict_get_str(args, "conversation_id") {
Some(id) => id,
None => return DispatchResult::new(make_error_reply("InvalidRequest", "Missing conversation_id")),
};
let last_message_id = dict_get_str(args, "last_message_id");
match agent
.send_event(|r| Event::GetMessages(conversation_id, last_message_id, r))
.await
{
Ok(messages) => {
let mut items: Vec<Message> = Vec::with_capacity(messages.len());
for msg in messages {
let mut m: XpcMap = HashMap::new();
dict_put_str(&mut m, "id", &msg.id);
dict_put_str(&mut m, "text", &msg.text.replace('\u{FFFC}', ""));
dict_put_i64_as_str(&mut m, "date", msg.date.and_utc().timestamp());
dict_put_str(&mut m, "sender", &msg.sender.display_name());
// Include attachment GUIDs for the client to resolve/download
let attachment_guids: Vec<String> = msg
.attachments
.iter()
.map(|a| a.guid.clone())
.collect();
m.insert(cstr("attachment_guids"), array_from_strs(attachment_guids));
// Full attachments array with metadata (mirrors DBus fields)
let mut attachments_items: Vec<Message> = Vec::new();
for attachment in msg.attachments.iter() {
let mut a: XpcMap = HashMap::new();
// Basic identifiers
dict_put_str(&mut a, "guid", &attachment.guid);
// Paths and download status
let path = attachment.get_path_for_preview(false);
let preview_path = attachment.get_path_for_preview(true);
let downloaded = attachment.is_downloaded(false);
let preview_downloaded = attachment.is_downloaded(true);
dict_put_str(&mut a, "path", &path.to_string_lossy());
dict_put_str(&mut a, "preview_path", &preview_path.to_string_lossy());
dict_put_str(&mut a, "downloaded", &downloaded.to_string());
dict_put_str(
&mut a,
"preview_downloaded",
&preview_downloaded.to_string(),
);
// Metadata (optional)
if let Some(metadata) = &attachment.metadata {
let mut metadata_map: XpcMap = HashMap::new();
if let Some(attribution_info) = &metadata.attribution_info {
let mut attribution_map: XpcMap = HashMap::new();
if let Some(width) = attribution_info.width {
dict_put_i64_as_str(&mut attribution_map, "width", width as i64);
}
if let Some(height) = attribution_info.height {
dict_put_i64_as_str(&mut attribution_map, "height", height as i64);
}
metadata_map.insert(cstr("attribution_info"), Message::Dictionary(attribution_map));
}
if !metadata_map.is_empty() {
a.insert(cstr("metadata"), Message::Dictionary(metadata_map));
}
}
attachments_items.push(Message::Dictionary(a));
}
m.insert(cstr("attachments"), Message::Array(attachments_items));
items.push(Message::Dictionary(m));
}
let mut reply: XpcMap = HashMap::new();
dict_put_str(&mut reply, "type", "GetMessagesResponse");
reply.insert(cstr("messages"), Message::Array(items));
DispatchResult::new(Message::Dictionary(reply))
}
Err(e) => DispatchResult::new(make_error_reply("DaemonError", &format!("{}", e))),
}
}
// Delete all
"DeleteAllConversations" => match agent.send_event(Event::DeleteAllConversations).await {
Ok(()) => DispatchResult::new(make_ok_reply()),
Err(e) => DispatchResult::new(make_error_reply("DaemonError", &format!("{}", e))),
},
// SendMessage
"SendMessage" => {
let args = match get_dictionary_field(root, "arguments") {
Some(a) => a,
None => return DispatchResult::new(make_error_reply("InvalidRequest", "Missing arguments")),
};
let conversation_id = match dict_get_str(args, "conversation_id") {
Some(v) => v,
None => return DispatchResult::new(make_error_reply("InvalidRequest", "Missing conversation_id")),
};
let text = dict_get_str(args, "text").unwrap_or_default();
let attachment_guids: Vec<String> = match args.get(&cstr("attachment_guids")) {
Some(Message::Array(arr)) => arr
.iter()
.filter_map(|m| match m {
Message::String(s) => Some(s.to_string_lossy().into_owned()),
_ => None,
})
.collect(),
_ => Vec::new(),
};
match agent
.send_event(|r| Event::SendMessage(conversation_id, text, attachment_guids, r))
.await
{
Ok(uuid) => {
let mut reply: XpcMap = HashMap::new();
dict_put_str(&mut reply, "type", "SendMessageResponse");
dict_put_str(&mut reply, "uuid", &uuid.to_string());
DispatchResult::new(Message::Dictionary(reply))
}
Err(e) => DispatchResult::new(make_error_reply("DaemonError", &format!("{}", e))),
}
}
// GetAttachmentInfo
"GetAttachmentInfo" => {
let args = match get_dictionary_field(root, "arguments") {
Some(a) => a,
None => return DispatchResult::new(make_error_reply("InvalidRequest", "Missing arguments")),
};
let attachment_id = match dict_get_str(args, "attachment_id") {
Some(v) => v,
None => return DispatchResult::new(make_error_reply("InvalidRequest", "Missing attachment_id")),
};
match agent
.send_event(|r| Event::GetAttachment(attachment_id, r))
.await
{
Ok(attachment) => {
let mut reply: XpcMap = HashMap::new();
dict_put_str(&mut reply, "type", "GetAttachmentInfoResponse");
dict_put_str(
&mut reply,
"path",
&attachment.get_path_for_preview(false).to_string_lossy(),
);
dict_put_str(
&mut reply,
"preview_path",
&attachment.get_path_for_preview(true).to_string_lossy(),
);
dict_put_str(
&mut reply,
"downloaded",
&attachment.is_downloaded(false).to_string(),
);
dict_put_str(
&mut reply,
"preview_downloaded",
&attachment.is_downloaded(true).to_string(),
);
DispatchResult::new(Message::Dictionary(reply))
}
Err(e) => DispatchResult::new(make_error_reply("DaemonError", &format!("{}", e))),
}
}
// OpenAttachmentFd (return file descriptor in reply)
"OpenAttachmentFd" => {
let args = match get_dictionary_field(root, "arguments") {
Some(a) => a,
None => return DispatchResult::new(make_error_reply("InvalidRequest", "Missing arguments")),
};
let attachment_id = match dict_get_str(args, "attachment_id") {
Some(v) => v,
None => return DispatchResult::new(make_error_reply("InvalidRequest", "Missing attachment_id")),
};
let preview = dict_get_str(args, "preview")
.map(|s| s == "true")
.unwrap_or(false);
match agent
.send_event(|r| Event::GetAttachment(attachment_id, r))
.await
{
Ok(attachment) => {
use std::os::fd::AsRawFd;
let path = attachment.get_path_for_preview(preview);
match std::fs::OpenOptions::new().read(true).open(&path) {
Ok(file) => {
                            let fd = file.as_raw_fd();
let mut reply: XpcMap = HashMap::new();
dict_put_str(&mut reply, "type", "OpenAttachmentFdResponse");
reply.insert(cstr("fd"), Message::Fd(fd));
DispatchResult { message: Message::Dictionary(reply), cleanup: Some(Box::new(file)) }
}
Err(e) => DispatchResult::new(make_error_reply("OpenFailed", &format!("{}", e))),
}
}
Err(e) => DispatchResult::new(make_error_reply("DaemonError", &format!("{}", e))),
}
}
// DownloadAttachment
"DownloadAttachment" => {
let args = match get_dictionary_field(root, "arguments") {
Some(a) => a,
None => return DispatchResult::new(make_error_reply("InvalidRequest", "Missing arguments")),
};
let attachment_id = match dict_get_str(args, "attachment_id") {
Some(v) => v,
None => return DispatchResult::new(make_error_reply("InvalidRequest", "Missing attachment_id")),
};
let preview = dict_get_str(args, "preview")
.map(|s| s == "true")
.unwrap_or(false);
match agent
.send_event(|r| Event::DownloadAttachment(attachment_id, preview, r))
.await
{
Ok(()) => DispatchResult::new(make_ok_reply()),
Err(e) => DispatchResult::new(make_error_reply("DaemonError", &format!("{}", e))),
}
}
// UploadAttachment
"UploadAttachment" => {
use std::path::PathBuf;
let args = match get_dictionary_field(root, "arguments") {
Some(a) => a,
None => return DispatchResult::new(make_error_reply("InvalidRequest", "Missing arguments")),
};
let path = match dict_get_str(args, "path") {
Some(v) => v,
None => return DispatchResult::new(make_error_reply("InvalidRequest", "Missing path")),
};
match agent
.send_event(|r| Event::UploadAttachment(PathBuf::from(path), r))
.await
{
Ok(upload_guid) => {
let mut reply: XpcMap = HashMap::new();
dict_put_str(&mut reply, "type", "UploadAttachmentResponse");
dict_put_str(&mut reply, "upload_guid", &upload_guid);
DispatchResult::new(Message::Dictionary(reply))
}
Err(e) => DispatchResult::new(make_error_reply("DaemonError", &format!("{}", e))),
}
}
// Settings
"GetAllSettings" => match agent.send_event(Event::GetAllSettings).await {
Ok(settings) => {
let mut reply: XpcMap = HashMap::new();
dict_put_str(&mut reply, "type", "GetAllSettingsResponse");
dict_put_str(
&mut reply,
"server_url",
&settings.server_url.unwrap_or_default(),
);
dict_put_str(
&mut reply,
"username",
&settings.username.unwrap_or_default(),
);
DispatchResult::new(Message::Dictionary(reply))
}
Err(e) => DispatchResult::new(make_error_reply("DaemonError", &format!("{}", e))),
},
"UpdateSettings" => {
let args = match get_dictionary_field(root, "arguments") {
Some(a) => a,
None => return DispatchResult::new(make_error_reply("InvalidRequest", "Missing arguments")),
};
let server_url = dict_get_str(args, "server_url");
let username = dict_get_str(args, "username");
let settings = Settings {
server_url,
username,
token: None,
};
match agent
.send_event(|r| Event::UpdateSettings(settings, r))
.await
{
Ok(()) => DispatchResult::new(make_ok_reply()),
Err(e) => DispatchResult::new(make_error_reply("DaemonError", &format!("{}", e))),
}
}
// Subscribe
"SubscribeSignals" => {
let mut list = subscribers.lock().unwrap();
if !list.iter().any(|c| c.0 == current_client) {
list.push(XpcConn(current_client));
log::trace!(target: LOG_TARGET, "Client subscribed to signals (total subscribers: {})", list.len());
}
DispatchResult::new(make_ok_reply())
}
// Unknown method fallback
other => DispatchResult::new(make_error_reply("UnknownMethod", other)),
};
response.message = attach_request_id(response.message, request_id);
response
}

View File

@@ -0,0 +1,100 @@
use kordophoned::daemon::signals::Signal;
use std::collections::HashMap;
use std::ffi::CString;
use xpc_connection::Message;
pub type XpcMap = HashMap<CString, Message>;
pub fn cstr(s: &str) -> CString {
CString::new(s).unwrap_or_else(|_| CString::new("").unwrap())
}
pub fn get_dictionary_field<'a>(
map: &'a HashMap<CString, Message>,
key: &str,
) -> Option<&'a HashMap<CString, Message>> {
let k = CString::new(key).ok()?;
map.get(&k).and_then(|v| match v {
Message::Dictionary(d) => Some(d),
_ => None,
})
}
pub fn dict_get_str(map: &HashMap<CString, Message>, key: &str) -> Option<String> {
let k = CString::new(key).ok()?;
match map.get(&k) {
Some(Message::String(v)) => Some(v.to_string_lossy().into_owned()),
_ => None,
}
}
pub fn dict_get_i64_from_str(map: &HashMap<CString, Message>, key: &str) -> Option<i64> {
dict_get_str(map, key).and_then(|s| s.parse::<i64>().ok())
}
pub fn dict_put_str(map: &mut XpcMap, key: &str, value: impl AsRef<str>) {
map.insert(cstr(key), Message::String(cstr(value.as_ref())));
}
pub fn dict_put_i64_as_str(map: &mut XpcMap, key: &str, value: i64) {
dict_put_str(map, key, value.to_string());
}
pub fn array_from_strs(values: impl IntoIterator<Item = String>) -> Message {
let arr = values
.into_iter()
.map(|s| Message::String(cstr(&s)))
.collect();
Message::Array(arr)
}
pub fn make_ok_reply() -> Message {
let mut reply: XpcMap = HashMap::new();
dict_put_str(&mut reply, "type", "Ok");
Message::Dictionary(reply)
}
pub fn make_error_reply(code: &str, message: &str) -> Message {
let mut reply: HashMap<CString, Message> = HashMap::new();
reply.insert(cstr("type"), Message::String(cstr("Error")));
reply.insert(cstr("error"), Message::String(cstr(code)));
reply.insert(cstr("message"), Message::String(cstr(message)));
Message::Dictionary(reply)
}
pub fn attach_request_id(mut message: Message, request_id: Option<String>) -> Message {
if let (Some(id), Message::Dictionary(ref mut m)) = (request_id, &mut message) {
dict_put_str(m, "request_id", &id);
}
message
}
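/// Converts a daemon `Signal` into an XPC dictionary of the form
/// `{ "name": "MessagesUpdated", "arguments": { "conversation_id": … } }`.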
pub fn signal_to_message(signal: Signal) -> Message {
let mut root: XpcMap = HashMap::new();
let mut args: XpcMap = HashMap::new();
match signal {
Signal::ConversationsUpdated => {
dict_put_str(&mut root, "name", "ConversationsUpdated");
}
Signal::MessagesUpdated(conversation_id) => {
dict_put_str(&mut root, "name", "MessagesUpdated");
dict_put_str(&mut args, "conversation_id", &conversation_id);
}
Signal::AttachmentDownloaded(attachment_id) => {
dict_put_str(&mut root, "name", "AttachmentDownloadCompleted");
dict_put_str(&mut args, "attachment_id", &attachment_id);
}
Signal::AttachmentUploaded(upload_guid, attachment_guid) => {
dict_put_str(&mut root, "name", "AttachmentUploadCompleted");
dict_put_str(&mut args, "upload_guid", &upload_guid);
dict_put_str(&mut args, "attachment_guid", &attachment_guid);
}
Signal::UpdateStreamReconnected => {
dict_put_str(&mut root, "name", "UpdateStreamReconnected");
}
}
if !args.is_empty() {
root.insert(cstr("arguments"), Message::Dictionary(args));
}
Message::Dictionary(root)
}

4
core/kpcli/.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
.env
.env.*

38
core/kpcli/Cargo.toml Normal file
View File

@@ -0,0 +1,38 @@
[package]
name = "kpcli"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
anyhow = "1.0.93"
clap = { version = "4.5.20", features = ["derive"] }
dotenv = "0.15.0"
env_logger = "0.11.8"
futures-util = "0.3.31"
kordophone = { path = "../kordophone" }
kordophone-db = { path = "../kordophone-db" }
log = "0.4.22"
pretty = { version = "0.12.3", features = ["termcolor"] }
prettytable = "0.10.0"
serde_json = "1.0"
time = "0.3.37"
tokio = "1.41.1"
async-trait = "0.1.80"
# D-Bus dependencies only on Linux
[target.'cfg(target_os = "linux")'.dependencies]
dbus = "0.9.7"
dbus-tree = "0.9.2"
# D-Bus codegen only on Linux
[target.'cfg(target_os = "linux")'.build-dependencies]
dbus-codegen = "0.10.0"
# XPC (libxpc) interface only on macOS
[target.'cfg(target_os = "macos")'.dependencies]
block = "0.1.6"
futures = "0.3.4"
xpc-connection = { git = "https://github.com/dfrankland/xpc-connection-rs.git", rev = "cd4fb3d", package = "xpc-connection" }
xpc-connection-sys = { git = "https://github.com/dfrankland/xpc-connection-rs.git", rev = "cd4fb3d", package = "xpc-connection-sys" }

27
core/kpcli/build.rs Normal file
View File

@@ -0,0 +1,27 @@
const KORDOPHONE_XML: &str = "../kordophoned/include/net.buzzert.kordophonecd.Server.xml";
#[cfg(not(target_os = "linux"))]
fn main() {
// No D-Bus codegen on non-Linux platforms
}
#[cfg(target_os = "linux")]
fn main() {
let out_dir = std::env::var("OUT_DIR").unwrap();
let out_path = std::path::Path::new(&out_dir).join("kordophone-client.rs");
let opts = dbus_codegen::GenOpts {
connectiontype: dbus_codegen::ConnectionType::Blocking,
methodtype: None,
..Default::default()
};
let xml = std::fs::read_to_string(KORDOPHONE_XML).expect("Error reading server dbus interface");
let output =
dbus_codegen::generate(&xml, &opts).expect("Error generating client dbus interface");
std::fs::write(out_path, output).expect("Error writing client dbus code");
println!("cargo:rerun-if-changed={}", KORDOPHONE_XML);
}

View File

@@ -0,0 +1,178 @@
use kordophone::api::event_socket::{EventSocket, SocketEvent, SocketUpdate};
use kordophone::api::http_client::Credentials;
use kordophone::api::http_client::HTTPAPIClient;
use kordophone::api::InMemoryAuthenticationStore;
use kordophone::APIInterface;
use crate::printers::{ConversationPrinter, MessagePrinter};
use anyhow::Result;
use clap::Subcommand;
use kordophone::model::event::EventData;
use kordophone::model::outgoing_message::OutgoingMessage;
use futures_util::StreamExt;
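/// Builds an HTTP API client from the `KORDOPHONE_API_URL`, `KORDOPHONE_USERNAME`,
/// and `KORDOPHONE_PASSWORD` environment variables (loaded via dotenv when present).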
pub fn make_api_client_from_env() -> HTTPAPIClient<InMemoryAuthenticationStore> {
dotenv::dotenv().ok();
    // Read the server URL and credentials from the environment.
let base_url = std::env::var("KORDOPHONE_API_URL").expect("KORDOPHONE_API_URL must be set");
let credentials = Credentials {
username: std::env::var("KORDOPHONE_USERNAME").expect("KORDOPHONE_USERNAME must be set"),
password: std::env::var("KORDOPHONE_PASSWORD").expect("KORDOPHONE_PASSWORD must be set"),
};
HTTPAPIClient::new(
base_url.parse().unwrap(),
InMemoryAuthenticationStore::new(Some(credentials)),
)
}
#[derive(Subcommand)]
pub enum Commands {
/// Prints all known conversations on the server.
Conversations,
/// Prints all messages in a conversation.
Messages { conversation_id: String },
/// Prints the server Kordophone version.
Version,
/// Prints all events from the server.
Events,
/// Prints all raw updates from the server.
RawUpdates,
/// Sends a message to the server.
SendMessage {
conversation_id: String,
message: String,
},
/// Marks a conversation as read.
Mark { conversation_id: String },
}
impl Commands {
pub async fn run(cmd: Commands) -> Result<()> {
let mut client = ClientCli::new();
match cmd {
Commands::Version => client.print_version().await,
Commands::Conversations => client.print_conversations().await,
Commands::Messages { conversation_id } => client.print_messages(conversation_id).await,
Commands::RawUpdates => client.print_raw_updates().await,
Commands::Events => client.print_events().await,
Commands::SendMessage {
conversation_id,
message,
} => client.send_message(conversation_id, message).await,
Commands::Mark { conversation_id } => {
client.mark_conversation_as_read(conversation_id).await
}
}
}
}
struct ClientCli {
api: HTTPAPIClient<InMemoryAuthenticationStore>,
}
impl ClientCli {
pub fn new() -> Self {
let api = make_api_client_from_env();
Self { api }
}
pub async fn print_version(&mut self) -> Result<()> {
let version = self.api.get_version().await?;
println!("Version: {}", version);
Ok(())
}
pub async fn print_conversations(&mut self) -> Result<()> {
let conversations = self.api.get_conversations().await?;
for conversation in conversations {
println!("{}", ConversationPrinter::new(&conversation.into()));
}
Ok(())
}
pub async fn print_messages(&mut self, conversation_id: String) -> Result<()> {
let messages = self
.api
.get_messages(&conversation_id, None, None, None)
.await?;
for message in messages {
println!("{}", MessagePrinter::new(&message.into()));
}
Ok(())
}
pub async fn print_events(&mut self) -> Result<()> {
let socket = self.api.open_event_socket(None).await?;
let (mut stream, _) = socket.events().await;
while let Some(Ok(socket_event)) = stream.next().await {
match socket_event {
SocketEvent::Update(event) => match event.data {
EventData::ConversationChanged(conversation) => {
println!("Conversation changed: {}", conversation.guid);
}
EventData::MessageReceived(conversation, message) => {
println!(
"Message received: msg: {} conversation: {}",
message.guid, conversation.guid
);
}
},
SocketEvent::Pong => {
println!("Pong");
}
}
}
Ok(())
}
pub async fn print_raw_updates(&mut self) -> Result<()> {
let socket = self.api.open_event_socket(None).await?;
println!("Listening for raw updates...");
let mut stream = socket.raw_updates().await;
while let Some(Ok(update)) = stream.next().await {
match update {
SocketUpdate::Update(updates) => {
for update in updates {
println!("Got update: {:?}", update);
}
}
SocketUpdate::Pong => {
println!("Pong");
}
}
}
Ok(())
}
pub async fn send_message(&mut self, conversation_id: String, message: String) -> Result<()> {
let outgoing_message = OutgoingMessage::builder()
.conversation_id(conversation_id)
.text(message)
.build();
let message = self.api.send_message(&outgoing_message).await?;
println!("Message sent: {}", message.guid);
Ok(())
}
pub async fn mark_conversation_as_read(&mut self, conversation_id: String) -> Result<()> {
self.api.mark_conversation_as_read(&conversation_id).await?;
println!("Conversation marked as read: {}", conversation_id);
Ok(())
}
}

View File

@@ -0,0 +1,212 @@
//! Linux-only D-Bus implementation of the `DaemonInterface`.
#![cfg(target_os = "linux")]
use super::{ConfigCommands, DaemonInterface};
use crate::printers::{ConversationPrinter, MessagePrinter};
use anyhow::Result;
use async_trait::async_trait;
use dbus::blocking::{Connection, Proxy};
use prettytable::table;
const DBUS_NAME: &str = "net.buzzert.kordophonecd";
const DBUS_PATH: &str = "/net/buzzert/kordophonecd/daemon";
#[allow(unused)]
mod dbus_interface {
#![allow(unused)]
include!(concat!(env!("OUT_DIR"), "/kordophone-client.rs"));
}
use dbus_interface::NetBuzzertKordophoneRepository as KordophoneRepository;
use dbus_interface::NetBuzzertKordophoneSettings as KordophoneSettings;
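/// D-Bus client for the daemon: a blocking session-bus connection plus the generated
/// proxy traits for the repository and settings interfaces.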
pub struct DBusDaemonInterface {
conn: Connection,
}
impl DBusDaemonInterface {
pub fn new() -> Result<Self> {
Ok(Self {
conn: Connection::new_session()?,
})
}
fn proxy(&self) -> Proxy<&Connection> {
self.conn
.with_proxy(DBUS_NAME, DBUS_PATH, std::time::Duration::from_millis(5000))
}
async fn print_settings(&mut self) -> Result<()> {
let server_url = KordophoneSettings::server_url(&self.proxy()).unwrap_or_default();
let username = KordophoneSettings::username(&self.proxy()).unwrap_or_default();
let table = table!([
b->"Server URL", &server_url
], [
b->"Username", &username
]);
table.printstd();
Ok(())
}
async fn set_server_url(&mut self, url: String) -> Result<()> {
KordophoneSettings::set_server_url(&self.proxy(), url)
.map_err(|e| anyhow::anyhow!("Failed to set server URL: {}", e))
}
async fn set_username(&mut self, username: String) -> Result<()> {
KordophoneSettings::set_username(&self.proxy(), username)
.map_err(|e| anyhow::anyhow!("Failed to set username: {}", e))
}
}
#[async_trait]
impl DaemonInterface for DBusDaemonInterface {
async fn print_version(&mut self) -> Result<()> {
let version = KordophoneRepository::get_version(&self.proxy())?;
println!("Server version: {}", version);
Ok(())
}
async fn print_conversations(&mut self) -> Result<()> {
let conversations = KordophoneRepository::get_conversations(&self.proxy(), 100, 0)?;
println!("Number of conversations: {}", conversations.len());
for conversation in conversations {
println!("{}", ConversationPrinter::new(&conversation.into()));
}
Ok(())
}
async fn sync_conversations(&mut self, conversation_id: Option<String>) -> Result<()> {
if let Some(conversation_id) = conversation_id {
KordophoneRepository::sync_conversation(&self.proxy(), &conversation_id)
.map_err(|e| anyhow::anyhow!("Failed to sync conversation: {}", e))
} else {
KordophoneRepository::sync_all_conversations(&self.proxy())
.map_err(|e| anyhow::anyhow!("Failed to sync conversations: {}", e))
}
}
async fn sync_conversations_list(&mut self) -> Result<()> {
KordophoneRepository::sync_conversation_list(&self.proxy())
.map_err(|e| anyhow::anyhow!("Failed to sync conversations: {}", e))
}
async fn print_messages(
&mut self,
conversation_id: String,
last_message_id: Option<String>,
) -> Result<()> {
let messages = KordophoneRepository::get_messages(
&self.proxy(),
&conversation_id,
&last_message_id.unwrap_or_default(),
)?;
println!("Number of messages: {}", messages.len());
for message in messages {
println!("{}", MessagePrinter::new(&message.into()));
}
Ok(())
}
async fn enqueue_outgoing_message(
&mut self,
conversation_id: String,
text: String,
) -> Result<()> {
let attachment_guids: Vec<&str> = vec![];
let outgoing_message_id = KordophoneRepository::send_message(
&self.proxy(),
&conversation_id,
&text,
attachment_guids,
)?;
println!("Outgoing message ID: {}", outgoing_message_id);
Ok(())
}
async fn wait_for_signals(&mut self) -> Result<()> {
use dbus::Message;
mod dbus_signals {
pub use super::dbus_interface::NetBuzzertKordophoneRepositoryConversationsUpdated as ConversationsUpdated;
}
let _id = self.proxy().match_signal(
|_: dbus_signals::ConversationsUpdated, _: &Connection, _: &Message| {
println!("Signal: Conversations updated");
true
},
);
println!("Waiting for signals...");
loop {
self.conn.process(std::time::Duration::from_millis(1000))?;
}
}
async fn config(&mut self, cmd: ConfigCommands) -> Result<()> {
match cmd {
ConfigCommands::Print => self.print_settings().await,
ConfigCommands::SetServerUrl { url } => self.set_server_url(url).await,
ConfigCommands::SetUsername { username } => self.set_username(username).await,
}
}
async fn delete_all_conversations(&mut self) -> Result<()> {
KordophoneRepository::delete_all_conversations(&self.proxy())
.map_err(|e| anyhow::anyhow!("Failed to delete all conversations: {}", e))
}
async fn download_attachment(&mut self, attachment_id: String) -> Result<()> {
// Trigger download.
KordophoneRepository::download_attachment(&self.proxy(), &attachment_id, false)?;
// Get attachment info.
let attachment_info =
KordophoneRepository::get_attachment_info(&self.proxy(), &attachment_id)?;
let (path, _preview_path, downloaded, _preview_downloaded) = attachment_info;
if downloaded {
println!("Attachment already downloaded: {}", path);
return Ok(());
}
println!("Downloading attachment: {}", attachment_id);
// Attach to the signal that the attachment has been downloaded.
let download_path = path.clone();
let _id = self.proxy().match_signal(
move |_: dbus_interface::NetBuzzertKordophoneRepositoryAttachmentDownloadCompleted,
_: &Connection,
_: &dbus::message::Message| {
println!("Signal: Attachment downloaded: {}", download_path);
std::process::exit(0);
},
);
let _id = self.proxy().match_signal(
|h: dbus_interface::NetBuzzertKordophoneRepositoryAttachmentDownloadFailed,
_: &Connection,
_: &dbus::message::Message| {
println!("Signal: Attachment download failed: {}", h.attachment_id);
std::process::exit(1);
},
);
// Wait for the signal.
loop {
self.conn.process(std::time::Duration::from_millis(1000))?;
}
}
async fn upload_attachment(&mut self, path: String) -> Result<()> {
let upload_guid = KordophoneRepository::upload_attachment(&self.proxy(), &path)?;
println!("Upload GUID: {}", upload_guid);
Ok(())
}
async fn mark_conversation_as_read(&mut self, conversation_id: String) -> Result<()> {
KordophoneRepository::mark_conversation_as_read(&self.proxy(), &conversation_id)
.map_err(|e| anyhow::anyhow!("Failed to mark conversation as read: {}", e))
}
}

View File

@@ -0,0 +1,224 @@
use anyhow::Result;
use async_trait::async_trait;
use clap::Subcommand;
// Platform-specific modules
#[cfg(target_os = "linux")]
mod dbus;
#[cfg(target_os = "macos")]
mod xpc;
#[cfg_attr(target_os = "macos", async_trait(?Send))]
#[cfg_attr(not(target_os = "macos"), async_trait)]
pub trait DaemonInterface {
async fn print_version(&mut self) -> Result<()>;
async fn print_conversations(&mut self) -> Result<()>;
async fn sync_conversations(&mut self, conversation_id: Option<String>) -> Result<()>;
async fn sync_conversations_list(&mut self) -> Result<()>;
async fn print_messages(
&mut self,
conversation_id: String,
last_message_id: Option<String>,
) -> Result<()>;
async fn enqueue_outgoing_message(
&mut self,
conversation_id: String,
text: String,
) -> Result<()>;
async fn wait_for_signals(&mut self) -> Result<()>;
async fn config(&mut self, cmd: ConfigCommands) -> Result<()>;
async fn delete_all_conversations(&mut self) -> Result<()>;
async fn download_attachment(&mut self, attachment_id: String) -> Result<()>;
async fn upload_attachment(&mut self, path: String) -> Result<()>;
async fn mark_conversation_as_read(&mut self, conversation_id: String) -> Result<()>;
}
struct StubDaemonInterface;
impl StubDaemonInterface {
fn new() -> Result<Self> {
Ok(Self)
}
}
#[cfg_attr(target_os = "macos", async_trait(?Send))]
#[cfg_attr(not(target_os = "macos"), async_trait)]
impl DaemonInterface for StubDaemonInterface {
async fn print_version(&mut self) -> Result<()> {
Err(anyhow::anyhow!(
"Daemon interface not implemented on this platform"
))
}
async fn print_conversations(&mut self) -> Result<()> {
Err(anyhow::anyhow!(
"Daemon interface not implemented on this platform"
))
}
async fn sync_conversations(&mut self, _conversation_id: Option<String>) -> Result<()> {
Err(anyhow::anyhow!(
"Daemon interface not implemented on this platform"
))
}
async fn sync_conversations_list(&mut self) -> Result<()> {
Err(anyhow::anyhow!(
"Daemon interface not implemented on this platform"
))
}
async fn print_messages(
&mut self,
_conversation_id: String,
_last_message_id: Option<String>,
) -> Result<()> {
Err(anyhow::anyhow!(
"Daemon interface not implemented on this platform"
))
}
async fn enqueue_outgoing_message(
&mut self,
_conversation_id: String,
_text: String,
) -> Result<()> {
Err(anyhow::anyhow!(
"Daemon interface not implemented on this platform"
))
}
async fn wait_for_signals(&mut self) -> Result<()> {
Err(anyhow::anyhow!(
"Daemon interface not implemented on this platform"
))
}
async fn config(&mut self, _cmd: ConfigCommands) -> Result<()> {
Err(anyhow::anyhow!(
"Daemon interface not implemented on this platform"
))
}
async fn delete_all_conversations(&mut self) -> Result<()> {
Err(anyhow::anyhow!(
"Daemon interface not implemented on this platform"
))
}
async fn download_attachment(&mut self, _attachment_id: String) -> Result<()> {
Err(anyhow::anyhow!(
"Daemon interface not implemented on this platform"
))
}
async fn upload_attachment(&mut self, _path: String) -> Result<()> {
Err(anyhow::anyhow!(
"Daemon interface not implemented on this platform"
))
}
async fn mark_conversation_as_read(&mut self, _conversation_id: String) -> Result<()> {
Err(anyhow::anyhow!(
"Daemon interface not implemented on this platform"
))
}
}
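/// Constructs the platform-appropriate daemon interface: D-Bus on Linux, XPC on macOS,
/// and a stub elsewhere.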
pub fn new_daemon_interface() -> Result<Box<dyn DaemonInterface>> {
#[cfg(target_os = "linux")]
{
Ok(Box::new(dbus::DBusDaemonInterface::new()?))
}
#[cfg(target_os = "macos")]
{
Ok(Box::new(xpc::XpcDaemonInterface::new()?))
}
#[cfg(not(any(target_os = "linux", target_os = "macos")))]
{
Ok(Box::new(StubDaemonInterface::new()?))
}
}
#[derive(Subcommand)]
pub enum Commands {
/// Gets all known conversations.
Conversations,
/// Runs a full sync operation for a conversation and its messages.
Sync { conversation_id: Option<String> },
/// Runs a sync operation for the conversation list.
SyncList,
/// Prints the server Kordophone version.
Version,
/// Configuration options
Config {
#[command(subcommand)]
command: ConfigCommands,
},
/// Waits for signals from the daemon.
Signals,
/// Prints the messages for a conversation.
Messages {
conversation_id: String,
last_message_id: Option<String>,
},
/// Deletes all conversations.
DeleteAllConversations,
/// Enqueues an outgoing message to be sent to a conversation.
SendMessage {
conversation_id: String,
text: String,
},
/// Downloads an attachment from the server to the attachment store. Returns the path to the attachment.
DownloadAttachment { attachment_id: String },
/// Uploads an attachment to the server, returns upload guid.
UploadAttachment { path: String },
/// Marks a conversation as read.
MarkConversationAsRead { conversation_id: String },
}
#[derive(Subcommand)]
pub enum ConfigCommands {
/// Prints the current settings.
Print,
/// Sets the server URL.
SetServerUrl { url: String },
/// Sets the username.
SetUsername { username: String },
}
impl Commands {
pub async fn run(cmd: Commands) -> Result<()> {
let mut client = new_daemon_interface()?;
match cmd {
Commands::Version => client.print_version().await,
Commands::Conversations => client.print_conversations().await,
Commands::Sync { conversation_id } => client.sync_conversations(conversation_id).await,
Commands::SyncList => client.sync_conversations_list().await,
Commands::Config { command } => client.config(command).await,
Commands::Signals => client.wait_for_signals().await,
Commands::Messages {
conversation_id,
last_message_id,
} => {
client
.print_messages(conversation_id, last_message_id)
.await
}
Commands::DeleteAllConversations => client.delete_all_conversations().await,
Commands::SendMessage {
conversation_id,
text,
} => client.enqueue_outgoing_message(conversation_id, text).await,
Commands::UploadAttachment { path } => client.upload_attachment(path).await,
Commands::DownloadAttachment { attachment_id } => {
client.download_attachment(attachment_id).await
}
Commands::MarkConversationAsRead { conversation_id } => {
client.mark_conversation_as_read(conversation_id).await
}
}
}
}

View File

@@ -0,0 +1,604 @@
use super::{ConfigCommands, DaemonInterface};
use anyhow::Result;
use async_trait::async_trait;
use futures_util::StreamExt;
use std::collections::HashMap;
use std::ffi::{CStr, CString};
use std::ops::Deref;
use std::{pin::Pin, task::Poll};
use xpc_connection::Message;
use futures::{
channel::mpsc::{unbounded as unbounded_channel, UnboundedReceiver, UnboundedSender},
Stream,
};
const SERVICE_NAME: &str = "net.buzzert.kordophonecd\0";
const GET_VERSION_METHOD: &str = "GetVersion";
const GET_CONVERSATIONS_METHOD: &str = "GetConversations";
// We can't use the client from xpc-connection because of the flags it passes to
// xpc_connection_create_mach_service, so we keep a minimal client of our own here.
struct XPCClient {
connection: xpc_connection_sys::xpc_connection_t,
receiver: UnboundedReceiver<Message>,
sender: UnboundedSender<Message>,
event_handler_is_running: bool,
}
impl XPCClient {
pub fn connect(name: impl AsRef<CStr>) -> Self {
use block::ConcreteBlock;
use xpc_connection::xpc_object_to_message;
use xpc_connection_sys::xpc_connection_resume;
use xpc_connection_sys::xpc_connection_set_event_handler;
let name = name.as_ref();
let connection = unsafe {
xpc_connection_sys::xpc_connection_create_mach_service(
name.as_ptr(),
std::ptr::null_mut(),
0,
)
};
let (sender, receiver) = unbounded_channel();
let sender_clone = sender.clone();
let block = ConcreteBlock::new(move |event| {
let message = xpc_object_to_message(event);
sender_clone.unbounded_send(message).ok()
});
let block = block.copy();
unsafe {
xpc_connection_set_event_handler(connection, block.deref() as *const _ as *mut _);
xpc_connection_resume(connection);
}
Self {
connection,
receiver,
sender,
event_handler_is_running: true,
}
}
pub fn send_message(&self, message: Message) {
use xpc_connection::message_to_xpc_object;
use xpc_connection_sys::xpc_connection_send_message;
use xpc_connection_sys::xpc_release;
let xpc_object = message_to_xpc_object(message);
unsafe {
xpc_connection_send_message(self.connection, xpc_object);
xpc_release(xpc_object);
}
}
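    /// Sends a message and blocks until the daemon replies, converting the raw XPC
    /// reply back into a `Message`.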
pub fn send_message_with_reply(&self, message: Message) -> Message {
use xpc_connection::message_to_xpc_object;
use xpc_connection::xpc_object_to_message;
use xpc_connection_sys::{xpc_connection_send_message_with_reply_sync, xpc_release};
unsafe {
let xobj = message_to_xpc_object(message);
let reply = xpc_connection_send_message_with_reply_sync(self.connection, xobj);
xpc_release(xobj);
let msg = xpc_object_to_message(reply);
if !reply.is_null() {
xpc_release(reply);
}
msg
}
}
}
impl Drop for XPCClient {
fn drop(&mut self) {
use xpc_connection_sys::xpc_object_t;
use xpc_connection_sys::xpc_release;
unsafe { xpc_release(self.connection as xpc_object_t) };
}
}
impl Stream for XPCClient {
type Item = Message;
fn poll_next(
mut self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> Poll<Option<Self::Item>> {
match Stream::poll_next(Pin::new(&mut self.receiver), cx) {
Poll::Ready(Some(Message::Error(xpc_connection::MessageError::ConnectionInvalid))) => {
self.event_handler_is_running = false;
Poll::Ready(None)
}
v => v,
}
}
}
unsafe impl Send for XPCClient {}
/// XPC-based implementation of DaemonInterface that sends method calls to the daemon over libxpc.
pub struct XpcDaemonInterface;
impl XpcDaemonInterface {
/// Create a new XpcDaemonInterface. No state is held.
pub fn new() -> Result<Self> {
Ok(Self)
}
fn build_service_name() -> Result<CString> {
let service_name = SERVICE_NAME.trim_end_matches('\0');
Ok(CString::new(service_name)?)
}
fn build_request(
method: &str,
args: Option<HashMap<CString, Message>>,
) -> HashMap<CString, Message> {
let mut request = HashMap::new();
request.insert(
CString::new("method").unwrap(),
Message::String(CString::new(method).unwrap()),
);
if let Some(arguments) = args {
request.insert(
CString::new("arguments").unwrap(),
Message::Dictionary(arguments),
);
}
request
}
async fn call_method(
&self,
client: &mut XPCClient,
method: &str,
args: Option<HashMap<CString, Message>>,
) -> anyhow::Result<HashMap<CString, Message>> {
let request = Self::build_request(method, args);
let reply = client.send_message_with_reply(Message::Dictionary(request));
match reply {
Message::Dictionary(map) => Ok(map),
other => Err(anyhow::anyhow!("Unexpected XPC reply: {:?}", other)),
}
}
fn key(k: &str) -> CString {
CString::new(k).unwrap()
}
fn get_string<'a>(map: &'a HashMap<CString, Message>, key: &str) -> Option<&'a CStr> {
map.get(&Self::key(key)).and_then(|v| match v {
Message::String(s) => Some(s.as_c_str()),
_ => None,
})
}
fn get_i64_from_str(map: &HashMap<CString, Message>, key: &str) -> Option<i64> {
Self::get_string(map, key).and_then(|s| s.to_string_lossy().parse().ok())
}
}
#[async_trait(?Send)]
impl DaemonInterface for XpcDaemonInterface {
async fn print_version(&mut self) -> Result<()> {
// Build service name and connect
let mach_port_name = Self::build_service_name()?;
let mut client = XPCClient::connect(&mach_port_name);
// Call generic method and parse reply
let map = self
.call_method(&mut client, GET_VERSION_METHOD, None)
.await?;
if let Some(ver) = Self::get_string(&map, "version") {
println!("Server version: {}", ver.to_string_lossy());
Ok(())
} else if let Some(ty) = Self::get_string(&map, "type") {
println!("XPC replied with type: {}", ty.to_string_lossy());
Ok(())
} else {
Err(anyhow::anyhow!(
"Unexpected XPC reply payload for GetVersion"
))
}
}
async fn print_conversations(&mut self) -> Result<()> {
// Connect
let mach_port_name = Self::build_service_name()?;
let mut client = XPCClient::connect(&mach_port_name);
// Build arguments: limit=100, offset=0 (string-encoded for portability)
let mut args = HashMap::new();
args.insert(
CString::new("limit").unwrap(),
Message::String(CString::new("100").unwrap()),
);
args.insert(
CString::new("offset").unwrap(),
Message::String(CString::new("0").unwrap()),
);
// Call
let reply = self
.call_method(&mut client, GET_CONVERSATIONS_METHOD, Some(args))
.await?;
// Expect an array under "conversations"
match reply.get(&Self::key("conversations")) {
Some(Message::Array(items)) => {
println!("Number of conversations: {}", items.len());
for item in items {
if let Message::Dictionary(map) = item {
// Convert to PrintableConversation
let guid = Self::get_string(map, "guid")
.map(|s| s.to_string_lossy().into_owned())
.unwrap_or_default();
let display_name = Self::get_string(map, "display_name")
.map(|s| s.to_string_lossy().into_owned());
let last_preview = Self::get_string(map, "last_message_preview")
.map(|s| s.to_string_lossy().into_owned());
let unread_count =
Self::get_i64_from_str(map, "unread_count").unwrap_or(0) as i32;
let date_ts: i64 = Self::get_i64_from_str(map, "date").unwrap_or(0);
let participants: Vec<String> = match map.get(&Self::key("participants")) {
Some(Message::Array(arr)) => arr
.iter()
.filter_map(|m| match m {
Message::String(s) => Some(s.to_string_lossy().into_owned()),
_ => None,
})
.collect(),
_ => Vec::new(),
};
// Build PrintableConversation directly
let conv = crate::printers::PrintableConversation {
guid,
display_name,
last_message_preview: last_preview,
unread_count,
date: time::OffsetDateTime::from_unix_timestamp(date_ts)
.unwrap_or_else(|_| time::OffsetDateTime::UNIX_EPOCH),
participants,
};
println!("{}", crate::printers::ConversationPrinter::new(&conv));
}
}
Ok(())
}
Some(other) => Err(anyhow::anyhow!(
"Unexpected conversations payload: {:?}",
other
)),
None => Err(anyhow::anyhow!("Missing conversations in reply")),
}
}
async fn sync_conversations(&mut self, _conversation_id: Option<String>) -> Result<()> {
let mach_port_name = Self::build_service_name()?;
let mut client = XPCClient::connect(&mach_port_name);
if let Some(id) = _conversation_id {
let mut args = HashMap::new();
args.insert(
Self::key("conversation_id"),
Message::String(CString::new(id).unwrap()),
);
let _ = self
.call_method(&mut client, "SyncConversation", Some(args))
.await?;
return Ok(());
}
let _ = self
.call_method(&mut client, "SyncAllConversations", None)
.await?;
Ok(())
}
async fn sync_conversations_list(&mut self) -> Result<()> {
let mach_port_name = Self::build_service_name()?;
let mut client = XPCClient::connect(&mach_port_name);
let _ = self
.call_method(&mut client, "SyncConversationList", None)
.await?;
Ok(())
}
async fn print_messages(
&mut self,
_conversation_id: String,
_last_message_id: Option<String>,
) -> Result<()> {
let mach_port_name = Self::build_service_name()?;
let mut client = XPCClient::connect(&mach_port_name);
let mut args = HashMap::new();
args.insert(
Self::key("conversation_id"),
Message::String(CString::new(_conversation_id).unwrap()),
);
if let Some(last) = _last_message_id {
args.insert(
Self::key("last_message_id"),
Message::String(CString::new(last).unwrap()),
);
}
let reply = self
.call_method(&mut client, "GetMessages", Some(args))
.await?;
match reply.get(&Self::key("messages")) {
Some(Message::Array(items)) => {
println!("Number of messages: {}", items.len());
for item in items {
if let Message::Dictionary(map) = item {
let guid = Self::get_string(map, "id")
.map(|s| s.to_string_lossy().into_owned())
.unwrap_or_default();
let sender = Self::get_string(map, "sender")
.map(|s| s.to_string_lossy().into_owned())
.unwrap_or_default();
let text = Self::get_string(map, "text")
.map(|s| s.to_string_lossy().into_owned())
.unwrap_or_default();
let date_ts = Self::get_i64_from_str(map, "date").unwrap_or(0);
let msg = crate::printers::PrintableMessage {
guid,
date: time::OffsetDateTime::from_unix_timestamp(date_ts)
.unwrap_or_else(|_| time::OffsetDateTime::UNIX_EPOCH),
sender,
text,
file_transfer_guids: vec![],
attachment_metadata: None,
};
println!("{}", crate::printers::MessagePrinter::new(&msg));
}
}
Ok(())
}
_ => Err(anyhow::anyhow!("Unexpected messages payload")),
}
}
async fn enqueue_outgoing_message(
&mut self,
_conversation_id: String,
_text: String,
) -> Result<()> {
let mach_port_name = Self::build_service_name()?;
let mut client = XPCClient::connect(&mach_port_name);
let mut args = HashMap::new();
args.insert(
Self::key("conversation_id"),
Message::String(CString::new(_conversation_id).unwrap()),
);
args.insert(
Self::key("text"),
Message::String(CString::new(_text).unwrap()),
);
let reply = self
.call_method(&mut client, "SendMessage", Some(args))
.await?;
if let Some(uuid) = Self::get_string(&reply, "uuid") {
println!("Outgoing message ID: {}", uuid.to_string_lossy());
}
Ok(())
}
async fn wait_for_signals(&mut self) -> Result<()> {
let mach_port_name = Self::build_service_name()?;
let mut client = XPCClient::connect(&mach_port_name);
// Subscribe to begin receiving signals on this connection
eprintln!("[kpcli] Sending SubscribeSignals");
client.send_message(Message::Dictionary(Self::build_request(
"SubscribeSignals",
None,
)));
println!("Waiting for XPC signals...");
while let Some(msg) = client.next().await {
match msg {
Message::Dictionary(map) => {
eprintln!("[kpcli] Received signal dictionary");
let name_key = Self::key("name");
let args_key = Self::key("arguments");
let name = match map.get(&name_key) {
Some(Message::String(s)) => s.to_string_lossy().into_owned(),
_ => continue,
};
match name.as_str() {
"ConversationsUpdated" => {
println!("Signal: Conversations updated");
}
"MessagesUpdated" => {
if let Some(Message::Dictionary(args)) = map.get(&args_key) {
if let Some(Message::String(cid)) =
args.get(&Self::key("conversation_id"))
{
println!(
"Signal: Messages updated for conversation {}",
cid.to_string_lossy()
);
}
}
}
"UpdateStreamReconnected" => {
println!("Signal: Update stream reconnected");
}
"AttachmentDownloadCompleted" => {
if let Some(Message::Dictionary(args)) = map.get(&args_key) {
if let Some(Message::String(aid)) =
args.get(&Self::key("attachment_id"))
{
println!(
"Signal: Attachment downloaded: {}",
aid.to_string_lossy()
);
}
}
}
"AttachmentDownloadFailed" => {
if let Some(Message::Dictionary(args)) = map.get(&args_key) {
if let Some(Message::String(aid)) =
args.get(&Self::key("attachment_id"))
{
eprintln!(
"Signal: Attachment download failed: {}",
aid.to_string_lossy()
);
}
}
}
"AttachmentUploadCompleted" => {
if let Some(Message::Dictionary(args)) = map.get(&args_key) {
let upload = args
.get(&Self::key("upload_guid"))
.and_then(|v| match v {
Message::String(s) => {
Some(s.to_string_lossy().into_owned())
}
_ => None,
})
.unwrap_or_default();
let attachment = args
.get(&Self::key("attachment_guid"))
.and_then(|v| match v {
Message::String(s) => {
Some(s.to_string_lossy().into_owned())
}
_ => None,
})
.unwrap_or_default();
println!(
"Signal: Attachment uploaded: upload={}, attachment={}",
upload, attachment
);
}
}
"ConfigChanged" => {
println!("Signal: Config changed");
}
_ => {}
}
}
Message::Error(xpc_connection::MessageError::ConnectionInvalid) => {
eprintln!("[kpcli] XPC connection invalid");
break;
}
other => {
eprintln!("[kpcli] Unexpected XPC message: {:?}", other);
}
}
}
Ok(())
}
async fn config(&mut self, _cmd: ConfigCommands) -> Result<()> {
let mach_port_name = Self::build_service_name()?;
let mut client = XPCClient::connect(&mach_port_name);
match _cmd {
ConfigCommands::Print => {
let reply = self
.call_method(&mut client, "GetAllSettings", None)
.await?;
let server_url = Self::get_string(&reply, "server_url")
.map(|s| s.to_string_lossy().into_owned())
.unwrap_or_default();
let username = Self::get_string(&reply, "username")
.map(|s| s.to_string_lossy().into_owned())
.unwrap_or_default();
let table =
prettytable::table!([b->"Server URL", &server_url], [b->"Username", &username]);
table.printstd();
Ok(())
}
ConfigCommands::SetServerUrl { url } => {
let mut args = HashMap::new();
args.insert(
Self::key("server_url"),
Message::String(CString::new(url).unwrap()),
);
let _ = self
.call_method(&mut client, "UpdateSettings", Some(args))
.await?;
Ok(())
}
ConfigCommands::SetUsername { username } => {
let mut args = HashMap::new();
args.insert(
Self::key("username"),
Message::String(CString::new(username).unwrap()),
);
let _ = self
.call_method(&mut client, "UpdateSettings", Some(args))
.await?;
Ok(())
}
}
}
async fn delete_all_conversations(&mut self) -> Result<()> {
let mach_port_name = Self::build_service_name()?;
let mut client = XPCClient::connect(&mach_port_name);
let _ = self
.call_method(&mut client, "DeleteAllConversations", None)
.await?;
Ok(())
}
async fn download_attachment(&mut self, _attachment_id: String) -> Result<()> {
let mach_port_name = Self::build_service_name()?;
let mut client = XPCClient::connect(&mach_port_name);
let mut args = HashMap::new();
args.insert(
Self::key("attachment_id"),
Message::String(CString::new(_attachment_id).unwrap()),
);
args.insert(
Self::key("preview"),
Message::String(CString::new("false").unwrap()),
);
let _ = self
.call_method(&mut client, "DownloadAttachment", Some(args))
.await?;
Ok(())
}
async fn upload_attachment(&mut self, _path: String) -> Result<()> {
let mach_port_name = Self::build_service_name()?;
let mut client = XPCClient::connect(&mach_port_name);
let mut args = HashMap::new();
args.insert(
Self::key("path"),
Message::String(CString::new(_path).unwrap()),
);
let reply = self
.call_method(&mut client, "UploadAttachment", Some(args))
.await?;
if let Some(guid) = Self::get_string(&reply, "upload_guid") {
println!("Upload GUID: {}", guid.to_string_lossy());
}
Ok(())
}
async fn mark_conversation_as_read(&mut self, _conversation_id: String) -> Result<()> {
let mach_port_name = Self::build_service_name()?;
let mut client = XPCClient::connect(&mach_port_name);
let mut args = HashMap::new();
args.insert(
Self::key("conversation_id"),
Message::String(CString::new(_conversation_id).unwrap()),
);
let _ = self
.call_method(&mut client, "MarkConversationAsRead", Some(args))
.await?;
Ok(())
}
}
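
All of the calls above share one envelope convention: scalar arguments cross the XPC boundary string-encoded (for example limit = "100", preview = "false") inside a HashMap<CString, Message>. A minimal sketch of a helper that builds that shape follows; string_args is hypothetical, the real code constructs the map inline at each call site.

use std::collections::HashMap;
use std::ffi::CString;

use xpc_connection::Message;

// Hypothetical helper (not part of the file above): build the
// string-encoded argument map that the calls above construct inline.
// Pairs containing interior NUL bytes are dropped in this sketch.
fn string_args<'a, I>(pairs: I) -> HashMap<CString, Message>
where
    I: IntoIterator<Item = (&'a str, &'a str)>,
{
    pairs
        .into_iter()
        .filter_map(|(k, v)| {
            Some((CString::new(k).ok()?, Message::String(CString::new(v).ok()?)))
        })
        .collect()
}

// Usage sketch: let args = string_args([("limit", "100"), ("offset", "0")]);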

239
core/kpcli/src/db/mod.rs Normal file
View File

@@ -0,0 +1,239 @@
use anyhow::Result;
use clap::Subcommand;
use kordophone::APIInterface;
use std::{env, path::PathBuf};
use crate::{
client,
printers::{ConversationPrinter, MessagePrinter},
};
use kordophone_db::database::{Database, DatabaseAccess};
#[derive(Subcommand)]
pub enum Commands {
    /// Commands for the cached conversations table.
Conversations {
#[clap(subcommand)]
command: ConversationCommands,
},
    /// Commands for the cached messages table.
Messages {
#[clap(subcommand)]
command: MessageCommands,
},
/// For managing settings in the database.
Settings {
#[clap(subcommand)]
command: SettingsCommands,
},
}
#[derive(Subcommand)]
pub enum ConversationCommands {
/// Lists all conversations currently in the database.
List,
/// Syncs with an API client.
Sync,
}
#[derive(Subcommand)]
pub enum MessageCommands {
/// Prints all messages in a conversation.
List { conversation_id: String },
}
#[derive(Subcommand)]
pub enum SettingsCommands {
/// Lists all settings or gets a specific setting.
Get {
/// The key to get. If not provided, all settings will be listed.
key: Option<String>,
},
/// Sets a setting value.
Put {
/// The key to set.
key: String,
/// The value to set.
value: String,
},
/// Deletes a setting.
Delete {
/// The key to delete.
key: String,
},
}
impl Commands {
pub async fn run(cmd: Commands) -> Result<()> {
let mut db = DbClient::new()?;
match cmd {
Commands::Conversations { command: cmd } => match cmd {
ConversationCommands::List => db.print_conversations().await,
ConversationCommands::Sync => db.sync_with_client().await,
},
Commands::Messages { command: cmd } => match cmd {
MessageCommands::List { conversation_id } => {
db.print_messages(&conversation_id).await
}
},
Commands::Settings { command: cmd } => match cmd {
SettingsCommands::Get { key } => db.get_setting(key).await,
SettingsCommands::Put { key, value } => db.put_setting(key, value).await,
SettingsCommands::Delete { key } => db.delete_setting(key).await,
},
}
}
}
struct DbClient {
database: Database,
}
impl DbClient {
fn database_path() -> PathBuf {
env::var("KORDOPHONE_DB_PATH")
.unwrap_or_else(|_| {
let temp_dir = env::temp_dir();
temp_dir.join("kpcli_chat.db").to_str().unwrap().to_string()
})
.into()
}
pub fn new() -> Result<Self> {
let path = Self::database_path();
let path_str: &str = path.as_path().to_str().unwrap();
println!("kpcli: Using db at {}", path_str);
let db = Database::new(path_str)?;
Ok(Self { database: db })
}
pub async fn print_conversations(&mut self) -> Result<()> {
let all_conversations = self
.database
.with_repository(|repository| repository.all_conversations(i32::MAX, 0))
.await?;
println!("{} Conversations: ", all_conversations.len());
for conversation in all_conversations {
println!("{}", ConversationPrinter::new(&conversation.into()));
}
Ok(())
}
pub async fn print_messages(&mut self, conversation_id: &str) -> Result<()> {
let messages = self
.database
.with_repository(|repository| repository.get_messages_for_conversation(conversation_id))
.await?;
for message in messages {
println!("{}", MessagePrinter::new(&message.into()));
}
Ok(())
}
pub async fn sync_with_client(&mut self) -> Result<()> {
let mut client = client::make_api_client_from_env();
let fetched_conversations = client.get_conversations().await?;
let db_conversations: Vec<kordophone_db::models::Conversation> = fetched_conversations
.into_iter()
.map(kordophone_db::models::Conversation::from)
.collect();
// Process each conversation
for conversation in db_conversations {
let conversation_id = conversation.guid.clone();
// Insert the conversation
self.database
.with_repository(|repository| repository.insert_conversation(conversation))
.await?;
// Fetch and sync messages for this conversation
let messages = client
.get_messages(&conversation_id, None, None, None)
.await?;
let db_messages: Vec<kordophone_db::models::Message> = messages
.into_iter()
.map(kordophone_db::models::Message::from)
.collect();
// Insert each message
self.database
.with_repository(|repository| -> Result<()> {
for message in db_messages {
repository.insert_message(&conversation_id, message)?;
}
Ok(())
})
.await?;
}
Ok(())
}
pub async fn get_setting(&mut self, key: Option<String>) -> Result<()> {
self.database
.with_settings(|settings| {
match key {
Some(key) => {
// Get a specific setting
let value: Option<String> = settings.get(&key)?;
match value {
Some(v) => println!("{} = {}", key, v),
None => println!("Setting '{}' not found", key),
}
}
None => {
// List all settings
let keys = settings.list_keys()?;
if keys.is_empty() {
println!("No settings found");
} else {
println!("Settings:");
for key in keys {
let value: Option<String> = settings.get(&key)?;
match value {
Some(v) => println!(" {} = {}", key, v),
None => println!(" {} = <error reading value>", key),
}
}
}
}
}
Ok(())
})
.await
}
pub async fn put_setting(&mut self, key: String, value: String) -> Result<()> {
self.database
.with_settings(|settings| {
settings.put(&key, &value)?;
Ok(())
})
.await
}
pub async fn delete_setting(&mut self, key: String) -> Result<()> {
self.database
.with_settings(|settings| {
let count = settings.del(&key)?;
if count == 0 {
println!("Setting '{}' not found", key);
}
Ok(())
})
.await
}
}
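
The settings subcommands above all funnel through Database::with_settings, which hands the closure a key-value settings store backed by the same SQLite file. A minimal sketch of the same pattern outside the CLI, assuming the signatures used above (Database::new, with_settings, settings.get):

use anyhow::Result;
use kordophone_db::database::{Database, DatabaseAccess};

// Sketch only: open the cache database and print one setting, mirroring
// DbClient::get_setting above. Signatures are assumed from that code.
async fn print_setting(db_path: &str, key: String) -> Result<()> {
    let mut db = Database::new(db_path)?;
    db.with_settings(move |settings| {
        let value: Option<String> = settings.get(&key)?;
        match value {
            Some(v) => println!("{} = {}", key, v),
            None => println!("Setting '{}' not found", key),
        }
        Ok(())
    })
    .await
}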

69
core/kpcli/src/main.rs Normal file
View File

@@ -0,0 +1,69 @@
mod client;
mod daemon;
mod db;
mod printers;
use anyhow::Result;
use clap::{Parser, Subcommand};
use log::LevelFilter;
/// A command line interface for the Kordophone library and daemon
#[derive(Parser)]
#[command(name = "kpcli")]
struct Cli {
#[command(subcommand)]
command: Commands,
}
#[derive(Subcommand)]
enum Commands {
/// Commands for api client operations
Client {
#[command(subcommand)]
command: client::Commands,
},
/// Commands for the cache database
Db {
#[command(subcommand)]
command: db::Commands,
},
/// Commands for interacting with the daemon
Daemon {
#[command(subcommand)]
command: daemon::Commands,
},
}
async fn run_command(command: Commands) -> Result<()> {
match command {
Commands::Client { command } => client::Commands::run(command).await,
Commands::Db { command } => db::Commands::run(command).await,
Commands::Daemon { command } => daemon::Commands::run(command).await,
}
}
fn initialize_logging() {
    // Parse RUST_LOG by hand so a missing or invalid value falls back to Info.
let log_level = std::env::var("RUST_LOG")
.map(|s| s.parse::<LevelFilter>().unwrap_or(LevelFilter::Info))
.unwrap_or(LevelFilter::Info);
env_logger::Builder::from_default_env()
.format_timestamp_secs()
.filter_level(log_level)
.init();
}
#[tokio::main]
async fn main() {
initialize_logging();
let cli = Cli::parse();
run_command(cli.command)
.await
.map_err(|e| println!("Error: {}", e))
.err();
}
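
initialize_logging above parses RUST_LOG by hand so that a missing value falls back to Info. If that behavior is all that is needed, env_logger can supply the default directly; a sketch (behavior differs slightly for malformed filter strings):

fn initialize_logging() {
    // Defaults to "info" when RUST_LOG is unset, otherwise defers to
    // env_logger's own filter parsing.
    env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info"))
        .format_timestamp_secs()
        .init();
}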

273
core/kpcli/src/printers.rs Normal file
View File

@@ -0,0 +1,273 @@
use kordophone::model::message::AttachmentMetadata;
use pretty::RcDoc;
use std::collections::HashMap;
use std::fmt::Display;
use time::OffsetDateTime;
#[cfg(target_os = "linux")]
use dbus::arg::{self, RefArg};
pub struct PrintableConversation {
pub guid: String,
pub date: OffsetDateTime,
pub unread_count: i32,
pub last_message_preview: Option<String>,
pub participants: Vec<String>,
pub display_name: Option<String>,
}
impl From<kordophone::model::Conversation> for PrintableConversation {
fn from(value: kordophone::model::Conversation) -> Self {
Self {
guid: value.guid,
date: value.date,
unread_count: value.unread_count,
last_message_preview: value.last_message_preview,
participants: value.participant_display_names,
display_name: value.display_name,
}
}
}
impl From<kordophone_db::models::Conversation> for PrintableConversation {
fn from(value: kordophone_db::models::Conversation) -> Self {
Self {
guid: value.guid,
date: OffsetDateTime::from_unix_timestamp(value.date.and_utc().timestamp()).unwrap(),
unread_count: value.unread_count.into(),
last_message_preview: value.last_message_preview,
participants: value
.participants
.into_iter()
.map(|p| p.display_name())
.collect(),
display_name: value.display_name,
}
}
}
#[cfg(target_os = "linux")]
impl From<dbus::arg::PropMap> for PrintableConversation {
fn from(value: dbus::arg::PropMap) -> Self {
Self {
guid: value.get("guid").unwrap().as_str().unwrap().to_string(),
date: OffsetDateTime::from_unix_timestamp(value.get("date").unwrap().as_i64().unwrap())
.unwrap(),
unread_count: value
.get("unread_count")
.unwrap()
.as_i64()
.unwrap()
.try_into()
.unwrap(),
last_message_preview: value
.get("last_message_preview")
.unwrap()
.as_str()
.map(|s| s.to_string()),
participants: value
.get("participants")
.unwrap()
.0
.as_iter()
.unwrap()
.map(|s| s.as_str().unwrap().to_string())
.collect(),
display_name: value
.get("display_name")
.unwrap()
.as_str()
.map(|s| s.to_string()),
}
}
}
pub struct PrintableMessage {
pub guid: String,
pub date: OffsetDateTime,
pub sender: String,
pub text: String,
pub file_transfer_guids: Vec<String>,
pub attachment_metadata: Option<HashMap<String, AttachmentMetadata>>,
}
impl From<kordophone::model::Message> for PrintableMessage {
fn from(value: kordophone::model::Message) -> Self {
Self {
guid: value.guid,
date: value.date,
sender: value.sender.unwrap_or("<me>".to_string()),
text: value.text,
file_transfer_guids: value.file_transfer_guids,
attachment_metadata: value.attachment_metadata,
}
}
}
impl From<kordophone_db::models::Message> for PrintableMessage {
fn from(value: kordophone_db::models::Message) -> Self {
Self {
guid: value.id,
date: OffsetDateTime::from_unix_timestamp(value.date.and_utc().timestamp()).unwrap(),
sender: value.sender.display_name(),
text: value.text,
file_transfer_guids: value.file_transfer_guids,
attachment_metadata: value.attachment_metadata,
}
}
}
#[cfg(target_os = "linux")]
impl From<dbus::arg::PropMap> for PrintableMessage {
fn from(value: dbus::arg::PropMap) -> Self {
// Parse file transfer GUIDs from JSON if present
let file_transfer_guids = value
.get("file_transfer_guids")
.and_then(|v| v.as_str())
.and_then(|json_str| serde_json::from_str(json_str).ok())
.unwrap_or_default();
// Parse attachment metadata from JSON if present
let attachment_metadata = value
.get("attachment_metadata")
.and_then(|v| v.as_str())
.and_then(|json_str| serde_json::from_str(json_str).ok());
Self {
guid: value.get("id").unwrap().as_str().unwrap().to_string(),
date: OffsetDateTime::from_unix_timestamp(value.get("date").unwrap().as_i64().unwrap())
.unwrap(),
sender: value.get("sender").unwrap().as_str().unwrap().to_string(),
text: value.get("text").unwrap().as_str().unwrap().to_string(),
file_transfer_guids,
attachment_metadata,
}
}
}
pub struct ConversationPrinter<'a> {
doc: RcDoc<'a, PrintableConversation>,
}
impl<'a> ConversationPrinter<'a> {
pub fn new(conversation: &'a PrintableConversation) -> Self {
let preview = conversation
.last_message_preview
.as_deref()
.unwrap_or("<null>")
.replace('\n', " ");
let doc = RcDoc::text(format!("<Conversation: \"{}\"", &conversation.guid))
.append(
RcDoc::line()
.append("Display Name: ")
.append(conversation.display_name.as_deref().unwrap_or("<null>"))
.append(RcDoc::line())
.append("Date: ")
.append(conversation.date.to_string())
.append(RcDoc::line())
.append("Unread Count: ")
.append(conversation.unread_count.to_string())
.append(RcDoc::line())
.append("Participants: ")
.append("[")
.append(
RcDoc::line()
.append(
conversation
.participants
.iter()
.map(|name| RcDoc::text(name).append(",").append(RcDoc::line()))
.fold(RcDoc::nil(), |acc, x| acc.append(x)),
)
.nest(4),
)
.append("]")
.append(RcDoc::line())
.append("Last Message Preview: ")
.append(preview)
.nest(4),
)
.append(RcDoc::line())
.append(">");
ConversationPrinter { doc }
}
}
impl Display for ConversationPrinter<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.doc.render_fmt(180, f)
}
}
pub struct MessagePrinter<'a> {
doc: RcDoc<'a, PrintableMessage>,
}
impl Display for MessagePrinter<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.doc.render_fmt(180, f)
}
}
impl<'a> MessagePrinter<'a> {
pub fn new(message: &'a PrintableMessage) -> Self {
let mut doc = RcDoc::text(format!("<Message: \"{}\"", &message.guid)).append(
RcDoc::line()
.append("Date: ")
.append(message.date.to_string())
.append(RcDoc::line())
.append("Sender: ")
.append(&message.sender)
.append(RcDoc::line())
.append("Body: ")
.append(&message.text)
.nest(4),
);
// Add file transfer GUIDs and attachment metadata if present
if !message.file_transfer_guids.is_empty() {
doc = doc.append(RcDoc::line()).append(
RcDoc::line()
.append("Attachments:")
.append(
message
.file_transfer_guids
.iter()
.map(|guid| {
let mut attachment_doc = RcDoc::line().append("- ").append(guid);
// Add metadata if available for this GUID
if let Some(ref metadata) = message.attachment_metadata {
if let Some(attachment_meta) = metadata.get(guid) {
if let Some(ref attribution) =
attachment_meta.attribution_info
{
if let (Some(width), Some(height)) =
(attribution.width, attribution.height)
{
attachment_doc = attachment_doc
.append(RcDoc::line())
.append(" Dimensions: ")
.append(width.to_string())
.append(" × ")
.append(height.to_string());
}
}
}
}
attachment_doc
})
.fold(RcDoc::nil(), |acc, x| acc.append(x)),
)
.nest(4),
);
}
doc = doc.append(RcDoc::line()).append(">");
MessagePrinter { doc }
}
}
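
Both printers build a pretty::RcDoc layout and render it at a width of 180 columns through Display. An illustrative use from inside the kpcli crate, constructing a PrintableMessage by hand the same way the XPC path does:

use crate::printers::{MessagePrinter, PrintableMessage};
use time::OffsetDateTime;

// Illustrative only: render a hand-built message with the same printer the
// CLI uses for daemon and database output.
fn print_example_message() {
    let msg = PrintableMessage {
        guid: "example-guid".to_string(),
        date: OffsetDateTime::UNIX_EPOCH,
        sender: "<me>".to_string(),
        text: "Hello from kpcli".to_string(),
        file_transfer_guids: vec![],
        attachment_metadata: None,
    };
    println!("{}", MessagePrinter::new(&msg));
}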

12
core/utilities/Cargo.toml Normal file
View File

@@ -0,0 +1,12 @@
[package]
name = "kordophone-utilities"
version = "0.1.0"
edition = "2024"
[dependencies]
env_logger = "0.11.5"
futures-util = "0.3.31"
hyper = { version = "0.14" }
kordophone = { path = "../kordophone" }
log = { version = "0.4.21", features = [] }
tokio = { version = "1.37.0", features = ["full"] }

View File

@@ -0,0 +1,94 @@
use std::env;
use std::process;
use kordophone::{
api::{HTTPAPIClient, InMemoryAuthenticationStore, EventSocket},
model::{ConversationID, event::EventData},
APIInterface,
};
use kordophone::api::http_client::Credentials;
use kordophone::api::AuthenticationStore;
use futures_util::StreamExt;
use hyper::Uri;
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
env_logger::init();
let args: Vec<String> = env::args().collect();
if args.len() < 2 {
eprintln!("Usage: {} <conversation_id1> [conversation_id2] [conversation_id3] ...", args[0]);
eprintln!("Environment variables required:");
eprintln!(" KORDOPHONE_API_URL - Server URL");
eprintln!(" KORDOPHONE_USERNAME - Username for authentication");
eprintln!(" KORDOPHONE_PASSWORD - Password for authentication");
process::exit(1);
}
// Read environment variables
let server_url: Uri = env::var("KORDOPHONE_API_URL")
.map_err(|_| "KORDOPHONE_API_URL environment variable not set")?
.parse()?;
let username = env::var("KORDOPHONE_USERNAME")
.map_err(|_| "KORDOPHONE_USERNAME environment variable not set")?;
let password = env::var("KORDOPHONE_PASSWORD")
.map_err(|_| "KORDOPHONE_PASSWORD environment variable not set")?;
let credentials = Credentials { username, password };
// Collect all conversation IDs from command line arguments
    let target_conversation_ids: Vec<ConversationID> = args[1..].iter().cloned().collect();
println!("Monitoring {} conversation(s) for updates: {:?}",
target_conversation_ids.len(), target_conversation_ids);
let auth_store = InMemoryAuthenticationStore::new(Some(credentials.clone()));
let mut client = HTTPAPIClient::new(server_url, auth_store);
let _ = client.authenticate(credentials).await?;
// Open event socket
let event_socket = client.open_event_socket(None).await?;
let (mut stream, _sink) = event_socket.events().await;
println!("Connected to event stream, waiting for updates...");
// Process events
while let Some(event_result) = stream.next().await {
match event_result {
Ok(socket_event) => {
match socket_event {
kordophone::api::event_socket::SocketEvent::Update(event) => {
match event.data {
EventData::MessageReceived(conversation, _message) => {
if target_conversation_ids.contains(&conversation.guid) {
println!("Message update detected for conversation {}, marking as read...", conversation.guid);
match client.mark_conversation_as_read(&conversation.guid).await {
Ok(_) => println!("Successfully marked conversation {} as read", conversation.guid),
Err(e) => eprintln!("Failed to mark conversation {} as read: {:?}", conversation.guid, e),
}
}
},
_ => {}
}
},
kordophone::api::event_socket::SocketEvent::Pong => {
// Ignore pong messages
}
}
},
Err(e) => {
eprintln!("Error receiving event: {:?}", e);
break;
}
}
}
println!("Event stream ended");
Ok(())
}
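
The monitor exits once the event stream ends or errors. For unattended use, the connect-and-listen body could be wrapped in a retry loop; a generic sketch, where run_once is a hypothetical stand-in for the body of main above:

use std::time::Duration;

// Sketch: keep re-running a fallible connect-and-listen step with a fixed
// back-off. run_once stands in for the body of main above.
async fn run_with_retry<F, Fut>(mut run_once: F)
where
    F: FnMut() -> Fut,
    Fut: std::future::Future<Output = Result<(), Box<dyn std::error::Error>>>,
{
    loop {
        if let Err(e) = run_once().await {
            eprintln!("Event loop error: {e:?}");
        }
        tokio::time::sleep(Duration::from_secs(5)).await;
    }
}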