Compare commits
36 Commits
codex/chat
...
ios-pull-t
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9572d0320f | ||
| bca408c971 | |||
| 2f265fd847 | |||
| 29e340fd08 | |||
| 6fbcaecbf8 | |||
| 519ebd15dd | |||
| 8051dd2c71 | |||
| 2313e560e8 | |||
| 94565298d8 | |||
| 7360604136 | |||
| ca6b5e0807 | |||
| 4b0cc3fbf7 | |||
| 2da73f802c | |||
| 4ad36d9bf6 | |||
| cf9832ca3b | |||
| 2c32ca66e2 | |||
| 015253c0af | |||
| 8d6c069a33 | |||
| d579b5bf75 | |||
| 01ee807991 | |||
| fd9ee455fb | |||
| 38da3cea72 | |||
| 11e6875de9 | |||
| 5a690b276f | |||
| d7967eaa75 | |||
| 2125c5dfa4 | |||
| 815655a73c | |||
| b85409d977 | |||
| d9f27213e7 | |||
| 3a6c40cb3c | |||
| 188c460826 | |||
| 90278020f5 | |||
| cafe4bb9ae | |||
| 57a6287b2b | |||
| ba6fc9c660 | |||
| 85f8d6b5f3 |
@@ -24,6 +24,10 @@ COPY server/package.json server/package-lock.json ./
|
||||
COPY server/scripts ./scripts
|
||||
COPY server/prisma ./prisma
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends openssh-client \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN npm ci --omit=dev --no-audit --no-fund
|
||||
|
||||
COPY --from=server-build /app/server/dist ./dist
|
||||
|
||||
1
dist/default.conf
vendored
1
dist/default.conf
vendored
@@ -1,6 +1,7 @@
|
||||
server {
|
||||
listen 80;
|
||||
server_name _;
|
||||
client_max_body_size 32m;
|
||||
root /usr/share/nginx/html;
|
||||
index index.html;
|
||||
|
||||
|
||||
@@ -13,8 +13,24 @@ services:
|
||||
ANTHROPIC_API_KEY: ${ANTHROPIC_API_KEY:-}
|
||||
XAI_API_KEY: ${XAI_API_KEY:-}
|
||||
EXA_API_KEY: ${EXA_API_KEY:-}
|
||||
CHAT_WEB_SEARCH_ENGINE: ${CHAT_WEB_SEARCH_ENGINE:-exa}
|
||||
SEARXNG_BASE_URL: ${SEARXNG_BASE_URL:-}
|
||||
CHAT_MAX_TOOL_ROUNDS: ${CHAT_MAX_TOOL_ROUNDS:-100}
|
||||
CHAT_CODEX_TOOL_ENABLED: ${CHAT_CODEX_TOOL_ENABLED:-false}
|
||||
CHAT_CODEX_REMOTE_HOST: ${CHAT_CODEX_REMOTE_HOST:-}
|
||||
CHAT_CODEX_REMOTE_USER: ${CHAT_CODEX_REMOTE_USER:-}
|
||||
CHAT_CODEX_REMOTE_PORT: ${CHAT_CODEX_REMOTE_PORT:-22}
|
||||
CHAT_CODEX_REMOTE_WORKDIR: ${CHAT_CODEX_REMOTE_WORKDIR:-/workspace/sybil-codex}
|
||||
# Prefer mounting a private key read-only and pointing CHAT_CODEX_SSH_KEY_PATH at it.
|
||||
CHAT_CODEX_SSH_KEY_PATH: ${CHAT_CODEX_SSH_KEY_PATH:-}
|
||||
CHAT_CODEX_SSH_PRIVATE_KEY_B64: ${CHAT_CODEX_SSH_PRIVATE_KEY_B64:-}
|
||||
CHAT_CODEX_EXEC_TIMEOUT_MS: ${CHAT_CODEX_EXEC_TIMEOUT_MS:-600000}
|
||||
CHAT_SHELL_TOOL_ENABLED: ${CHAT_SHELL_TOOL_ENABLED:-false}
|
||||
CHAT_SHELL_EXEC_TIMEOUT_MS: ${CHAT_SHELL_EXEC_TIMEOUT_MS:-120000}
|
||||
volumes:
|
||||
- sybil_data:/data
|
||||
# Example key mount for codex_exec:
|
||||
# - ./secrets/devbox_id_ed25519:/run/secrets/codex_ssh_key:ro
|
||||
expose:
|
||||
- "8787"
|
||||
restart: unless-stopped
|
||||
|
||||
134
docs/api/rest.md
134
docs/api/rest.md
@@ -10,6 +10,12 @@ Content type:
|
||||
- Requests with bodies use `application/json`.
|
||||
- Responses are JSON unless noted otherwise.
|
||||
|
||||
Chat upload limits:
|
||||
- Chat completion and direct message payloads support inline attachments up to a 32 MB request body.
|
||||
- Up to 8 attachments per message.
|
||||
- Image attachments: PNG or JPEG only, max 6 MB each.
|
||||
- Text attachments: up to 8 MB source size each; server accepts at most 200,000 characters of inlined text content per attachment.
|
||||
|
||||
## Health + Auth
|
||||
|
||||
### `GET /health`
|
||||
@@ -31,6 +37,7 @@ Content type:
|
||||
}
|
||||
}
|
||||
```
|
||||
- OpenAI model lists are filtered to models that are expected to work with the backend's Responses API implementation.
|
||||
|
||||
## Chats
|
||||
|
||||
@@ -38,9 +45,29 @@ Content type:
|
||||
- Response: `{ "chats": ChatSummary[] }`
|
||||
|
||||
### `POST /v1/chats`
|
||||
- Body: `{ "title"?: string }`
|
||||
- Body:
|
||||
```json
|
||||
{
|
||||
"title": "optional title",
|
||||
"provider": "optional openai|anthropic|xai",
|
||||
"model": "optional model id",
|
||||
"messages": [
|
||||
{
|
||||
"role": "system|user|assistant|tool",
|
||||
"content": "string",
|
||||
"name": "optional",
|
||||
"attachments": []
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
- Response: `{ "chat": ChatSummary }`
|
||||
|
||||
Behavior notes:
|
||||
- `provider` and `model` must be supplied together when present.
|
||||
- When `provider`/`model` are supplied, the new chat initializes `initiatedProvider`/`initiatedModel` and `lastUsedProvider`/`lastUsedModel`.
|
||||
- Optional `messages` are inserted as the initial transcript. Attachment metadata uses the same schema and limits as chat completion messages.
|
||||
|
||||
### `PATCH /v1/chats/:chatId`
|
||||
- Body: `{ "title": string }`
|
||||
- Response: `{ "chat": ChatSummary }`
|
||||
@@ -74,11 +101,34 @@ Behavior notes:
|
||||
"role": "system|user|assistant|tool",
|
||||
"content": "string",
|
||||
"name": "optional",
|
||||
"metadata": {}
|
||||
"metadata": {},
|
||||
"attachments": [
|
||||
{
|
||||
"kind": "image",
|
||||
"id": "attachment-id",
|
||||
"filename": "photo.jpg",
|
||||
"mimeType": "image/jpeg",
|
||||
"sizeBytes": 12345,
|
||||
"dataUrl": "data:image/jpeg;base64,..."
|
||||
},
|
||||
{
|
||||
"kind": "text",
|
||||
"id": "attachment-id",
|
||||
"filename": "notes.md",
|
||||
"mimeType": "text/markdown",
|
||||
"sizeBytes": 4567,
|
||||
"text": "# Notes\\n...",
|
||||
"truncated": false
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
- Response: `{ "message": Message }`
|
||||
|
||||
Notes:
|
||||
- `attachments` is optional and is merged into stored `message.metadata.attachments`.
|
||||
- Tool messages should not include attachments.
|
||||
|
||||
## Chat Completions (non-streaming)
|
||||
|
||||
### `POST /v1/chat-completions`
|
||||
@@ -89,7 +139,30 @@ Behavior notes:
|
||||
"provider": "openai|anthropic|xai",
|
||||
"model": "string",
|
||||
"messages": [
|
||||
{ "role": "system|user|assistant|tool", "content": "string", "name": "optional" }
|
||||
{
|
||||
"role": "system|user|assistant|tool",
|
||||
"content": "string",
|
||||
"name": "optional",
|
||||
"attachments": [
|
||||
{
|
||||
"kind": "image",
|
||||
"id": "attachment-id",
|
||||
"filename": "photo.jpg",
|
||||
"mimeType": "image/jpeg",
|
||||
"sizeBytes": 12345,
|
||||
"dataUrl": "data:image/jpeg;base64,..."
|
||||
},
|
||||
{
|
||||
"kind": "text",
|
||||
"id": "attachment-id",
|
||||
"filename": "notes.md",
|
||||
"mimeType": "text/markdown",
|
||||
"sizeBytes": 4567,
|
||||
"text": "# Notes\\n...",
|
||||
"truncated": false
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"temperature": 0.2,
|
||||
"maxTokens": 256
|
||||
@@ -112,11 +185,33 @@ Behavior notes:
|
||||
- For `chatId` calls, server stores only *new* non-assistant messages from provided history to avoid duplicates.
|
||||
- Server persists final assistant output and call metadata (`LlmCall`) in DB.
|
||||
- Server updates chat-level model metadata on each call: `lastUsedProvider`/`lastUsedModel`; first successful/failed call also initializes `initiatedProvider`/`initiatedModel` if unset.
|
||||
- For `openai` and `xai`, backend enables tool use during chat completion with an internal system instruction.
|
||||
- Available tool calls for chat: `web_search` and `fetch_url`.
|
||||
- `web_search` uses Exa and returns ranked results with per-result summaries/snippets.
|
||||
- Attachments are optional and currently apply to `user` messages. Persisted chat history stores them under `message.metadata.attachments`.
|
||||
- Images are forwarded inline to providers as multimodal image parts. Use PNG or JPEG for cross-provider compatibility.
|
||||
- Text files are forwarded as explicit text blocks rather than provider-managed file references. Large text attachments should already be truncated client-side before submission.
|
||||
- For `openai`, backend calls OpenAI's Responses API and enables internal tool use with an internal system instruction.
|
||||
- For `xai`, backend calls xAI's OpenAI-compatible Chat Completions API and enables internal tool use with the same internal system instruction.
|
||||
- For `openai`, image attachments are sent as Responses `input_image` items and text attachments are sent as `input_text` items.
|
||||
- For `xai`, image attachments are sent as Chat Completions content parts alongside text.
|
||||
- For `openai`, Responses calls that can enter the server-managed tool loop use `store: true` so reasoning and function-call items can be passed between tool rounds.
|
||||
- For `anthropic`, image attachments are sent as Messages API `image` blocks using base64 source data; text attachments are added as `text` blocks.
|
||||
- Available tool calls for chat: `web_search` and `fetch_url`. When `CHAT_CODEX_TOOL_ENABLED=true`, `codex_exec` is also available. When `CHAT_SHELL_TOOL_ENABLED=true`, `shell_exec` is also available.
|
||||
- `web_search` returns ranked results with per-result summaries/snippets. Its backend engine is selected by `CHAT_WEB_SEARCH_ENGINE` (`exa` default, or `searxng` with `SEARXNG_BASE_URL` set). SearXNG mode requires the instance to allow `format=json`.
|
||||
- `fetch_url` fetches a URL and returns plaintext page content (HTML converted to text server-side).
|
||||
- When a tool call is executed, backend stores a chat `Message` with `role: "tool"` and tool metadata (`metadata.kind = "tool_call"`), then stores the assistant output.
|
||||
- `codex_exec` delegates coding, shell, repository inspection, and other complex software tasks to a persistent remote Codex CLI workspace over SSH. The server runs `codex exec --dangerously-bypass-approvals-and-sandbox --skip-git-repo-check <non-interactive wrapped prompt>` on the configured devbox inside `CHAT_CODEX_REMOTE_WORKDIR`, with SSH stdin closed.
|
||||
- `shell_exec` runs arbitrary non-interactive shell commands on the same configured devbox, starting in `CHAT_CODEX_REMOTE_WORKDIR`. It uses `bash -lc` when bash exists, otherwise `sh -lc`, closes SSH stdin, and does not run inside the Sybil server container.
|
||||
- Devbox tool configuration:
|
||||
- `CHAT_MAX_TOOL_ROUNDS=100` (optional; maximum model/tool result cycles before the backend returns a limit message)
|
||||
- `CHAT_CODEX_TOOL_ENABLED=true`
|
||||
- `CHAT_SHELL_TOOL_ENABLED=true`
|
||||
- `CHAT_CODEX_REMOTE_HOST=<host-or-ip>` (required when enabled)
|
||||
- `CHAT_CODEX_REMOTE_USER=<ssh-user>` (optional; omitted if `CHAT_CODEX_REMOTE_HOST` already contains `user@host`)
|
||||
- `CHAT_CODEX_REMOTE_PORT=22` (optional)
|
||||
- `CHAT_CODEX_REMOTE_WORKDIR=/workspace/sybil-codex` (optional; created on the remote host if missing)
|
||||
- `CHAT_CODEX_SSH_KEY_PATH=/run/secrets/codex_ssh_key` (recommended private-key delivery via read-only volume mount)
|
||||
- `CHAT_CODEX_SSH_PRIVATE_KEY_B64=<base64-private-key>` (optional fallback when a volume mount is not practical)
|
||||
- `CHAT_CODEX_EXEC_TIMEOUT_MS=600000` (optional)
|
||||
- `CHAT_SHELL_EXEC_TIMEOUT_MS=120000` (optional)
|
||||
- When a tool call is executed, backend stores a chat `Message` with `role: "tool"` and tool metadata (`metadata.kind = "tool_call"`). Streaming requests persist each completed tool call as its SSE `tool_call` event is emitted, then store the assistant output when the completion finishes.
|
||||
- `anthropic` currently runs without server-managed tool calls.
|
||||
|
||||
## Searches
|
||||
@@ -161,6 +256,7 @@ Behavior notes:
|
||||
|
||||
Search run notes:
|
||||
- Backend executes Exa search and Exa answer.
|
||||
- Search mode is independent from chat `web_search` tool configuration and remains Exa-only.
|
||||
- Persists answer text/citations + ranked results.
|
||||
- If both search and answer fail, endpoint returns an error.
|
||||
|
||||
@@ -188,10 +284,32 @@ Search run notes:
|
||||
"role": "system|user|assistant|tool",
|
||||
"content": "...",
|
||||
"name": null,
|
||||
"metadata": null
|
||||
"metadata": {
|
||||
"attachments": [
|
||||
{
|
||||
"kind": "image",
|
||||
"id": "attachment-id",
|
||||
"filename": "photo.jpg",
|
||||
"mimeType": "image/jpeg",
|
||||
"sizeBytes": 12345,
|
||||
"dataUrl": "data:image/jpeg;base64,..."
|
||||
},
|
||||
{
|
||||
"kind": "text",
|
||||
"id": "attachment-id",
|
||||
"filename": "notes.md",
|
||||
"mimeType": "text/markdown",
|
||||
"sizeBytes": 4567,
|
||||
"text": "# Notes\\n...",
|
||||
"truncated": false
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
`metadata` remains nullable. Tool-call log messages still use `metadata.kind = "tool_call"`; regular user messages with attachments use `metadata.attachments`.
|
||||
|
||||
`ChatDetail`
|
||||
```json
|
||||
{
|
||||
|
||||
@@ -9,6 +9,7 @@ Transport:
|
||||
- HTTP response uses `Content-Type: text/event-stream; charset=utf-8`
|
||||
- Events are emitted in SSE format (`event: ...`, `data: ...`)
|
||||
- Request body is JSON
|
||||
- Request body supports the same inline attachment schema and limits documented in `docs/api/rest.md`.
|
||||
|
||||
Authentication:
|
||||
- Same as REST endpoints (`Authorization: Bearer <token>` when token mode is enabled)
|
||||
@@ -18,10 +19,34 @@ Authentication:
|
||||
```json
|
||||
{
|
||||
"chatId": "optional-chat-id",
|
||||
"persist": true,
|
||||
"provider": "openai|anthropic|xai",
|
||||
"model": "string",
|
||||
"messages": [
|
||||
{ "role": "system|user|assistant|tool", "content": "string", "name": "optional" }
|
||||
{
|
||||
"role": "system|user|assistant|tool",
|
||||
"content": "string",
|
||||
"name": "optional",
|
||||
"attachments": [
|
||||
{
|
||||
"kind": "image",
|
||||
"id": "attachment-id",
|
||||
"filename": "photo.jpg",
|
||||
"mimeType": "image/jpeg",
|
||||
"sizeBytes": 12345,
|
||||
"dataUrl": "data:image/jpeg;base64,..."
|
||||
},
|
||||
{
|
||||
"kind": "text",
|
||||
"id": "attachment-id",
|
||||
"filename": "notes.md",
|
||||
"mimeType": "text/markdown",
|
||||
"sizeBytes": 4567,
|
||||
"text": "# Notes\\n...",
|
||||
"truncated": false
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"temperature": 0.2,
|
||||
"maxTokens": 256
|
||||
@@ -29,9 +54,12 @@ Authentication:
|
||||
```
|
||||
|
||||
Notes:
|
||||
- If `chatId` is omitted, backend creates a new chat.
|
||||
- `persist` defaults to `true`.
|
||||
- If `persist` is `true` and `chatId` is omitted, backend creates a new chat.
|
||||
- If `chatId` is provided, backend validates it exists.
|
||||
- Backend stores only new non-assistant input history rows to avoid duplicates.
|
||||
- If `persist` is `false`, `chatId` must be omitted. Backend does not create a chat and does not persist input messages, tool-call messages, assistant output, or `LlmCall` metadata.
|
||||
- For persisted streams, backend stores only new non-assistant input history rows to avoid duplicates.
|
||||
- Attachments are optional and are persisted under `message.metadata.attachments` on stored user messages when `persist` is `true`.
|
||||
|
||||
## Event Stream Contract
|
||||
|
||||
@@ -46,13 +74,15 @@ Event order:
|
||||
```json
|
||||
{
|
||||
"type": "meta",
|
||||
"chatId": "chat-id",
|
||||
"callId": "llm-call-id",
|
||||
"chatId": "chat-id-or-null",
|
||||
"callId": "llm-call-id-or-null",
|
||||
"provider": "openai",
|
||||
"model": "gpt-4.1-mini"
|
||||
}
|
||||
```
|
||||
|
||||
For `persist: false` streams, `chatId` and `callId` are `null`.
|
||||
|
||||
### `delta`
|
||||
|
||||
```json
|
||||
@@ -102,29 +132,43 @@ Event order:
|
||||
|
||||
## Provider Streaming Behavior
|
||||
|
||||
- `openai`: backend may execute internal tool calls (`web_search`, `fetch_url`) before producing final text.
|
||||
- `xai`: same tool-enabled behavior as OpenAI.
|
||||
- `anthropic`: streamed via event stream; emits `delta` from `content_block_delta` with `text_delta`.
|
||||
- `openai`: backend uses OpenAI's Responses API and may execute internal function tool calls (`web_search`, `fetch_url`, optional `codex_exec`, and optional `shell_exec`) before producing final text.
|
||||
- `xai`: backend uses xAI's OpenAI-compatible Chat Completions API and may execute the same internal tool calls before producing final text.
|
||||
- `openai`: image attachments are sent as Responses `input_image` items; text attachments are sent as `input_text` items.
|
||||
- `xai`: image attachments are sent as Chat Completions content parts; text attachments are inlined as text parts.
|
||||
- `openai`: Responses calls that can enter the server-managed tool loop use `store: true` so reasoning and function-call items can be passed between tool rounds.
|
||||
- `anthropic`: streamed via event stream; emits `delta` from `content_block_delta` with `text_delta`. Image attachments are sent as base64 `image` blocks and text attachments are appended as `text` blocks.
|
||||
- `web_search` uses `CHAT_WEB_SEARCH_ENGINE` (`exa` default, or `searxng` with `SEARXNG_BASE_URL` set). SearXNG mode requires the instance to allow `format=json`. This only affects chat-mode tool calls, not search-mode endpoints.
|
||||
- `codex_exec` is available only when `CHAT_CODEX_TOOL_ENABLED=true`. It SSHes to `CHAT_CODEX_REMOTE_HOST`, creates/uses `CHAT_CODEX_REMOTE_WORKDIR`, and runs `codex exec --dangerously-bypass-approvals-and-sandbox --skip-git-repo-check <non-interactive wrapped prompt>` there with SSH stdin closed. Prefer `CHAT_CODEX_SSH_KEY_PATH` with a read-only mounted private key; `CHAT_CODEX_SSH_PRIVATE_KEY_B64` is also supported.
|
||||
- `shell_exec` is available only when `CHAT_SHELL_TOOL_ENABLED=true`. It uses the same devbox SSH configuration, starts in `CHAT_CODEX_REMOTE_WORKDIR`, and runs non-interactive shell commands there with SSH stdin closed, not inside the Sybil server container.
|
||||
- `CHAT_MAX_TOOL_ROUNDS` controls how many model/tool result cycles may occur before the backend returns a tool-call limit message; default is 100.
|
||||
|
||||
Tool-enabled streaming notes (`openai`/`xai`):
|
||||
- Stream still emits standard `meta`, `delta`, `done|error` events.
|
||||
- Stream may emit `tool_call` events while tool calls are executed.
|
||||
- `delta` events stream incrementally as text is generated.
|
||||
- `delta` events carry assistant text and are emitted incrementally for normal text rounds. The backend may buffer model-native text briefly while determining whether a provider round contains tool calls.
|
||||
- OpenAI Responses stream events are normalized by the backend into this SSE contract; clients do not consume OpenAI's raw Responses stream event names.
|
||||
|
||||
## Persistence + Consistency Model
|
||||
|
||||
Backend database remains source of truth.
|
||||
|
||||
During stream:
|
||||
For persisted streams:
|
||||
- Client may optimistically render accumulated `delta` text.
|
||||
- Backend persists each completed tool call as a `tool` message before emitting its `tool_call` SSE event, so chat detail refreshes can show completed tool calls while the assistant response is still running.
|
||||
|
||||
On successful completion:
|
||||
On successful persisted completion:
|
||||
- Backend persists assistant `Message` and updates `LlmCall` usage/latency in a transaction.
|
||||
- Backend then emits `done`.
|
||||
|
||||
On failure:
|
||||
On persisted failure:
|
||||
- Backend records call error and emits `error`.
|
||||
|
||||
For `persist: false` streams:
|
||||
- Client may render the same `meta`, `tool_call`, `delta`, and terminal events.
|
||||
- Backend does not write any chat, message, tool-call log, assistant output, or call metadata rows.
|
||||
- `done.text` is the canonical assistant text if the client later imports the result into a saved chat.
|
||||
|
||||
Client recommendation (for iOS/web):
|
||||
1. Render deltas in real time for UX.
|
||||
2. On `done`, refresh chat detail from REST (`GET /v1/chats/:chatId`) and use DB-backed data as canonical.
|
||||
|
||||
@@ -35,7 +35,7 @@ Instructions for work under `/Users/buzzert/src/sybil-2/ios`.
|
||||
|
||||
## Practical Notes
|
||||
- Default API URL is `http://127.0.0.1:8787` (configurable in-app).
|
||||
- Previously saved `/api` API roots are normalized to the server root by the iOS client.
|
||||
- The iOS client preserves an explicit `/api` base path for proxied deployments.
|
||||
- Provider fallback models:
|
||||
- OpenAI: `gpt-4.1-mini`
|
||||
- Anthropic: `claude-3-5-sonnet-latest`
|
||||
|
||||
@@ -9,5 +9,8 @@ struct SybilApp: App
|
||||
WindowGroup {
|
||||
SplitView()
|
||||
}
|
||||
.commands {
|
||||
SybilCommands()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
targets:
|
||||
SybilApp:
|
||||
type: application
|
||||
platform: iOS
|
||||
supportedDestinations:
|
||||
- iOS
|
||||
- macCatalyst
|
||||
deploymentTarget: "18.0"
|
||||
sources:
|
||||
- Sources
|
||||
@@ -12,16 +14,19 @@ targets:
|
||||
settings:
|
||||
base:
|
||||
PRODUCT_BUNDLE_IDENTIFIER: net.buzzert.sybil2
|
||||
PRODUCT_NAME: Sybil
|
||||
PRODUCT_MODULE_NAME: SybilApp
|
||||
DEVELOPMENT_TEAM: DQQH5H6GBD
|
||||
CODE_SIGN_STYLE: Automatic
|
||||
SWIFT_VERSION: 6.0
|
||||
TARGETED_DEVICE_FAMILY: "1,2"
|
||||
SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD: NO
|
||||
TARGETED_DEVICE_FAMILY: "1,2,6"
|
||||
GENERATE_INFOPLIST_FILE: YES
|
||||
ASSETCATALOG_COMPILER_APPICON_NAME: AppIcon
|
||||
MARKETING_VERSION: 1.0
|
||||
CURRENT_PROJECT_VERSION: 1
|
||||
MARKETING_VERSION: 1.4
|
||||
CURRENT_PROJECT_VERSION: 5
|
||||
INFOPLIST_KEY_CFBundleDisplayName: Sybil
|
||||
INFOPLIST_KEY_ITSAppUsesNonExemptEncryption: NO
|
||||
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents: YES
|
||||
INFOPLIST_KEY_UILaunchScreen_Generation: YES
|
||||
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone: UIInterfaceOrientationPortrait
|
||||
|
||||
33
ios/Packages/Sybil/Package.resolved
Normal file
33
ios/Packages/Sybil/Package.resolved
Normal file
@@ -0,0 +1,33 @@
|
||||
{
|
||||
"originHash" : "a6321e2b291c1094ca66f749c90095f05aac7f8c6b4a6e54e0e77a1bb0e1a79f",
|
||||
"pins" : [
|
||||
{
|
||||
"identity" : "networkimage",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/gonzalezreal/NetworkImage",
|
||||
"state" : {
|
||||
"revision" : "2849f5323265386e200484b0d0f896e73c3411b9",
|
||||
"version" : "6.0.1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-cmark",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/swiftlang/swift-cmark",
|
||||
"state" : {
|
||||
"revision" : "5d9bdaa4228b381639fff09403e39a04926e2dbe",
|
||||
"version" : "0.7.1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"identity" : "swift-markdown-ui",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/gonzalezreal/swift-markdown-ui.git",
|
||||
"state" : {
|
||||
"revision" : "5f613358148239d0292c0cef674a3c2314737f9e",
|
||||
"version" : "2.4.1"
|
||||
}
|
||||
}
|
||||
],
|
||||
"version" : 3
|
||||
}
|
||||
@@ -3,9 +3,36 @@ import SwiftUI
|
||||
public struct SplitView: View {
|
||||
@State private var viewModel = SybilViewModel()
|
||||
@Environment(\.horizontalSizeClass) private var horizontalSizeClass
|
||||
@Environment(\.scenePhase) private var scenePhase
|
||||
@State private var shouldRefreshOnForeground = false
|
||||
@State private var composerFocusRequest = 0
|
||||
|
||||
public init() {
|
||||
private var keyboardActions: SybilKeyboardActions? {
|
||||
guard !viewModel.isCheckingSession, viewModel.isAuthenticated else {
|
||||
return nil
|
||||
}
|
||||
|
||||
return SybilKeyboardActions(
|
||||
newChat: {
|
||||
viewModel.startNewChat()
|
||||
composerFocusRequest += 1
|
||||
},
|
||||
newSearch: {
|
||||
viewModel.startNewSearch()
|
||||
composerFocusRequest += 1
|
||||
},
|
||||
previousConversation: {
|
||||
viewModel.selectPreviousSidebarItem()
|
||||
},
|
||||
nextConversation: {
|
||||
viewModel.selectNextSidebarItem()
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
@MainActor public init() {
|
||||
SybilFontRegistry.registerIfNeeded()
|
||||
SybilTheme.applySystemAppearance()
|
||||
}
|
||||
|
||||
public var body: some View {
|
||||
@@ -25,17 +52,96 @@ public struct SplitView: View {
|
||||
} else {
|
||||
NavigationSplitView {
|
||||
SybilSidebarView(viewModel: viewModel)
|
||||
.navigationTitle("Sybil")
|
||||
} detail: {
|
||||
SybilWorkspaceView(viewModel: viewModel)
|
||||
SybilWorkspaceView(viewModel: viewModel, composerFocusRequest: composerFocusRequest) {
|
||||
viewModel.startNewChat()
|
||||
composerFocusRequest += 1
|
||||
}
|
||||
}
|
||||
.navigationSplitViewStyle(.balanced)
|
||||
.tint(SybilTheme.primary)
|
||||
}
|
||||
}
|
||||
.font(.sybil(.body))
|
||||
.preferredColorScheme(.dark)
|
||||
.focusedSceneValue(\.sybilKeyboardActions, keyboardActions)
|
||||
.task {
|
||||
await viewModel.bootstrap()
|
||||
}
|
||||
.onChange(of: scenePhase) { _, nextPhase in
|
||||
switch nextPhase {
|
||||
case .background:
|
||||
shouldRefreshOnForeground = true
|
||||
case .active:
|
||||
guard shouldRefreshOnForeground, horizontalSizeClass != .compact else {
|
||||
return
|
||||
}
|
||||
shouldRefreshOnForeground = false
|
||||
Task {
|
||||
await viewModel.refreshVisibleContent(
|
||||
refreshCollections: true,
|
||||
refreshSelection: viewModel.hasRefreshableSelection
|
||||
)
|
||||
}
|
||||
case .inactive:
|
||||
break
|
||||
@unknown default:
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public struct SybilCommands: Commands {
|
||||
@FocusedValue(\.sybilKeyboardActions) private var keyboardActions
|
||||
|
||||
public init() {}
|
||||
|
||||
public var body: some Commands {
|
||||
CommandGroup(replacing: .newItem) {
|
||||
Button("New Chat") {
|
||||
keyboardActions?.newChat()
|
||||
}
|
||||
.keyboardShortcut("n", modifiers: .command)
|
||||
.disabled(keyboardActions == nil)
|
||||
|
||||
Button("New Search") {
|
||||
keyboardActions?.newSearch()
|
||||
}
|
||||
.keyboardShortcut("n", modifiers: [.command, .shift])
|
||||
.disabled(keyboardActions == nil)
|
||||
}
|
||||
|
||||
CommandMenu("Conversation") {
|
||||
Button("Previous Conversation") {
|
||||
keyboardActions?.previousConversation()
|
||||
}
|
||||
.keyboardShortcut("[", modifiers: .command)
|
||||
.disabled(keyboardActions == nil)
|
||||
|
||||
Button("Next Conversation") {
|
||||
keyboardActions?.nextConversation()
|
||||
}
|
||||
.keyboardShortcut("]", modifiers: .command)
|
||||
.disabled(keyboardActions == nil)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private struct SybilKeyboardActions {
|
||||
var newChat: () -> Void
|
||||
var newSearch: () -> Void
|
||||
var previousConversation: () -> Void
|
||||
var nextConversation: () -> Void
|
||||
}
|
||||
|
||||
private struct SybilKeyboardActionsKey: FocusedValueKey {
|
||||
typealias Value = SybilKeyboardActions
|
||||
}
|
||||
|
||||
private extension FocusedValues {
|
||||
var sybilKeyboardActions: SybilKeyboardActions? {
|
||||
get { self[SybilKeyboardActionsKey.self] }
|
||||
set { self[SybilKeyboardActionsKey.self] = newValue }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,16 +17,18 @@ struct AnyEncodable: Encodable {
|
||||
}
|
||||
}
|
||||
|
||||
actor SybilAPIClient {
|
||||
actor SybilAPIClient: SybilAPIClienting {
|
||||
private let configuration: APIConfiguration
|
||||
private let session: URLSession
|
||||
|
||||
@MainActor
|
||||
private static let iso8601FormatterWithFractional: ISO8601DateFormatter = {
|
||||
let formatter = ISO8601DateFormatter()
|
||||
formatter.formatOptions = [.withInternetDateTime, .withFractionalSeconds]
|
||||
return formatter
|
||||
}()
|
||||
|
||||
|
||||
@MainActor
|
||||
private static let iso8601Formatter: ISO8601DateFormatter = {
|
||||
let formatter = ISO8601DateFormatter()
|
||||
formatter.formatOptions = [.withInternetDateTime]
|
||||
|
||||
25
ios/Packages/Sybil/Sources/Sybil/SybilAPIClienting.swift
Normal file
25
ios/Packages/Sybil/Sources/Sybil/SybilAPIClienting.swift
Normal file
@@ -0,0 +1,25 @@
|
||||
import Foundation
|
||||
|
||||
protocol SybilAPIClienting: Sendable {
|
||||
func verifySession() async throws -> AuthSession
|
||||
func listChats() async throws -> [ChatSummary]
|
||||
func createChat(title: String?) async throws -> ChatSummary
|
||||
func getChat(chatID: String) async throws -> ChatDetail
|
||||
func deleteChat(chatID: String) async throws
|
||||
func suggestChatTitle(chatID: String, content: String) async throws -> ChatSummary
|
||||
func listSearches() async throws -> [SearchSummary]
|
||||
func createSearch(title: String?, query: String?) async throws -> SearchSummary
|
||||
func getSearch(searchID: String) async throws -> SearchDetail
|
||||
func createChatFromSearch(searchID: String, title: String?) async throws -> ChatSummary
|
||||
func deleteSearch(searchID: String) async throws
|
||||
func listModels() async throws -> ModelCatalogResponse
|
||||
func runCompletionStream(
|
||||
body: CompletionStreamRequest,
|
||||
onEvent: @escaping @Sendable (CompletionStreamEvent) async -> Void
|
||||
) async throws
|
||||
func runSearchStream(
|
||||
searchID: String,
|
||||
body: SearchRunRequest,
|
||||
onEvent: @escaping @Sendable (SearchStreamEvent) async -> Void
|
||||
) async throws
|
||||
}
|
||||
222
ios/Packages/Sybil/Sources/Sybil/SybilAttachmentViews.swift
Normal file
222
ios/Packages/Sybil/Sources/Sybil/SybilAttachmentViews.swift
Normal file
@@ -0,0 +1,222 @@
|
||||
import SwiftUI
|
||||
|
||||
enum SybilAttachmentTone {
|
||||
case composer
|
||||
case user
|
||||
case assistant
|
||||
}
|
||||
|
||||
struct SybilAttachmentListView: View {
|
||||
var attachments: [ChatAttachment]
|
||||
var tone: SybilAttachmentTone
|
||||
var onRemove: ((String) -> Void)? = nil
|
||||
|
||||
var body: some View {
|
||||
VStack(alignment: .leading, spacing: 8) {
|
||||
ForEach(attachments) { attachment in
|
||||
Group {
|
||||
if attachment.kind == .image {
|
||||
imageCard(attachment)
|
||||
} else {
|
||||
textCard(attachment)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ViewBuilder
|
||||
private func imageCard(_ attachment: ChatAttachment) -> some View {
|
||||
VStack(alignment: .leading, spacing: 0) {
|
||||
if let image = SybilChatAttachmentSupport.image(for: attachment) {
|
||||
Image(uiImage: image)
|
||||
.resizable()
|
||||
.scaledToFill()
|
||||
.frame(maxWidth: .infinity)
|
||||
.frame(height: 180)
|
||||
.clipped()
|
||||
} else {
|
||||
ZStack {
|
||||
RoundedRectangle(cornerRadius: 14)
|
||||
.fill(Color.black.opacity(0.18))
|
||||
Image(systemName: "photo")
|
||||
.font(.system(size: 22, weight: .medium))
|
||||
.foregroundStyle(SybilTheme.textMuted)
|
||||
}
|
||||
.frame(height: 140)
|
||||
}
|
||||
|
||||
HStack(alignment: .top, spacing: 10) {
|
||||
Image(systemName: "photo")
|
||||
.font(.system(size: 13, weight: .semibold))
|
||||
.foregroundStyle(titleColor.opacity(0.92))
|
||||
.frame(width: 26, height: 26)
|
||||
.background(
|
||||
RoundedRectangle(cornerRadius: 8)
|
||||
.fill(Color.white.opacity(0.06))
|
||||
.overlay(
|
||||
RoundedRectangle(cornerRadius: 8)
|
||||
.stroke(Color.white.opacity(0.08), lineWidth: 1)
|
||||
)
|
||||
)
|
||||
|
||||
VStack(alignment: .leading, spacing: 2) {
|
||||
Text(attachment.filename)
|
||||
.font(.sybil(.footnote, weight: .medium))
|
||||
.foregroundStyle(titleColor)
|
||||
.lineLimit(1)
|
||||
|
||||
Text(attachment.mimeType)
|
||||
.font(.sybil(.caption2))
|
||||
.foregroundStyle(SybilTheme.textMuted)
|
||||
.lineLimit(1)
|
||||
}
|
||||
|
||||
Spacer(minLength: 0)
|
||||
|
||||
if let onRemove {
|
||||
removeButton(for: attachment.id, onRemove: onRemove)
|
||||
}
|
||||
}
|
||||
.padding(12)
|
||||
}
|
||||
.background(cardBackground)
|
||||
.clipShape(RoundedRectangle(cornerRadius: 14))
|
||||
.overlay(
|
||||
RoundedRectangle(cornerRadius: 14)
|
||||
.stroke(cardBorder, lineWidth: 1)
|
||||
)
|
||||
}
|
||||
|
||||
/// Card for a text attachment: a filename/MIME header row (flagging
/// truncation) above a monospaced content preview.
@ViewBuilder
private func textCard(_ attachment: ChatAttachment) -> some View {
    VStack(alignment: .leading, spacing: 10) {
        // Header row: icon badge, filename + MIME type, optional remove button.
        HStack(alignment: .top, spacing: 10) {
            Image(systemName: "doc.text")
                .font(.system(size: 13, weight: .semibold))
                .foregroundStyle(titleColor.opacity(0.92))
                .frame(width: 26, height: 26)
                .background(
                    RoundedRectangle(cornerRadius: 8)
                        .fill(Color.white.opacity(0.06))
                        .overlay(
                            RoundedRectangle(cornerRadius: 8)
                                .stroke(Color.white.opacity(0.08), lineWidth: 1)
                        )
                )

            VStack(alignment: .leading, spacing: 2) {
                Text(attachment.filename)
                    .font(.sybil(.footnote, weight: .medium))
                    .foregroundStyle(titleColor)
                    .lineLimit(1)

                // Surface truncation so the user knows the full file was not sent.
                Text(
                    attachment.truncated == true
                        ? "\(attachment.mimeType) • truncated"
                        : attachment.mimeType
                )
                .font(.sybil(.caption2))
                .foregroundStyle(SybilTheme.textMuted)
                .lineLimit(1)
            }

            Spacer(minLength: 0)

            // Remove control only appears when a removal callback exists.
            if let onRemove {
                removeButton(for: attachment.id, onRemove: onRemove)
            }
        }

        // Short monospaced excerpt of the file contents.
        Text(SybilChatAttachmentSupport.previewText(for: attachment))
            .font(.system(.caption, design: .monospaced))
            .foregroundStyle(bodyColor)
            .frame(maxWidth: .infinity, alignment: .leading)
            .padding(10)
            .background(
                RoundedRectangle(cornerRadius: 10)
                    .fill(Color.black.opacity(0.16))
                    .overlay(
                        RoundedRectangle(cornerRadius: 10)
                            .stroke(Color.white.opacity(0.05), lineWidth: 1)
                    )
            )
    }
    .padding(12)
    .background(cardBackground)
    .clipShape(RoundedRectangle(cornerRadius: 14))
    .overlay(
        RoundedRectangle(cornerRadius: 14)
            .stroke(cardBorder, lineWidth: 1)
    )
}
|
||||
|
||||
/// Small circular "x" control that invokes `onRemove` with the attachment id.
private func removeButton(for attachmentID: String, onRemove: @escaping (String) -> Void) -> some View {
    let glyph = Image(systemName: "xmark")
        .font(.system(size: 11, weight: .bold))
        .foregroundStyle(SybilTheme.textMuted)
        .frame(width: 24, height: 24)
        .background(
            Circle()
                .fill(Color.white.opacity(0.06))
        )

    return Button(
        action: { onRemove(attachmentID) },
        label: { glyph }
    )
    .buttonStyle(.plain)
    .accessibilityLabel("Remove attachment")
}
|
||||
|
||||
/// Background fill for the attachment card, varying with the rendering tone.
/// Composer and assistant tones use theme-surface gradients at different
/// opacities; the user tone uses a flat translucent black.
private var cardBackground: some ShapeStyle {
    let style: AnyShapeStyle
    switch tone {
    case .composer:
        style = AnyShapeStyle(
            LinearGradient(
                colors: [SybilTheme.surface.opacity(0.86), SybilTheme.surfaceStrong.opacity(0.78)],
                startPoint: .topLeading,
                endPoint: .bottomTrailing
            )
        )
    case .user:
        style = AnyShapeStyle(Color.black.opacity(0.14))
    case .assistant:
        style = AnyShapeStyle(
            LinearGradient(
                colors: [SybilTheme.surface.opacity(0.58), SybilTheme.surfaceStrong.opacity(0.42)],
                startPoint: .topLeading,
                endPoint: .bottomTrailing
            )
        )
    }
    return style
}
|
||||
|
||||
/// Hairline border color for the attachment card, matched to the tone.
private var cardBorder: Color {
    switch tone {
    case .user: return Color.white.opacity(0.12)
    case .composer: return SybilTheme.border.opacity(0.82)
    case .assistant: return SybilTheme.border.opacity(0.58)
    }
}
|
||||
|
||||
/// Title (filename) color for the attachment card.
/// Collapsed from a switch over `tone` whose branches were identical —
/// every tone returned `SybilTheme.text`, so the branching was dead code.
private var titleColor: Color {
    SybilTheme.text
}
|
||||
|
||||
/// Body/preview text color; the user tone renders very slightly more opaque.
private var bodyColor: Color {
    switch tone {
    case .user: return SybilTheme.text.opacity(0.96)
    case .composer, .assistant: return SybilTheme.text.opacity(0.94)
    }
}
|
||||
}
|
||||
@@ -0,0 +1,36 @@
|
||||
import UIKit
|
||||
|
||||
/// RAII-style wrapper around a UIKit background task assertion, keeping the
/// app alive briefly after it is backgrounded so in-flight work can finish.
/// Call `end()` when the work completes; the system's expiration handler
/// also ends the task automatically.
@MainActor
final class SybilBackgroundTaskAssertion {
    // Name used for logging and for UIKit's task bookkeeping.
    private let name: String
    // Token for the active background task; `.invalid` once ended.
    private var identifier: UIBackgroundTaskIdentifier = .invalid

    /// Begins a background task; returns nil when the system refuses to
    /// grant one (e.g. background time exhausted).
    ///
    /// - Parameters:
    ///   - name: Label for the task, used in logs and by UIKit.
    ///   - onExpiration: Invoked on the main actor just before the task is
    ///     force-ended because the system's time allowance ran out.
    init?(name: String, onExpiration: @escaping @MainActor () -> Void = {}) {
        self.name = name
        identifier = UIApplication.shared.beginBackgroundTask(withName: name) { [weak self] in
            // The expiration handler may fire off the main actor; hop back
            // before touching state or calling the caller's closure.
            Task { @MainActor in
                guard let self else { return }
                SybilLog.warning(SybilLog.app, "Background task expired: \(self.name)")
                onExpiration()
                // End after notifying the caller so UIKit doesn't kill the app.
                self.end()
            }
        }

        guard identifier != .invalid else {
            SybilLog.warning(SybilLog.app, "Failed to acquire background task: \(name)")
            return nil
        }

        SybilLog.debug(SybilLog.app, "Acquired background task: \(name)")
    }

    /// Ends the background task. Idempotent: subsequent calls are no-ops.
    func end() {
        guard identifier != .invalid else {
            return
        }

        UIApplication.shared.endBackgroundTask(identifier)
        identifier = .invalid
        SybilLog.debug(SybilLog.app, "Ended background task: \(name)")
    }
}
|
||||
@@ -0,0 +1,354 @@
|
||||
import Foundation
|
||||
import UniformTypeIdentifiers
|
||||
import UIKit
|
||||
|
||||
/// Failures that can occur while building a chat attachment from user input.
/// Each case carries the offending filename (or, for `tooManyAttachments`,
/// the per-message limit) so the UI can show a specific message.
enum ChatAttachmentError: LocalizedError {
    case unsupportedType(String)
    case imageTooLarge(String)
    case textTooLarge(String)
    case unreadableFile(String)
    case unsupportedImageFormat(String)
    case tooManyAttachments(Int)

    // User-facing description surfaced via LocalizedError.
    var errorDescription: String? {
        switch self {
        case let .unsupportedType(filename):
            return "Unsupported file type for '\(filename)'. Use PNG/JPEG images or text-based files."
        case let .imageTooLarge(filename):
            return "Image '\(filename)' exceeds the 6 MB upload limit."
        case let .textTooLarge(filename):
            return "Text file '\(filename)' exceeds the 8 MB upload limit."
        case let .unreadableFile(filename):
            return "Could not read '\(filename)'."
        case let .unsupportedImageFormat(filename):
            return "Image '\(filename)' could not be converted to PNG or JPEG."
        case let .tooManyAttachments(limit):
            return "You can attach up to \(limit) files per message."
        }
    }
}
|
||||
|
||||
/// Client-side helpers for building, validating, and rendering chat
/// attachments (inline images and text files) before a message is sent.
/// Stateless; used as a namespace.
enum SybilChatAttachmentSupport {
    // Hard limits enforced before an attachment is accepted. The 6/8 MB
    // figures are echoed in ChatAttachmentError's user-facing messages.
    static let maxAttachmentsPerMessage = 8
    static let maxImageBytes = 6 * 1024 * 1024
    static let maxTextBytes = 8 * 1024 * 1024
    static let maxTextCharacters = 200_000

    // Extensions accepted as text when neither UTType nor MIME identifies the file.
    private static let supportedTextExtensions: Set<String> = [
        "txt", "md", "markdown", "csv", "tsv", "json", "jsonl", "xml", "yaml", "yml", "html", "htm",
        "css", "js", "jsx", "ts", "tsx", "py", "rb", "java", "c", "cc", "cpp", "h", "hpp", "go",
        "rs", "sh", "sql", "log", "toml", "ini", "cfg", "conf", "swift", "kt", "m", "mm"
    ]

    // Non-"text/" MIME types still treated as text content (compared lowercased).
    private static let supportedTextMimeTypes: Set<String> = [
        "application/json",
        "application/ld+json",
        "application/sql",
        "application/toml",
        "application/x-httpd-php",
        "application/x-javascript",
        "application/x-sh",
        "application/xml",
        "application/yaml",
        "application/x-yaml",
        "image/svg+xml"
    ]

    /// Human-readable summary of attachment filenames, or "" when empty.
    /// NOTE(review): a single attachment omits the "Attached: " prefix while
    /// multiple attachments get it — confirm this asymmetry is intended.
    static func attachmentSummary(_ attachments: [ChatAttachment]) -> String {
        guard !attachments.isEmpty else { return "" }
        let names = attachments.map(\.filename).joined(separator: ", ")
        return attachments.count == 1 ? names : "Attached: \(names)"
    }

    /// Wraps the attachments as `{"attachments": [...]}` for message
    /// metadata, or nil when there is nothing to attach.
    static func metadataValue(for attachments: [ChatAttachment]) -> JSONValue? {
        guard !attachments.isEmpty else { return nil }
        return .object([
            "attachments": .array(attachments.map(\.jsonValue))
        ])
    }

    /// Builds attachments from picked file URLs; throws on the first failure.
    static func buildAttachments(from urls: [URL]) throws -> [ChatAttachment] {
        try urls.map { try buildAttachment(fromFileURL: $0) }
    }

    /// Converts a UIImage (e.g. pasted from the clipboard) into an
    /// attachment, preferring lossless PNG when it fits the size budget and
    /// falling back to JPEG otherwise.
    static func buildImageAttachment(image: UIImage, filename: String = "pasted-image.jpg") throws -> ChatAttachment {
        if let pngData = image.pngData(), pngData.count <= maxImageBytes {
            return try buildImageAttachment(data: pngData, filename: filename, contentType: .png)
        }

        guard let jpegData = image.jpegData(compressionQuality: 0.92) else {
            throw ChatAttachmentError.unsupportedImageFormat(filename)
        }

        return try buildImageAttachment(data: jpegData, filename: filename, contentType: .jpeg)
    }

    /// Builds a text attachment from an in-memory string (e.g. pasted text).
    static func buildTextAttachment(text: String, filename: String = "pasted-text.txt", mimeType: String = "text/plain") throws -> ChatAttachment {
        let data = Data(text.utf8)
        return try buildTextAttachment(data: data, filename: filename, mimeType: mimeType)
    }

    /// Builds attachments from drag-and-drop / paste item providers.
    /// File URLs are preferred; otherwise image data is loaded directly.
    /// Providers yielding neither are silently skipped.
    @MainActor
    static func buildAttachments(from itemProviders: [NSItemProvider]) async throws -> [ChatAttachment] {
        var attachments: [ChatAttachment] = []

        for provider in itemProviders {
            if let fileURL = try await loadFileURL(from: provider) {
                attachments.append(try buildAttachment(fromFileURL: fileURL))
                continue
            }

            if provider.hasItemConformingToTypeIdentifier(UTType.image.identifier) {
                if let attachment = try await loadImageAttachment(from: provider) {
                    attachments.append(attachment)
                }
            }
        }

        return attachments
    }

    /// Short (≤ 280 character) preview of a text attachment's contents,
    /// with carriage returns removed and surrounding whitespace trimmed.
    static func previewText(for attachment: ChatAttachment) -> String {
        let normalized = (attachment.text ?? "")
            .replacingOccurrences(of: "\r", with: "")
            .trimmingCharacters(in: .whitespacesAndNewlines)

        if normalized.isEmpty {
            return "(empty file)"
        }

        if normalized.count <= 280 {
            return normalized
        }

        let endIndex = normalized.index(normalized.startIndex, offsetBy: 280)
        return normalized[..<endIndex].trimmingCharacters(in: .whitespacesAndNewlines) + "..."
    }

    /// Decodes an image attachment's base64 data URL back into a UIImage.
    /// Returns nil for non-image attachments or undecodable payloads.
    static func image(for attachment: ChatAttachment) -> UIImage? {
        guard attachment.kind == .image,
              let dataURL = attachment.dataUrl,
              let data = decodeDataURL(dataURL)
        else {
            return nil
        }
        return UIImage(data: data)
    }

    /// Reads a (possibly security-scoped) file URL and classifies it as an
    /// image or text attachment; throws for unsupported/unreadable files.
    private static func buildAttachment(fromFileURL url: URL) throws -> ChatAttachment {
        // Document-picker URLs require security-scoped access before reading.
        let accessed = url.startAccessingSecurityScopedResource()
        defer {
            if accessed {
                url.stopAccessingSecurityScopedResource()
            }
        }

        let filename = url.lastPathComponent.isEmpty ? "attachment" : url.lastPathComponent
        let resourceValues = try? url.resourceValues(forKeys: [.contentTypeKey])
        // Fall back to extension-derived UTType when the resource key is missing.
        let contentType = resourceValues?.contentType ?? UTType(filenameExtension: url.pathExtension)

        let data: Data
        do {
            data = try Data(contentsOf: url)
        } catch {
            throw ChatAttachmentError.unreadableFile(filename)
        }

        if contentType?.conforms(to: .image) == true {
            return try buildImageAttachment(data: data, filename: filename, contentType: contentType)
        }

        if isTextLike(contentType: contentType, mimeType: contentType?.preferredMIMEType, filename: filename) {
            return try buildTextAttachment(data: data, filename: filename, mimeType: contentType?.preferredMIMEType ?? "text/plain")
        }

        throw ChatAttachmentError.unsupportedType(filename)
    }

    /// Normalizes raw image bytes to PNG or JPEG, enforces the size limit,
    /// and packages the result as a base64 data-URL attachment.
    static func buildImageAttachment(data: Data, filename: String, contentType: UTType?) throws -> ChatAttachment {
        var mimeType = contentType?.preferredMIMEType
        var payload = data

        // Re-encode anything that is not already PNG/JPEG (including unknown MIME).
        if mimeType != "image/png" && mimeType != "image/jpeg" {
            guard let image = UIImage(data: data) else {
                throw ChatAttachmentError.unsupportedImageFormat(filename)
            }

            if let pngData = image.pngData(), pngData.count <= maxImageBytes {
                payload = pngData
                mimeType = "image/png"
            } else if let jpegData = image.jpegData(compressionQuality: 0.92) {
                payload = jpegData
                mimeType = "image/jpeg"
            } else {
                throw ChatAttachmentError.unsupportedImageFormat(filename)
            }
        }

        if payload.count > maxImageBytes {
            throw ChatAttachmentError.imageTooLarge(filename)
        }

        // After the branch above, anything that isn't PNG is JPEG.
        let normalizedMimeType = (mimeType == "image/png") ? "image/png" : "image/jpeg"
        let dataUrl = "data:\(normalizedMimeType);base64,\(payload.base64EncodedString())"

        return .image(
            filename: filename,
            mimeType: normalizedMimeType,
            sizeBytes: payload.count,
            dataUrl: dataUrl
        )
    }

    /// Decodes file bytes as UTF-8, normalizes CRLF to LF, strips NULs, and
    /// truncates to `maxTextCharacters`. `sizeBytes` reflects the original
    /// file, not the truncated text.
    private static func buildTextAttachment(data: Data, filename: String, mimeType: String) throws -> ChatAttachment {
        if data.count > maxTextBytes {
            throw ChatAttachmentError.textTooLarge(filename)
        }

        let normalized = String(decoding: data, as: UTF8.self)
            .replacingOccurrences(of: "\r\n", with: "\n")
            .replacingOccurrences(of: "\u{0000}", with: "")

        let truncated = normalized.count > maxTextCharacters
        let trimmedText: String
        if truncated {
            let endIndex = normalized.index(normalized.startIndex, offsetBy: maxTextCharacters)
            trimmedText = String(normalized[..<endIndex])
        } else {
            trimmedText = normalized
        }

        return .text(
            filename: filename,
            mimeType: mimeType,
            sizeBytes: data.count,
            text: trimmedText,
            truncated: truncated
        )
    }

    /// Whether a file should be treated as text — checked in decreasing
    /// order of reliability: UTType conformance, MIME type, file extension.
    private static func isTextLike(contentType: UTType?, mimeType: String?, filename: String) -> Bool {
        if let contentType {
            if contentType.conforms(to: .text) || contentType.conforms(to: .plainText) || contentType.conforms(to: .sourceCode) {
                return true
            }
            if contentType.conforms(to: .json) || contentType.conforms(to: .xml) {
                return true
            }
        }

        if let mimeType {
            if mimeType.hasPrefix("text/") {
                return true
            }
            if supportedTextMimeTypes.contains(mimeType.lowercased()) {
                return true
            }
        }

        let ext = URL(fileURLWithPath: filename).pathExtension.lowercased()
        return supportedTextExtensions.contains(ext)
    }

    /// Base64-decodes the payload of a `data:` URL (everything after the
    /// first comma). Returns nil for malformed input.
    private static func decodeDataURL(_ value: String) -> Data? {
        guard let separator = value.firstIndex(of: ",") else {
            return nil
        }

        let encoded = value[value.index(after: separator)...]
        return Data(base64Encoded: String(encoded))
    }

    /// Loads a file URL from an item provider, tolerating the three shapes
    /// providers deliver it in: URL, URL-encoded Data, or String.
    @MainActor
    private static func loadFileURL(from provider: NSItemProvider) async throws -> URL? {
        guard provider.hasItemConformingToTypeIdentifier(UTType.fileURL.identifier) else {
            return nil
        }

        return try await withCheckedThrowingContinuation { continuation in
            provider.loadItem(forTypeIdentifier: UTType.fileURL.identifier, options: nil) { item, error in
                if let error {
                    continuation.resume(throwing: error)
                    return
                }

                if let url = item as? URL {
                    continuation.resume(returning: url)
                    return
                }

                if let data = item as? Data,
                   let url = URL(dataRepresentation: data, relativeTo: nil) {
                    continuation.resume(returning: url)
                    return
                }

                if let string = item as? String,
                   let url = URL(string: string) {
                    continuation.resume(returning: url)
                    return
                }

                continuation.resume(returning: nil)
            }
        }
    }

    /// Loads image data from a provider, preferring an exact PNG/JPEG data
    /// representation and falling back to a UIImage re-encoded as JPEG.
    /// Returns nil when the provider yields no usable image.
    @MainActor
    private static func loadImageAttachment(from provider: NSItemProvider) async throws -> ChatAttachment? {
        let preferredImageType: UTType = if provider.hasItemConformingToTypeIdentifier(UTType.png.identifier) {
            .png
        } else if provider.hasItemConformingToTypeIdentifier(UTType.jpeg.identifier) {
            .jpeg
        } else {
            .image
        }

        if let data = try await loadDataRepresentation(from: provider, type: preferredImageType) {
            let filenameExtension = preferredImageType.preferredFilenameExtension ?? "jpg"
            let filename = "pasted-image.\(filenameExtension)"
            return try buildImageAttachment(data: data, filename: filename, contentType: preferredImageType)
        }

        if let image = try await loadUIImage(from: provider),
           let jpegData = image.jpegData(compressionQuality: 0.92) {
            return try buildImageAttachment(data: jpegData, filename: "pasted-image.jpg", contentType: .jpeg)
        }

        return nil
    }

    /// Async wrapper over NSItemProvider.loadDataRepresentation for `type`.
    /// Returns nil when the provider does not offer that type.
    @MainActor
    private static func loadDataRepresentation(from provider: NSItemProvider, type: UTType) async throws -> Data? {
        guard provider.hasItemConformingToTypeIdentifier(type.identifier) else {
            return nil
        }

        return try await withCheckedThrowingContinuation { continuation in
            provider.loadDataRepresentation(forTypeIdentifier: type.identifier) { data, error in
                if let error {
                    continuation.resume(throwing: error)
                    return
                }
                continuation.resume(returning: data)
            }
        }
    }

    /// Async wrapper over NSItemProvider.loadObject for UIImage.
    /// Returns nil when the provider cannot produce a UIImage.
    @MainActor
    private static func loadUIImage(from provider: NSItemProvider) async throws -> UIImage? {
        guard provider.canLoadObject(ofClass: UIImage.self) else {
            return nil
        }

        return try await withCheckedThrowingContinuation { continuation in
            provider.loadObject(ofClass: UIImage.self) { object, error in
                if let error {
                    continuation.resume(throwing: error)
                    return
                }
                continuation.resume(returning: object as? UIImage)
            }
        }
    }
}
|
||||
@@ -5,6 +5,8 @@ struct SybilChatTranscriptView: View {
|
||||
var messages: [Message]
|
||||
var isLoading: Bool
|
||||
var isSending: Bool
|
||||
var onRefresh: (() async -> Void)? = nil
|
||||
@State private var hasHandledInitialTranscriptScroll = false
|
||||
|
||||
private var hasPendingAssistant: Bool {
|
||||
messages.contains { message in
|
||||
@@ -25,6 +27,7 @@ struct SybilChatTranscriptView: View {
|
||||
|
||||
ForEach(messages) { message in
|
||||
MessageBubble(message: message, isSending: isSending)
|
||||
.frame(maxWidth: .infinity)
|
||||
.id(message.id)
|
||||
}
|
||||
|
||||
@@ -49,22 +52,33 @@ struct SybilChatTranscriptView: View {
|
||||
.padding(.vertical, 18)
|
||||
}
|
||||
.frame(maxWidth: .infinity, alignment: .leading)
|
||||
.refreshable {
|
||||
await onRefresh?()
|
||||
}
|
||||
.tint(SybilTheme.primary)
|
||||
.scrollDismissesKeyboard(.interactively)
|
||||
.onAppear {
|
||||
proxy.scrollTo("chat-bottom-anchor", anchor: .bottom)
|
||||
scrollToBottom(with: proxy, animated: false)
|
||||
}
|
||||
.onChange(of: messages.map(\.id)) { _, _ in
|
||||
withAnimation(.easeOut(duration: 0.22)) {
|
||||
proxy.scrollTo("chat-bottom-anchor", anchor: .bottom)
|
||||
}
|
||||
scrollToBottom(with: proxy, animated: hasHandledInitialTranscriptScroll && !isLoading)
|
||||
hasHandledInitialTranscriptScroll = true
|
||||
}
|
||||
.onChange(of: isSending) { _, _ in
|
||||
withAnimation(.easeOut(duration: 0.22)) {
|
||||
proxy.scrollTo("chat-bottom-anchor", anchor: .bottom)
|
||||
}
|
||||
scrollToBottom(with: proxy, animated: hasHandledInitialTranscriptScroll)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func scrollToBottom(with proxy: ScrollViewProxy, animated: Bool) {
|
||||
if animated {
|
||||
withAnimation(.easeOut(duration: 0.22)) {
|
||||
proxy.scrollTo("chat-bottom-anchor", anchor: .bottom)
|
||||
}
|
||||
} else {
|
||||
proxy.scrollTo("chat-bottom-anchor", anchor: .bottom)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private struct MessageBubble: View {
|
||||
@@ -86,10 +100,8 @@ private struct MessageBubble: View {
|
||||
}
|
||||
|
||||
var body: some View {
|
||||
HStack(alignment: .top) {
|
||||
if isUser {
|
||||
Spacer(minLength: 44)
|
||||
}
|
||||
HStack(alignment: .top, spacing: 0) {
|
||||
leadingSpacer
|
||||
|
||||
if let toolCallMetadata {
|
||||
ToolCallActivityChip(
|
||||
@@ -99,6 +111,13 @@ private struct MessageBubble: View {
|
||||
)
|
||||
} else {
|
||||
VStack(alignment: .leading, spacing: 8) {
|
||||
if !message.attachments.isEmpty {
|
||||
SybilAttachmentListView(
|
||||
attachments: message.attachments,
|
||||
tone: isUser ? .user : .assistant
|
||||
)
|
||||
}
|
||||
|
||||
if isPendingAssistant {
|
||||
HStack(spacing: 8) {
|
||||
ProgressView()
|
||||
@@ -109,7 +128,7 @@ private struct MessageBubble: View {
|
||||
.foregroundStyle(SybilTheme.textMuted)
|
||||
}
|
||||
.padding(.vertical, 2)
|
||||
} else {
|
||||
} else if !message.content.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty {
|
||||
Markdown(message.content)
|
||||
.tint(SybilTheme.primary)
|
||||
.foregroundStyle(isUser ? SybilTheme.text : SybilTheme.text.opacity(0.95))
|
||||
@@ -136,12 +155,24 @@ private struct MessageBubble: View {
|
||||
.frame(maxWidth: isUser ? 420 : nil, alignment: isUser ? .trailing : .leading)
|
||||
}
|
||||
|
||||
if !isUser {
|
||||
Spacer(minLength: 0)
|
||||
}
|
||||
trailingSpacer
|
||||
}
|
||||
.frame(maxWidth: .infinity, alignment: isUser ? .trailing : .leading)
|
||||
}
|
||||
|
||||
@ViewBuilder
|
||||
private var leadingSpacer: some View {
|
||||
if isUser {
|
||||
Spacer(minLength: 44)
|
||||
}
|
||||
}
|
||||
|
||||
@ViewBuilder
|
||||
private var trailingSpacer: some View {
|
||||
if !isUser {
|
||||
Spacer(minLength: 0)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private struct ToolCallActivityChip: View {
|
||||
|
||||
@@ -10,6 +10,7 @@ extension Theme {
|
||||
.text {
|
||||
FontFamily(.custom("Inter"))
|
||||
FontSize(15)
|
||||
ForegroundColor(SybilTheme.text)
|
||||
}
|
||||
.code {
|
||||
FontFamilyVariant(.monospaced)
|
||||
|
||||
@@ -21,6 +21,132 @@ public enum MessageRole: String, Codable, Hashable, Sendable {
|
||||
case tool
|
||||
}
|
||||
|
||||
/// A file attached to a chat message: either an image carried inline as a
/// base64 `data:` URL, or a text snippet (optionally truncated). Codable so
/// it round-trips through message metadata.
public struct ChatAttachment: Codable, Hashable, Identifiable, Sendable {
    public enum Kind: String, Codable, Hashable, Sendable {
        case image
        case text
    }

    public var id: String
    public var kind: Kind
    public var filename: String
    public var mimeType: String
    // Byte size of the source payload (for text, callers in this module pass
    // the pre-truncation file size).
    public var sizeBytes: Int
    // Set for `.image` attachments: "data:<mime>;base64,<payload>".
    public var dataUrl: String?
    // Set for `.text` attachments: the (possibly truncated) file contents.
    public var text: String?
    // True when `text` was cut off at a client-side character limit.
    public var truncated: Bool?

    public init(
        id: String,
        kind: Kind,
        filename: String,
        mimeType: String,
        sizeBytes: Int,
        dataUrl: String? = nil,
        text: String? = nil,
        truncated: Bool? = nil
    ) {
        self.id = id
        self.kind = kind
        self.filename = filename
        self.mimeType = mimeType
        self.sizeBytes = sizeBytes
        self.dataUrl = dataUrl
        self.text = text
        self.truncated = truncated
    }

    /// Convenience factory for an image attachment with a fresh UUID id.
    public static func image(
        id: String = UUID().uuidString,
        filename: String,
        mimeType: String,
        sizeBytes: Int,
        dataUrl: String
    ) -> ChatAttachment {
        ChatAttachment(
            id: id,
            kind: .image,
            filename: filename,
            mimeType: mimeType,
            sizeBytes: sizeBytes,
            dataUrl: dataUrl
        )
    }

    /// Convenience factory for a text attachment with a fresh UUID id.
    public static func text(
        id: String = UUID().uuidString,
        filename: String,
        mimeType: String,
        sizeBytes: Int,
        text: String,
        truncated: Bool
    ) -> ChatAttachment {
        ChatAttachment(
            id: id,
            kind: .text,
            filename: filename,
            mimeType: mimeType,
            sizeBytes: sizeBytes,
            text: text,
            truncated: truncated
        )
    }

    /// Serializes the attachment for embedding in message metadata.
    /// Optional fields are omitted (not null) when absent.
    var jsonValue: JSONValue {
        var object: [String: JSONValue] = [
            "kind": .string(kind.rawValue),
            "id": .string(id),
            "filename": .string(filename),
            "mimeType": .string(mimeType),
            "sizeBytes": .number(Double(sizeBytes))
        ]

        if let dataUrl {
            object["dataUrl"] = .string(dataUrl)
        }
        if let text {
            object["text"] = .string(text)
        }
        if let truncated {
            object["truncated"] = .bool(truncated)
        }

        return .object(object)
    }

    /// Parses attachments back out of message metadata. Entries missing any
    /// required field are silently dropped; returns [] when the metadata has
    /// no "attachments" array.
    static func attachments(from metadata: JSONValue?) -> [ChatAttachment] {
        guard let metadataObject = metadata?.objectValue,
              let values = metadataObject["attachments"]?.arrayValue
        else {
            return []
        }

        return values.compactMap { value in
            guard let object = value.objectValue,
                  let kindRaw = object["kind"]?.stringValue,
                  let kind = Kind(rawValue: kindRaw),
                  let id = object["id"]?.stringValue,
                  let filename = object["filename"]?.stringValue,
                  let mimeType = object["mimeType"]?.stringValue,
                  let sizeNumber = object["sizeBytes"]?.numberValue
            else {
                return nil
            }

            return ChatAttachment(
                id: id,
                kind: kind,
                filename: filename,
                mimeType: mimeType,
                sizeBytes: Int(sizeNumber),
                dataUrl: object["dataUrl"]?.stringValue,
                text: object["text"]?.stringValue,
                truncated: object["truncated"]?.boolValue
            )
        }
    }
}
||||
|
||||
public struct ChatSummary: Codable, Identifiable, Hashable, Sendable {
|
||||
public var id: String
|
||||
public var title: String?
|
||||
@@ -48,6 +174,10 @@ public struct Message: Codable, Identifiable, Hashable, Sendable {
|
||||
public var name: String?
|
||||
public var metadata: JSONValue? = nil
|
||||
|
||||
public var attachments: [ChatAttachment] {
|
||||
ChatAttachment.attachments(from: metadata)
|
||||
}
|
||||
|
||||
public var toolCallMetadata: ToolCallMetadata? {
|
||||
guard role == .tool,
|
||||
let object = metadata?.objectValue,
|
||||
@@ -155,6 +285,20 @@ public enum JSONValue: Codable, Hashable, Sendable {
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
public var arrayValue: [JSONValue]? {
|
||||
if case let .array(value) = self {
|
||||
return value
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
public var boolValue: Bool? {
|
||||
if case let .bool(value) = self {
|
||||
return value
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
public struct ChatDetail: Codable, Identifiable, Hashable, Sendable {
|
||||
@@ -239,11 +383,13 @@ public struct CompletionRequestMessage: Codable, Sendable {
|
||||
public var role: MessageRole
|
||||
public var content: String
|
||||
public var name: String?
|
||||
public var attachments: [ChatAttachment]?
|
||||
|
||||
public init(role: MessageRole, content: String, name: String? = nil) {
|
||||
public init(role: MessageRole, content: String, name: String? = nil, attachments: [ChatAttachment]? = nil) {
|
||||
self.role = role
|
||||
self.content = content
|
||||
self.name = name
|
||||
self.attachments = attachments
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -23,6 +23,9 @@ enum PhoneRoute: Hashable {
|
||||
struct SybilPhoneShellView: View {
|
||||
@Bindable var viewModel: SybilViewModel
|
||||
@State private var path: [PhoneRoute] = []
|
||||
@Environment(\.scenePhase) private var scenePhase
|
||||
@State private var shouldRefreshOnForeground = false
|
||||
@State private var composerFocusRequest = 0
|
||||
|
||||
var body: some View {
|
||||
NavigationStack(path: $path) {
|
||||
@@ -30,15 +33,41 @@ struct SybilPhoneShellView: View {
|
||||
.navigationTitle("")
|
||||
.navigationBarTitleDisplayMode(.inline)
|
||||
.toolbar {
|
||||
ToolbarItem(placement: .principal) {
|
||||
SybilWordmark(size: 19)
|
||||
ToolbarItem(placement: .topBarLeading) {
|
||||
SybilWordmark(size: 18)
|
||||
}
|
||||
}
|
||||
.navigationDestination(for: PhoneRoute.self) { route in
|
||||
SybilPhoneDestinationView(viewModel: viewModel, route: route)
|
||||
SybilPhoneDestinationView(
|
||||
viewModel: viewModel,
|
||||
path: $path,
|
||||
composerFocusRequest: $composerFocusRequest,
|
||||
route: route
|
||||
)
|
||||
}
|
||||
}
|
||||
.tint(SybilTheme.primary)
|
||||
.onChange(of: scenePhase) { _, nextPhase in
|
||||
switch nextPhase {
|
||||
case .background:
|
||||
shouldRefreshOnForeground = true
|
||||
case .active:
|
||||
guard shouldRefreshOnForeground else {
|
||||
return
|
||||
}
|
||||
shouldRefreshOnForeground = false
|
||||
Task {
|
||||
await viewModel.refreshVisibleContent(
|
||||
refreshCollections: path.isEmpty,
|
||||
refreshSelection: !path.isEmpty && viewModel.hasRefreshableSelection
|
||||
)
|
||||
}
|
||||
case .inactive:
|
||||
break
|
||||
@unknown default:
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -103,6 +132,10 @@ private struct SybilPhoneSidebarRoot: View {
|
||||
}
|
||||
.padding(10)
|
||||
}
|
||||
.refreshable {
|
||||
await viewModel.refreshCollectionsFromUser()
|
||||
}
|
||||
.tint(SybilTheme.primary)
|
||||
}
|
||||
}
|
||||
.background(SybilTheme.panelGradient)
|
||||
@@ -225,15 +258,25 @@ private struct SybilPhoneSidebarRow: View {
|
||||
|
||||
private struct SybilPhoneDestinationView: View {
|
||||
@Bindable var viewModel: SybilViewModel
|
||||
@Binding var path: [PhoneRoute]
|
||||
@Binding var composerFocusRequest: Int
|
||||
let route: PhoneRoute
|
||||
|
||||
var body: some View {
|
||||
SybilWorkspaceView(viewModel: viewModel)
|
||||
.frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .topLeading)
|
||||
.navigationBarTitleDisplayMode(.inline)
|
||||
.task(id: route) {
|
||||
applyRoute()
|
||||
SybilWorkspaceView(viewModel: viewModel, composerFocusRequest: composerFocusRequest) {
|
||||
viewModel.startNewChat()
|
||||
composerFocusRequest += 1
|
||||
if path.isEmpty {
|
||||
path = [.draftChat]
|
||||
} else {
|
||||
path[path.index(before: path.endIndex)] = .draftChat
|
||||
}
|
||||
}
|
||||
.frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .topLeading)
|
||||
.navigationBarTitleDisplayMode(.inline)
|
||||
.task(id: route) {
|
||||
applyRoute()
|
||||
}
|
||||
}
|
||||
|
||||
private func applyRoute() {
|
||||
|
||||
@@ -6,6 +6,7 @@ struct SybilSearchResultsView: View {
|
||||
var isLoading: Bool
|
||||
var isRunning: Bool
|
||||
var isStartingChat: Bool = false
|
||||
var onRefresh: (() async -> Void)? = nil
|
||||
var onStartChat: (() -> Void)? = nil
|
||||
|
||||
var body: some View {
|
||||
@@ -100,6 +101,10 @@ struct SybilSearchResultsView: View {
|
||||
.padding(.horizontal, 14)
|
||||
.padding(.vertical, 20)
|
||||
}
|
||||
.refreshable {
|
||||
await onRefresh?()
|
||||
}
|
||||
.tint(SybilTheme.primary)
|
||||
.scrollDismissesKeyboard(.interactively)
|
||||
.frame(maxWidth: .infinity, alignment: .leading)
|
||||
}
|
||||
|
||||
@@ -72,11 +72,6 @@ final class SybilSettingsStore {
|
||||
return nil
|
||||
}
|
||||
|
||||
let path = components.path.trimmingCharacters(in: CharacterSet(charactersIn: "/"))
|
||||
if path.lowercased() == "api" {
|
||||
components.path = ""
|
||||
}
|
||||
|
||||
return components.url
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,8 +18,6 @@ struct SybilSidebarView: View {
|
||||
var body: some View {
|
||||
VStack(spacing: 0) {
|
||||
VStack(alignment: .leading, spacing: 14) {
|
||||
SybilWordmark(size: 31)
|
||||
|
||||
VStack(spacing: 10) {
|
||||
sidebarActionButton(
|
||||
title: "New chat",
|
||||
@@ -151,29 +149,32 @@ struct SybilSidebarView: View {
|
||||
}
|
||||
.padding(10)
|
||||
}
|
||||
.refreshable {
|
||||
await viewModel.refreshCollectionsFromUser()
|
||||
}
|
||||
.tint(SybilTheme.primary)
|
||||
}
|
||||
|
||||
Divider()
|
||||
.overlay(SybilTheme.border)
|
||||
|
||||
Button {
|
||||
viewModel.openSettings()
|
||||
} label: {
|
||||
Label("Settings", systemImage: "gearshape")
|
||||
.font(.sybil(.subheadline, weight: .medium))
|
||||
.foregroundStyle(SybilTheme.text)
|
||||
.padding(.horizontal, 12)
|
||||
.padding(.vertical, 10)
|
||||
.frame(maxWidth: .infinity, alignment: .leading)
|
||||
.background(
|
||||
RoundedRectangle(cornerRadius: 12)
|
||||
.fill(viewModel.selectedItem == .settings ? SybilTheme.primary.opacity(0.28) : Color.clear)
|
||||
)
|
||||
}
|
||||
.buttonStyle(.plain)
|
||||
.padding(10)
|
||||
}
|
||||
.background(SybilTheme.panelGradient)
|
||||
.navigationTitle("")
|
||||
.navigationBarTitleDisplayMode(.inline)
|
||||
.toolbar {
|
||||
ToolbarItem(placement: .topBarLeading) {
|
||||
SybilWordmark(size: 18)
|
||||
}
|
||||
|
||||
ToolbarItem(placement: .topBarTrailing) {
|
||||
Button {
|
||||
viewModel.openSettings()
|
||||
} label: {
|
||||
Image(systemName: viewModel.selectedItem == .settings ? "gearshape.fill" : "gearshape")
|
||||
.font(.system(size: 16, weight: .semibold))
|
||||
.foregroundStyle(viewModel.selectedItem == .settings ? SybilTheme.primary : SybilTheme.textMuted)
|
||||
}
|
||||
.accessibilityLabel("Settings")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private func sidebarActionButton(
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import CoreText
|
||||
import Foundation
|
||||
import SwiftUI
|
||||
import UIKit
|
||||
|
||||
enum SybilFontRegistry {
|
||||
static func registerIfNeeded() {
|
||||
@@ -78,6 +79,23 @@ enum SybilTheme {
|
||||
static let userBubble = Color(red: 0.29, green: 0.13, blue: 0.65)
|
||||
static let danger = Color(red: 0.96, green: 0.32, blue: 0.40)
|
||||
|
||||
@MainActor static func applySystemAppearance() {
|
||||
let navAppearance = UINavigationBarAppearance()
|
||||
navAppearance.configureWithOpaqueBackground()
|
||||
navAppearance.backgroundColor = UIColor(red: 0.02, green: 0.02, blue: 0.05, alpha: 1)
|
||||
navAppearance.shadowColor = UIColor(red: 0.24, green: 0.20, blue: 0.38, alpha: 0.9)
|
||||
navAppearance.titleTextAttributes = [
|
||||
.foregroundColor: UIColor(red: 0.96, green: 0.94, blue: 1.0, alpha: 1)
|
||||
]
|
||||
navAppearance.largeTitleTextAttributes = navAppearance.titleTextAttributes
|
||||
|
||||
UINavigationBar.appearance().prefersLargeTitles = false
|
||||
UINavigationBar.appearance().standardAppearance = navAppearance
|
||||
UINavigationBar.appearance().compactAppearance = navAppearance
|
||||
UINavigationBar.appearance().scrollEdgeAppearance = navAppearance
|
||||
UINavigationBar.appearance().compactScrollEdgeAppearance = navAppearance
|
||||
}
|
||||
|
||||
static var backgroundGradient: LinearGradient {
|
||||
LinearGradient(
|
||||
colors: [
|
||||
|
||||
@@ -91,13 +91,20 @@ final class SybilViewModel {
|
||||
var errorMessage: String?
|
||||
|
||||
var composer = ""
|
||||
var composerAttachments: [ChatAttachment] = []
|
||||
var provider: Provider
|
||||
var modelCatalog: [Provider: ProviderModelInfo] = [:]
|
||||
var model: String
|
||||
|
||||
@ObservationIgnored
|
||||
private var hasBootstrapped = false
|
||||
private var pendingChatState: PendingChatState?
|
||||
@ObservationIgnored
|
||||
private var selectionTask: Task<Void, Never>?
|
||||
@ObservationIgnored
|
||||
private var chatBackgroundTask: SybilBackgroundTaskAssertion?
|
||||
@ObservationIgnored
|
||||
private let clientFactory: (APIConfiguration) -> any SybilAPIClienting
|
||||
|
||||
private let fallbackModels: [Provider: [String]] = [
|
||||
.openai: ["gpt-4.1-mini"],
|
||||
@@ -105,8 +112,14 @@ final class SybilViewModel {
|
||||
.xai: ["grok-3-mini"]
|
||||
]
|
||||
|
||||
init(settings: SybilSettingsStore = SybilSettingsStore()) {
|
||||
init(
|
||||
settings: SybilSettingsStore = SybilSettingsStore(),
|
||||
clientFactory: @escaping (APIConfiguration) -> any SybilAPIClienting = { configuration in
|
||||
SybilAPIClient(configuration: configuration)
|
||||
}
|
||||
) {
|
||||
self.settings = settings
|
||||
self.clientFactory = clientFactory
|
||||
self.provider = settings.preferredProvider
|
||||
self.model = settings.preferredModelByProvider[settings.preferredProvider] ?? "gpt-4.1-mini"
|
||||
}
|
||||
@@ -202,6 +215,19 @@ final class SybilViewModel {
|
||||
return draftKind != nil || selectedItem != nil
|
||||
}
|
||||
|
||||
var canSendComposer: Bool {
|
||||
if isSending {
|
||||
return false
|
||||
}
|
||||
|
||||
let content = composer.trimmingCharacters(in: .whitespacesAndNewlines)
|
||||
if isSearchMode {
|
||||
return !content.isEmpty
|
||||
}
|
||||
|
||||
return !content.isEmpty || !composerAttachments.isEmpty
|
||||
}
|
||||
|
||||
var displayedMessages: [Message] {
|
||||
let canonical = displayableMessages(selectedChat?.messages ?? [])
|
||||
guard let pending = pendingChatState else {
|
||||
@@ -267,6 +293,19 @@ final class SybilViewModel {
|
||||
return searches.first(where: { $0.id == searchID })
|
||||
}
|
||||
|
||||
var hasRefreshableSelection: Bool {
|
||||
guard draftKind == nil, let selectedItem else {
|
||||
return false
|
||||
}
|
||||
|
||||
switch selectedItem {
|
||||
case .chat, .search:
|
||||
return true
|
||||
case .settings:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func bootstrap() async {
|
||||
guard !hasBootstrapped else {
|
||||
return
|
||||
@@ -282,6 +321,7 @@ final class SybilViewModel {
|
||||
authError = nil
|
||||
errorMessage = nil
|
||||
pendingChatState = nil
|
||||
composerAttachments = []
|
||||
settings.persist()
|
||||
|
||||
SybilLog.info(
|
||||
@@ -358,6 +398,7 @@ final class SybilViewModel {
|
||||
selectedSearch = nil
|
||||
errorMessage = nil
|
||||
composer = ""
|
||||
composerAttachments = []
|
||||
}
|
||||
|
||||
func startNewSearch() {
|
||||
@@ -368,6 +409,7 @@ final class SybilViewModel {
|
||||
selectedSearch = nil
|
||||
errorMessage = nil
|
||||
composer = ""
|
||||
composerAttachments = []
|
||||
}
|
||||
|
||||
func openSettings() {
|
||||
@@ -377,6 +419,7 @@ final class SybilViewModel {
|
||||
selectedChat = nil
|
||||
selectedSearch = nil
|
||||
errorMessage = nil
|
||||
composerAttachments = []
|
||||
}
|
||||
|
||||
func select(_ selection: SidebarSelection) {
|
||||
@@ -384,6 +427,9 @@ final class SybilViewModel {
|
||||
draftKind = nil
|
||||
selectedItem = selection
|
||||
errorMessage = nil
|
||||
if case .search = selection {
|
||||
composerAttachments = []
|
||||
}
|
||||
|
||||
if case .settings = selection {
|
||||
selectedChat = nil
|
||||
@@ -397,6 +443,62 @@ final class SybilViewModel {
|
||||
}
|
||||
}
|
||||
|
||||
func selectPreviousSidebarItem() {
|
||||
selectAdjacentSidebarItem(offset: -1)
|
||||
}
|
||||
|
||||
func selectNextSidebarItem() {
|
||||
selectAdjacentSidebarItem(offset: 1)
|
||||
}
|
||||
|
||||
private func selectAdjacentSidebarItem(offset: Int) {
|
||||
let items = sidebarItems
|
||||
guard !items.isEmpty else {
|
||||
return
|
||||
}
|
||||
|
||||
let currentIndex = selectedItem.flatMap { selection in
|
||||
items.firstIndex { $0.selection == selection }
|
||||
}
|
||||
let startingIndex = currentIndex ?? (offset < 0 ? items.count : -1)
|
||||
let nextIndex = (startingIndex + offset + items.count) % items.count
|
||||
let nextSelection = items[nextIndex].selection
|
||||
|
||||
guard draftKind != nil || selectedItem != nextSelection else {
|
||||
return
|
||||
}
|
||||
|
||||
select(nextSelection)
|
||||
}
|
||||
|
||||
func refreshCollectionsFromUser() async {
|
||||
guard isAuthenticated else {
|
||||
return
|
||||
}
|
||||
|
||||
errorMessage = nil
|
||||
|
||||
guard draftKind == nil else {
|
||||
await refreshCollectionsPreservingDraft()
|
||||
return
|
||||
}
|
||||
|
||||
await refreshCollections(preferredSelection: selectedItem)
|
||||
}
|
||||
|
||||
func refreshSelectionFromUser() async {
|
||||
guard isAuthenticated, !isSending, !isCreatingSearchChat else {
|
||||
return
|
||||
}
|
||||
|
||||
guard selectedItem != nil, draftKind == nil else {
|
||||
return
|
||||
}
|
||||
|
||||
errorMessage = nil
|
||||
await refreshSelectionIfNeeded()
|
||||
}
|
||||
|
||||
func deleteItem(_ selection: SidebarSelection) async {
|
||||
guard isAuthenticated else {
|
||||
return
|
||||
@@ -428,13 +530,46 @@ final class SybilViewModel {
|
||||
await reconnect()
|
||||
}
|
||||
|
||||
func refreshVisibleContent(refreshCollections shouldRefreshCollections: Bool, refreshSelection shouldRefreshSelection: Bool) async {
|
||||
guard isAuthenticated, !isCheckingSession else {
|
||||
return
|
||||
}
|
||||
|
||||
guard shouldRefreshCollections || shouldRefreshSelection else {
|
||||
return
|
||||
}
|
||||
|
||||
SybilLog.info(
|
||||
SybilLog.ui,
|
||||
"Foreground refresh requested (collections=\(shouldRefreshCollections), selection=\(shouldRefreshSelection))"
|
||||
)
|
||||
|
||||
if shouldRefreshCollections {
|
||||
await refreshCollections(preferredSelection: selectedItem, refreshSelection: shouldRefreshSelection)
|
||||
return
|
||||
}
|
||||
|
||||
if shouldRefreshSelection {
|
||||
await refreshSelectionIfNeeded()
|
||||
}
|
||||
}
|
||||
|
||||
func sendComposer() async {
|
||||
let content = composer.trimmingCharacters(in: .whitespacesAndNewlines)
|
||||
guard !content.isEmpty, !isSending else {
|
||||
let attachments = composerAttachments
|
||||
|
||||
guard !isSending else {
|
||||
return
|
||||
}
|
||||
|
||||
if isSearchMode {
|
||||
guard !content.isEmpty else { return }
|
||||
} else if content.isEmpty && attachments.isEmpty {
|
||||
return
|
||||
}
|
||||
|
||||
composer = ""
|
||||
composerAttachments = []
|
||||
errorMessage = nil
|
||||
isSending = true
|
||||
|
||||
@@ -444,7 +579,7 @@ final class SybilViewModel {
|
||||
try await sendSearch(query: content)
|
||||
} else {
|
||||
SybilLog.info(SybilLog.ui, "Sending chat prompt")
|
||||
try await sendChat(content: content)
|
||||
try await sendChat(content: content, attachments: attachments)
|
||||
}
|
||||
} catch {
|
||||
errorMessage = normalizeAPIError(error)
|
||||
@@ -468,12 +603,38 @@ final class SybilViewModel {
|
||||
}
|
||||
}
|
||||
|
||||
pendingChatState = nil
|
||||
if !isSearchMode {
|
||||
composer = content
|
||||
composerAttachments = attachments
|
||||
pendingChatState = nil
|
||||
}
|
||||
}
|
||||
|
||||
isSending = false
|
||||
}
|
||||
|
||||
func appendComposerAttachments(_ attachments: [ChatAttachment]) throws {
|
||||
guard !attachments.isEmpty else {
|
||||
return
|
||||
}
|
||||
|
||||
guard !isSearchMode else {
|
||||
errorMessage = "Attachments are only available in chat mode."
|
||||
return
|
||||
}
|
||||
|
||||
if composerAttachments.count + attachments.count > SybilChatAttachmentSupport.maxAttachmentsPerMessage {
|
||||
throw ChatAttachmentError.tooManyAttachments(SybilChatAttachmentSupport.maxAttachmentsPerMessage)
|
||||
}
|
||||
|
||||
composerAttachments += attachments
|
||||
errorMessage = nil
|
||||
}
|
||||
|
||||
func removeComposerAttachment(id: String) {
|
||||
composerAttachments.removeAll { $0.id == id }
|
||||
}
|
||||
|
||||
func startChatFromSelectedSearch() async {
|
||||
guard let search = selectedSearch, !isCreatingSearchChat, !isSending else {
|
||||
return
|
||||
@@ -484,10 +645,11 @@ final class SybilViewModel {
|
||||
|
||||
do {
|
||||
let client = try client()
|
||||
let chat = try await client.createChatFromSearch(searchID: search.id)
|
||||
let chat = try await client.createChatFromSearch(searchID: search.id, title: nil)
|
||||
draftKind = nil
|
||||
pendingChatState = nil
|
||||
composer = ""
|
||||
composerAttachments = []
|
||||
|
||||
chats.removeAll(where: { $0.id == chat.id })
|
||||
chats.insert(chat, at: 0)
|
||||
@@ -504,7 +666,7 @@ final class SybilViewModel {
|
||||
isCreatingSearchChat = false
|
||||
}
|
||||
|
||||
private func loadInitialData(using client: SybilAPIClient) async {
|
||||
private func loadInitialData(using client: any SybilAPIClienting) async {
|
||||
isLoadingCollections = true
|
||||
errorMessage = nil
|
||||
|
||||
@@ -576,7 +738,34 @@ final class SybilViewModel {
|
||||
settings.persist()
|
||||
}
|
||||
|
||||
private func refreshCollections(preferredSelection: SidebarSelection?) async {
|
||||
private func refreshCollectionsPreservingDraft() async {
|
||||
isLoadingCollections = true
|
||||
|
||||
do {
|
||||
let client = try client()
|
||||
async let chatsValue = client.listChats()
|
||||
async let searchesValue = client.listSearches()
|
||||
let (nextChats, nextSearches) = try await (chatsValue, searchesValue)
|
||||
|
||||
chats = nextChats
|
||||
searches = nextSearches
|
||||
|
||||
SybilLog.info(
|
||||
SybilLog.app,
|
||||
"Refreshed collections for draft: \(nextChats.count) chats, \(nextSearches.count) searches"
|
||||
)
|
||||
} catch {
|
||||
errorMessage = normalizeAPIError(error)
|
||||
SybilLog.error(SybilLog.app, "Refresh draft collections failed", error: error)
|
||||
}
|
||||
|
||||
isLoadingCollections = false
|
||||
}
|
||||
|
||||
private func refreshCollections(
|
||||
preferredSelection: SidebarSelection?,
|
||||
refreshSelection: Bool = true
|
||||
) async {
|
||||
isLoadingCollections = true
|
||||
|
||||
do {
|
||||
@@ -608,7 +797,7 @@ final class SybilViewModel {
|
||||
selectedItem = sidebarItems.first?.selection
|
||||
}
|
||||
|
||||
if selectedItem != nil {
|
||||
if refreshSelection, selectedItem != nil {
|
||||
await refreshSelectionIfNeeded()
|
||||
}
|
||||
} catch {
|
||||
@@ -668,13 +857,14 @@ final class SybilViewModel {
|
||||
selectedSearch = nil
|
||||
}
|
||||
|
||||
private func sendChat(content: String) async throws {
|
||||
private func sendChat(content: String, attachments: [ChatAttachment]) async throws {
|
||||
let optimisticUser = Message(
|
||||
id: "temp-user-\(UUID().uuidString)",
|
||||
createdAt: Date(),
|
||||
role: .user,
|
||||
content: content,
|
||||
name: nil
|
||||
name: nil,
|
||||
metadata: SybilChatAttachmentSupport.metadataValue(for: attachments)
|
||||
)
|
||||
|
||||
let optimisticAssistant = Message(
|
||||
@@ -694,7 +884,7 @@ final class SybilViewModel {
|
||||
|
||||
var chatID = currentChatID
|
||||
if chatID == nil {
|
||||
let created = try await client.createChat()
|
||||
let created = try await client.createChat(title: nil)
|
||||
chatID = created.id
|
||||
draftKind = nil
|
||||
selectedItem = .chat(created.id)
|
||||
@@ -740,8 +930,8 @@ final class SybilViewModel {
|
||||
baseChat.messages
|
||||
.filter { !$0.isToolCallLog }
|
||||
.map {
|
||||
CompletionRequestMessage(role: $0.role, content: $0.content, name: $0.name)
|
||||
} + [CompletionRequestMessage(role: .user, content: content)]
|
||||
CompletionRequestMessage(role: $0.role, content: $0.content, name: $0.name, attachments: $0.attachments.isEmpty ? nil : $0.attachments)
|
||||
} + [CompletionRequestMessage(role: .user, content: content, attachments: attachments.isEmpty ? nil : attachments)]
|
||||
|
||||
let streamStatus = CompletionStreamStatus()
|
||||
|
||||
@@ -749,7 +939,8 @@ final class SybilViewModel {
|
||||
Task { [weak self] in
|
||||
guard let self else { return }
|
||||
do {
|
||||
let updated = try await client.suggestChatTitle(chatID: chatID, content: content)
|
||||
let titleSeed = !content.isEmpty ? content : SybilChatAttachmentSupport.attachmentSummary(attachments)
|
||||
let updated = try await client.suggestChatTitle(chatID: chatID, content: titleSeed.isEmpty ? "Uploaded files" : titleSeed)
|
||||
await MainActor.run {
|
||||
self.chats = self.chats.map { existing in
|
||||
if existing.id == updated.id {
|
||||
@@ -769,6 +960,15 @@ final class SybilViewModel {
|
||||
}
|
||||
}
|
||||
|
||||
chatBackgroundTask?.end()
|
||||
chatBackgroundTask = SybilBackgroundTaskAssertion(name: "Sybil Chat Response") {
|
||||
SybilLog.warning(SybilLog.app, "Chat response background time expired")
|
||||
}
|
||||
defer {
|
||||
chatBackgroundTask?.end()
|
||||
chatBackgroundTask = nil
|
||||
}
|
||||
|
||||
try await client.runCompletionStream(
|
||||
body: CompletionStreamRequest(
|
||||
chatId: chatID,
|
||||
@@ -1019,6 +1219,13 @@ final class SybilViewModel {
|
||||
return String(firstUserMessage.prefix(48))
|
||||
}
|
||||
|
||||
if let firstUserMessage = messages?.first(where: { $0.role == .user }) {
|
||||
let attachmentSummary = SybilChatAttachmentSupport.attachmentSummary(firstUserMessage.attachments)
|
||||
if !attachmentSummary.isEmpty {
|
||||
return String(attachmentSummary.prefix(48))
|
||||
}
|
||||
}
|
||||
|
||||
return "New chat"
|
||||
}
|
||||
|
||||
@@ -1105,7 +1312,7 @@ final class SybilViewModel {
|
||||
return false
|
||||
}
|
||||
|
||||
private func client() throws -> SybilAPIClient {
|
||||
private func client() throws -> any SybilAPIClienting {
|
||||
guard let baseURL = settings.normalizedAPIBaseURL else {
|
||||
throw APIError.invalidBaseURL
|
||||
}
|
||||
@@ -1115,8 +1322,8 @@ final class SybilViewModel {
|
||||
"Creating API client for \(baseURL.absoluteString) (token: \(settings.trimmedTokenOrNil == nil ? "none" : "set"))"
|
||||
)
|
||||
|
||||
return SybilAPIClient(
|
||||
configuration: APIConfiguration(
|
||||
return clientFactory(
|
||||
APIConfiguration(
|
||||
baseURL: baseURL,
|
||||
authToken: settings.trimmedTokenOrNil
|
||||
)
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,286 @@
|
||||
import CoreGraphics
|
||||
import Foundation
|
||||
import Testing
|
||||
@testable import Sybil
|
||||
|
||||
@Test func example() async throws {
|
||||
// Write your test here and use APIs like `#expect(...)` to check expected conditions.
|
||||
private struct MockClientCallSnapshot: Sendable {
|
||||
var listChats = 0
|
||||
var listSearches = 0
|
||||
var getChat = 0
|
||||
var getSearch = 0
|
||||
}
|
||||
|
||||
private struct UnexpectedClientCall: Error {}
|
||||
|
||||
private actor MockSybilClient: SybilAPIClienting {
|
||||
private let chatsResponse: [ChatSummary]
|
||||
private let searchesResponse: [SearchSummary]
|
||||
private let chatDetails: [String: ChatDetail]
|
||||
private let searchDetails: [String: SearchDetail]
|
||||
|
||||
private var snapshot = MockClientCallSnapshot()
|
||||
|
||||
init(
|
||||
chatsResponse: [ChatSummary] = [],
|
||||
searchesResponse: [SearchSummary] = [],
|
||||
chatDetails: [String: ChatDetail] = [:],
|
||||
searchDetails: [String: SearchDetail] = [:]
|
||||
) {
|
||||
self.chatsResponse = chatsResponse
|
||||
self.searchesResponse = searchesResponse
|
||||
self.chatDetails = chatDetails
|
||||
self.searchDetails = searchDetails
|
||||
}
|
||||
|
||||
func currentSnapshot() -> MockClientCallSnapshot {
|
||||
snapshot
|
||||
}
|
||||
|
||||
func verifySession() async throws -> AuthSession {
|
||||
AuthSession(authenticated: true, mode: "open")
|
||||
}
|
||||
|
||||
func listChats() async throws -> [ChatSummary] {
|
||||
snapshot.listChats += 1
|
||||
return chatsResponse
|
||||
}
|
||||
|
||||
func createChat(title: String?) async throws -> ChatSummary {
|
||||
throw UnexpectedClientCall()
|
||||
}
|
||||
|
||||
func getChat(chatID: String) async throws -> ChatDetail {
|
||||
snapshot.getChat += 1
|
||||
guard let detail = chatDetails[chatID] else {
|
||||
throw UnexpectedClientCall()
|
||||
}
|
||||
return detail
|
||||
}
|
||||
|
||||
func deleteChat(chatID: String) async throws {
|
||||
throw UnexpectedClientCall()
|
||||
}
|
||||
|
||||
func suggestChatTitle(chatID: String, content: String) async throws -> ChatSummary {
|
||||
throw UnexpectedClientCall()
|
||||
}
|
||||
|
||||
func listSearches() async throws -> [SearchSummary] {
|
||||
snapshot.listSearches += 1
|
||||
return searchesResponse
|
||||
}
|
||||
|
||||
func createSearch(title: String?, query: String?) async throws -> SearchSummary {
|
||||
throw UnexpectedClientCall()
|
||||
}
|
||||
|
||||
func getSearch(searchID: String) async throws -> SearchDetail {
|
||||
snapshot.getSearch += 1
|
||||
guard let detail = searchDetails[searchID] else {
|
||||
throw UnexpectedClientCall()
|
||||
}
|
||||
return detail
|
||||
}
|
||||
|
||||
func createChatFromSearch(searchID: String, title: String?) async throws -> ChatSummary {
|
||||
throw UnexpectedClientCall()
|
||||
}
|
||||
|
||||
func deleteSearch(searchID: String) async throws {
|
||||
throw UnexpectedClientCall()
|
||||
}
|
||||
|
||||
func listModels() async throws -> ModelCatalogResponse {
|
||||
ModelCatalogResponse(providers: [:])
|
||||
}
|
||||
|
||||
func runCompletionStream(
|
||||
body: CompletionStreamRequest,
|
||||
onEvent: @escaping @Sendable (CompletionStreamEvent) async -> Void
|
||||
) async throws {
|
||||
throw UnexpectedClientCall()
|
||||
}
|
||||
|
||||
func runSearchStream(
|
||||
searchID: String,
|
||||
body: SearchRunRequest,
|
||||
onEvent: @escaping @Sendable (SearchStreamEvent) async -> Void
|
||||
) async throws {
|
||||
throw UnexpectedClientCall()
|
||||
}
|
||||
}
|
||||
|
||||
@MainActor
|
||||
private func testSettings(named name: String) -> SybilSettingsStore {
|
||||
let defaults = UserDefaults(suiteName: name)!
|
||||
defaults.removePersistentDomain(forName: name)
|
||||
let settings = SybilSettingsStore(defaults: defaults)
|
||||
settings.apiBaseURL = "http://127.0.0.1:8787"
|
||||
return settings
|
||||
}
|
||||
|
||||
private func makeChatSummary(id: String, date: Date) -> ChatSummary {
|
||||
ChatSummary(
|
||||
id: id,
|
||||
title: "Chat \(id)",
|
||||
createdAt: date,
|
||||
updatedAt: date,
|
||||
initiatedProvider: .openai,
|
||||
initiatedModel: "gpt-4.1-mini",
|
||||
lastUsedProvider: .openai,
|
||||
lastUsedModel: "gpt-4.1-mini"
|
||||
)
|
||||
}
|
||||
|
||||
private func makeChatDetail(id: String, date: Date, body: String) -> ChatDetail {
|
||||
ChatDetail(
|
||||
id: id,
|
||||
title: "Chat \(id)",
|
||||
createdAt: date,
|
||||
updatedAt: date,
|
||||
initiatedProvider: .openai,
|
||||
initiatedModel: "gpt-4.1-mini",
|
||||
lastUsedProvider: .openai,
|
||||
lastUsedModel: "gpt-4.1-mini",
|
||||
messages: [
|
||||
Message(
|
||||
id: "message-\(id)",
|
||||
createdAt: date,
|
||||
role: .assistant,
|
||||
content: body,
|
||||
name: nil
|
||||
)
|
||||
]
|
||||
)
|
||||
}
|
||||
|
||||
private func makeSearchSummary(id: String, date: Date) -> SearchSummary {
|
||||
SearchSummary(
|
||||
id: id,
|
||||
title: "Search \(id)",
|
||||
query: "query-\(id)",
|
||||
createdAt: date,
|
||||
updatedAt: date
|
||||
)
|
||||
}
|
||||
|
||||
private func makeSearchDetail(id: String, date: Date, answer: String) -> SearchDetail {
|
||||
SearchDetail(
|
||||
id: id,
|
||||
title: "Search \(id)",
|
||||
query: "query-\(id)",
|
||||
createdAt: date,
|
||||
updatedAt: date,
|
||||
requestId: "request-\(id)",
|
||||
latencyMs: 42,
|
||||
error: nil,
|
||||
answerText: answer,
|
||||
answerRequestId: "answer-\(id)",
|
||||
answerCitations: [],
|
||||
answerError: nil,
|
||||
results: []
|
||||
)
|
||||
}
|
||||
|
||||
@MainActor
|
||||
@Test func normalizedAPIBaseURLPreservesExplicitAPIPath() async throws {
|
||||
let defaults = UserDefaults(suiteName: #function)!
|
||||
defaults.removePersistentDomain(forName: #function)
|
||||
|
||||
let settings = SybilSettingsStore(defaults: defaults)
|
||||
settings.apiBaseURL = "https://sybil.bajor.cloud/api/"
|
||||
|
||||
#expect(settings.normalizedAPIBaseURL?.absoluteString == "https://sybil.bajor.cloud/api")
|
||||
}
|
||||
|
||||
@MainActor
|
||||
@Test func normalizedAPIBaseURLTrimsWhitespaceAndTrailingSlashes() async throws {
|
||||
let defaults = UserDefaults(suiteName: #function)!
|
||||
defaults.removePersistentDomain(forName: #function)
|
||||
|
||||
let settings = SybilSettingsStore(defaults: defaults)
|
||||
settings.apiBaseURL = " http://127.0.0.1:8787/// "
|
||||
|
||||
#expect(settings.normalizedAPIBaseURL?.absoluteString == "http://127.0.0.1:8787")
|
||||
}
|
||||
|
||||
@MainActor
|
||||
@Test func foregroundListRefreshDoesNotReloadHiddenSelection() async throws {
|
||||
let date = Date(timeIntervalSince1970: 1_700_000_000)
|
||||
let chat = makeChatSummary(id: "chat-1", date: date)
|
||||
let search = makeSearchSummary(id: "search-1", date: date)
|
||||
let client = MockSybilClient(
|
||||
chatsResponse: [chat],
|
||||
searchesResponse: [search],
|
||||
chatDetails: ["chat-1": makeChatDetail(id: "chat-1", date: date, body: "fresh chat body")]
|
||||
)
|
||||
let viewModel = SybilViewModel(settings: testSettings(named: #function)) { _ in client }
|
||||
viewModel.isAuthenticated = true
|
||||
viewModel.isCheckingSession = false
|
||||
viewModel.selectedItem = .chat("chat-1")
|
||||
|
||||
await viewModel.refreshVisibleContent(refreshCollections: true, refreshSelection: false)
|
||||
|
||||
let snapshot = await client.currentSnapshot()
|
||||
#expect(snapshot.listChats == 1)
|
||||
#expect(snapshot.listSearches == 1)
|
||||
#expect(snapshot.getChat == 0)
|
||||
#expect(snapshot.getSearch == 0)
|
||||
#expect(viewModel.selectedItem == .chat("chat-1"))
|
||||
}
|
||||
|
||||
@MainActor
|
||||
@Test func foregroundChatRefreshReloadsSelectedTranscript() async throws {
|
||||
let date = Date(timeIntervalSince1970: 1_700_000_100)
|
||||
let detail = makeChatDetail(id: "chat-2", date: date, body: "refreshed transcript")
|
||||
let client = MockSybilClient(chatDetails: ["chat-2": detail])
|
||||
let viewModel = SybilViewModel(settings: testSettings(named: #function)) { _ in client }
|
||||
viewModel.isAuthenticated = true
|
||||
viewModel.isCheckingSession = false
|
||||
viewModel.selectedItem = .chat("chat-2")
|
||||
|
||||
await viewModel.refreshVisibleContent(refreshCollections: false, refreshSelection: true)
|
||||
|
||||
let snapshot = await client.currentSnapshot()
|
||||
#expect(snapshot.listChats == 0)
|
||||
#expect(snapshot.listSearches == 0)
|
||||
#expect(snapshot.getChat == 1)
|
||||
#expect(viewModel.selectedChat?.messages.first?.content == "refreshed transcript")
|
||||
}
|
||||
|
||||
@MainActor
|
||||
@Test func foregroundSearchRefreshReloadsSelectedSearch() async throws {
|
||||
let date = Date(timeIntervalSince1970: 1_700_000_200)
|
||||
let detail = makeSearchDetail(id: "search-2", date: date, answer: "fresh answer")
|
||||
let client = MockSybilClient(searchDetails: ["search-2": detail])
|
||||
let viewModel = SybilViewModel(settings: testSettings(named: #function)) { _ in client }
|
||||
viewModel.isAuthenticated = true
|
||||
viewModel.isCheckingSession = false
|
||||
viewModel.selectedItem = .search("search-2")
|
||||
|
||||
await viewModel.refreshVisibleContent(refreshCollections: false, refreshSelection: true)
|
||||
|
||||
let snapshot = await client.currentSnapshot()
|
||||
#expect(snapshot.listChats == 0)
|
||||
#expect(snapshot.listSearches == 0)
|
||||
#expect(snapshot.getSearch == 1)
|
||||
#expect(viewModel.selectedSearch?.answerText == "fresh answer")
|
||||
}
|
||||
|
||||
@Test func newChatSwipeMetricsClampProgressAndLatch() async throws {
|
||||
let width: CGFloat = 390
|
||||
let maxTravel = NewChatSwipeMetrics.maxTravel(for: width)
|
||||
let latchDistance = NewChatSwipeMetrics.latchDistance(for: width)
|
||||
|
||||
#expect(NewChatSwipeMetrics.clampedOffset(for: -500, width: width) == -maxTravel)
|
||||
#expect(NewChatSwipeMetrics.progress(for: -maxTravel / 2, width: width) == 0.5)
|
||||
#expect(NewChatSwipeMetrics.blurRadius(for: -maxTravel, width: width) == 10)
|
||||
#expect(NewChatSwipeMetrics.isLatched(offset: -(latchDistance + 1), width: width))
|
||||
#expect(!NewChatSwipeMetrics.isLatched(offset: -(latchDistance - 1), width: width))
|
||||
#expect(NewChatSwipeMetrics.isLatched(offset: -(latchDistance - 1), width: width, isCurrentlyLatched: true))
|
||||
#expect(!NewChatSwipeMetrics.isLatched(offset: -(NewChatSwipeMetrics.latchReleaseDistance(for: width) - 1), width: width, isCurrentlyLatched: true))
|
||||
#expect(NewChatSwipeMetrics.shouldBeginPan(leftwardTravel: 24, verticalTravel: 8, leftwardVelocity: 0, verticalVelocity: 0))
|
||||
#expect(NewChatSwipeMetrics.shouldBeginPan(leftwardTravel: 2, verticalTravel: 1, leftwardVelocity: 120, verticalVelocity: 30))
|
||||
#expect(!NewChatSwipeMetrics.shouldBeginPan(leftwardTravel: 8, verticalTravel: 24, leftwardVelocity: 20, verticalVelocity: 140))
|
||||
#expect(!NewChatSwipeMetrics.shouldBeginPan(leftwardTravel: 18, verticalTravel: 18, leftwardVelocity: 80, verticalVelocity: 90))
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Sybil Server
|
||||
|
||||
Backend API for:
|
||||
- LLM multiplexer (OpenAI / Anthropic / xAI (Grok))
|
||||
- LLM multiplexer (OpenAI Responses / Anthropic / xAI Chat Completions-compatible Grok)
|
||||
- Personal chat database (chats/messages + LLM call log)
|
||||
|
||||
## Stack
|
||||
@@ -44,6 +44,19 @@ If `ADMIN_TOKEN` is not set, the server runs in open mode (dev).
|
||||
- `ANTHROPIC_API_KEY`
|
||||
- `XAI_API_KEY`
|
||||
- `EXA_API_KEY`
|
||||
- `CHAT_WEB_SEARCH_ENGINE` (`exa` by default, or `searxng` for chat tool calls only)
|
||||
- `SEARXNG_BASE_URL` (required when `CHAT_WEB_SEARCH_ENGINE=searxng`; instance must allow `format=json`)
|
||||
- `CHAT_MAX_TOOL_ROUNDS` (`100` by default; maximum model/tool result cycles per chat completion)
|
||||
- `CHAT_CODEX_TOOL_ENABLED` (`false` by default; enables the `codex_exec` chat tool for OpenAI/xAI)
|
||||
- `CHAT_CODEX_REMOTE_HOST` (required when Codex tool is enabled; SSH host/IP or `user@host`)
|
||||
- `CHAT_CODEX_REMOTE_USER` (optional SSH user when host does not include one)
|
||||
- `CHAT_CODEX_REMOTE_PORT` (`22` by default)
|
||||
- `CHAT_CODEX_REMOTE_WORKDIR` (`/workspace/sybil-codex` by default; created and reused on the devbox)
|
||||
- `CHAT_CODEX_SSH_KEY_PATH` (recommended: path to a read-only mounted private key)
|
||||
- `CHAT_CODEX_SSH_PRIVATE_KEY_B64` (optional fallback private key delivery)
|
||||
- `CHAT_CODEX_EXEC_TIMEOUT_MS` (`600000` by default)
|
||||
- `CHAT_SHELL_TOOL_ENABLED` (`false` by default; enables the `shell_exec` chat tool for OpenAI/xAI on the same devbox)
|
||||
- `CHAT_SHELL_EXEC_TIMEOUT_MS` (`120000` by default)
|
||||
|
||||
## API
|
||||
- `GET /health`
|
||||
|
||||
@@ -11,6 +11,7 @@
|
||||
"prebuild": "node scripts/ensure-prisma-client.mjs",
|
||||
"dev": "node ./node_modules/tsx/dist/cli.mjs watch src/index.ts",
|
||||
"start": "node dist/index.js",
|
||||
"test": "node --test --import tsx tests/**/*.test.ts",
|
||||
"build": "node ./node_modules/typescript/bin/tsc -p tsconfig.json",
|
||||
"prisma:generate": "node ./node_modules/prisma/build/index.js generate",
|
||||
"db:migrate": "node ./node_modules/prisma/build/index.js migrate dev",
|
||||
|
||||
@@ -1,5 +1,52 @@
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { config as loadDotenv } from "dotenv";
|
||||
import { z } from "zod";
|
||||
import "dotenv/config";
|
||||
|
||||
loadDotenv({ quiet: true });
|
||||
loadDotenv({ path: path.resolve(path.dirname(fileURLToPath(import.meta.url)), "../../.env"), quiet: true });
|
||||
|
||||
const OptionalUrlSchema = z.preprocess(
|
||||
(value) => (typeof value === "string" && value.trim() === "" ? undefined : value),
|
||||
z.string().trim().url().optional()
|
||||
);
|
||||
|
||||
const ChatWebSearchEngineSchema = z.preprocess(
|
||||
(value) => {
|
||||
if (typeof value !== "string") return value;
|
||||
const trimmed = value.trim();
|
||||
return trimmed ? trimmed.toLowerCase() : undefined;
|
||||
},
|
||||
z.enum(["exa", "searxng"]).default("exa")
|
||||
);
|
||||
|
||||
const BooleanFlagSchema = z.preprocess((value) => {
|
||||
if (typeof value !== "string") return value;
|
||||
const normalized = value.trim().toLowerCase();
|
||||
if (!normalized) return undefined;
|
||||
if (["1", "true", "yes", "on"].includes(normalized)) return true;
|
||||
if (["0", "false", "no", "off"].includes(normalized)) return false;
|
||||
return value;
|
||||
}, z.boolean().default(false));
|
||||
|
||||
const OptionalTrimmedStringSchema = z.preprocess(
|
||||
(value) => (typeof value === "string" && value.trim() === "" ? undefined : value),
|
||||
z.string().trim().min(1).optional()
|
||||
);
|
||||
|
||||
function defaultedPositiveInt(defaultValue: number) {
|
||||
return z.preprocess(
|
||||
(value) => (typeof value === "string" && value.trim() === "" ? undefined : value),
|
||||
z.coerce.number().int().positive().default(defaultValue)
|
||||
);
|
||||
}
|
||||
|
||||
function defaultedTrimmedString(defaultValue: string) {
|
||||
return z.preprocess(
|
||||
(value) => (typeof value === "string" && value.trim() === "" ? undefined : value),
|
||||
z.string().trim().min(1).default(defaultValue)
|
||||
);
|
||||
}
|
||||
|
||||
const EnvSchema = z.object({
|
||||
PORT: z.coerce.number().int().positive().default(8787),
|
||||
@@ -13,6 +60,42 @@ const EnvSchema = z.object({
|
||||
ANTHROPIC_API_KEY: z.string().optional(),
|
||||
XAI_API_KEY: z.string().optional(),
|
||||
EXA_API_KEY: z.string().optional(),
|
||||
|
||||
// Chat-mode web_search tool configuration. Search mode remains Exa-only for now.
|
||||
CHAT_WEB_SEARCH_ENGINE: ChatWebSearchEngineSchema,
|
||||
SEARXNG_BASE_URL: OptionalUrlSchema,
|
||||
CHAT_MAX_TOOL_ROUNDS: defaultedPositiveInt(100),
|
||||
|
||||
// Optional chat-mode Codex tool. When enabled, the server SSHes into a remote
|
||||
// devbox and runs `codex exec` in a persistent scratch directory there.
|
||||
CHAT_CODEX_TOOL_ENABLED: BooleanFlagSchema,
|
||||
CHAT_CODEX_REMOTE_HOST: OptionalTrimmedStringSchema,
|
||||
CHAT_CODEX_REMOTE_USER: OptionalTrimmedStringSchema,
|
||||
CHAT_CODEX_REMOTE_PORT: defaultedPositiveInt(22),
|
||||
CHAT_CODEX_REMOTE_WORKDIR: defaultedTrimmedString("/workspace/sybil-codex"),
|
||||
CHAT_CODEX_SSH_KEY_PATH: OptionalTrimmedStringSchema,
|
||||
CHAT_CODEX_SSH_PRIVATE_KEY_B64: OptionalTrimmedStringSchema,
|
||||
CHAT_CODEX_EXEC_TIMEOUT_MS: defaultedPositiveInt(600_000),
|
||||
|
||||
// Optional arbitrary shell tool that runs only on the configured devbox.
|
||||
CHAT_SHELL_TOOL_ENABLED: BooleanFlagSchema,
|
||||
CHAT_SHELL_EXEC_TIMEOUT_MS: defaultedPositiveInt(120_000),
|
||||
}).superRefine((value, ctx) => {
|
||||
if (value.CHAT_WEB_SEARCH_ENGINE === "searxng" && !value.SEARXNG_BASE_URL) {
|
||||
ctx.addIssue({
|
||||
code: "custom",
|
||||
path: ["SEARXNG_BASE_URL"],
|
||||
message: "SEARXNG_BASE_URL is required when CHAT_WEB_SEARCH_ENGINE=searxng",
|
||||
});
|
||||
}
|
||||
|
||||
if ((value.CHAT_CODEX_TOOL_ENABLED || value.CHAT_SHELL_TOOL_ENABLED) && !value.CHAT_CODEX_REMOTE_HOST) {
|
||||
ctx.addIssue({
|
||||
code: "custom",
|
||||
path: ["CHAT_CODEX_REMOTE_HOST"],
|
||||
message: "CHAT_CODEX_REMOTE_HOST is required when CHAT_CODEX_TOOL_ENABLED=true or CHAT_SHELL_TOOL_ENABLED=true",
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
export type Env = z.infer<typeof EnvSchema>;
|
||||
|
||||
@@ -9,6 +9,7 @@ import { warmModelCatalog } from "./llm/model-catalog.js";
|
||||
import { registerRoutes } from "./routes.js";
|
||||
|
||||
const app = Fastify({
|
||||
bodyLimit: 32 * 1024 * 1024,
|
||||
disableRequestLogging: true,
|
||||
logger: {
|
||||
transport: {
|
||||
|
||||
@@ -1,15 +1,33 @@
|
||||
import { execFile } from "node:child_process";
|
||||
import { mkdtemp, rm, writeFile } from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { promisify } from "node:util";
|
||||
import { convert as htmlToText } from "html-to-text";
|
||||
import type OpenAI from "openai";
|
||||
import { z } from "zod";
|
||||
import { env } from "../env.js";
|
||||
import { exaClient } from "../search/exa.js";
|
||||
import { searchSearxng } from "../search/searxng.js";
|
||||
import { buildOpenAIConversationMessage, buildOpenAIResponsesInputMessage } from "./message-content.js";
|
||||
import type { ChatMessage } from "./types.js";
|
||||
|
||||
const MAX_TOOL_ROUNDS = 4;
|
||||
const MAX_TOOL_ROUNDS = env.CHAT_MAX_TOOL_ROUNDS;
|
||||
const DEFAULT_WEB_RESULTS = 5;
|
||||
const MAX_WEB_RESULTS = 10;
|
||||
const DEFAULT_FETCH_MAX_CHARACTERS = 12_000;
|
||||
const MAX_FETCH_MAX_CHARACTERS = 50_000;
|
||||
const FETCH_TIMEOUT_MS = 12_000;
|
||||
const MAX_CODEX_PROMPT_CHARACTERS = 60_000;
|
||||
const DEFAULT_CODEX_MAX_OUTPUT_CHARACTERS = 24_000;
|
||||
const MAX_CODEX_MAX_OUTPUT_CHARACTERS = 80_000;
|
||||
const MAX_SHELL_COMMAND_CHARACTERS = 20_000;
|
||||
const DEFAULT_SHELL_MAX_OUTPUT_CHARACTERS = 24_000;
|
||||
const MAX_SHELL_MAX_OUTPUT_CHARACTERS = 80_000;
|
||||
const REMOTE_EXEC_MAX_BUFFER_BYTES = 1_000_000;
|
||||
const MAX_DANGLING_TOOL_INTENT_RETRIES = 1;
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
|
||||
const WebSearchArgsSchema = z
|
||||
.object({
|
||||
@@ -21,6 +39,8 @@ const WebSearchArgsSchema = z
|
||||
})
|
||||
.strict();
|
||||
|
||||
type WebSearchArgs = z.infer<typeof WebSearchArgsSchema>;
|
||||
|
||||
const FetchUrlArgsSchema = z
|
||||
.object({
|
||||
url: z.string().trim().url(),
|
||||
@@ -28,7 +48,79 @@ const FetchUrlArgsSchema = z
|
||||
})
|
||||
.strict();
|
||||
|
||||
const CHAT_TOOLS: any[] = [
|
||||
const CodexExecArgsSchema = z
|
||||
.object({
|
||||
prompt: z.string().trim().min(1).max(MAX_CODEX_PROMPT_CHARACTERS),
|
||||
maxCharacters: z.coerce.number().int().min(1_000).max(MAX_CODEX_MAX_OUTPUT_CHARACTERS).optional(),
|
||||
})
|
||||
.strict();
|
||||
|
||||
type CodexExecArgs = z.infer<typeof CodexExecArgsSchema>;
|
||||
|
||||
const ShellExecArgsSchema = z
|
||||
.object({
|
||||
command: z.string().trim().min(1).max(MAX_SHELL_COMMAND_CHARACTERS),
|
||||
maxCharacters: z.coerce.number().int().min(1_000).max(MAX_SHELL_MAX_OUTPUT_CHARACTERS).optional(),
|
||||
})
|
||||
.strict();
|
||||
|
||||
type ShellExecArgs = z.infer<typeof ShellExecArgsSchema>;
|
||||
|
||||
const CODEX_EXEC_TOOL = {
|
||||
type: "function",
|
||||
function: {
|
||||
name: "codex_exec",
|
||||
description:
|
||||
"Delegate a coding, terminal, or multi-step software task to a persistent remote Codex CLI workspace. Use for complex code changes, repository inspection, running programs/tests, debugging build failures, or other tasks that need a real shell. The task runs non-interactively; the remote Codex instance must make reasonable assumptions, complete the task, and return a final summary with relevant stdout/stderr.",
|
||||
parameters: {
|
||||
type: "object",
|
||||
properties: {
|
||||
prompt: {
|
||||
type: "string",
|
||||
description:
|
||||
"A complete, self-contained instruction for the remote Codex instance. Include the goal, relevant context, constraints, and what result to report back.",
|
||||
},
|
||||
maxCharacters: {
|
||||
type: "integer",
|
||||
minimum: 1_000,
|
||||
maximum: MAX_CODEX_MAX_OUTPUT_CHARACTERS,
|
||||
description: "Maximum stdout/stderr characters returned to the model (default 24000).",
|
||||
},
|
||||
},
|
||||
required: ["prompt"],
|
||||
additionalProperties: false,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const SHELL_EXEC_TOOL = {
|
||||
type: "function",
|
||||
function: {
|
||||
name: "shell_exec",
|
||||
description:
|
||||
"Run an arbitrary non-interactive shell command on the configured remote devbox, starting in the persistent scratch workspace. Use for quick Python scripts, calculations, file inspection, package/tool checks, tests, and command-line work that needs a real shell. This does not run inside the Sybil server container.",
|
||||
parameters: {
|
||||
type: "object",
|
||||
properties: {
|
||||
command: {
|
||||
type: "string",
|
||||
description:
|
||||
"Shell command to run on the devbox. The command is executed with bash -lc when bash exists, otherwise sh -lc, starting in the persistent scratch workspace.",
|
||||
},
|
||||
maxCharacters: {
|
||||
type: "integer",
|
||||
minimum: 1_000,
|
||||
maximum: MAX_SHELL_MAX_OUTPUT_CHARACTERS,
|
||||
description: "Maximum stdout/stderr characters returned to the model (default 24000).",
|
||||
},
|
||||
},
|
||||
required: ["command"],
|
||||
additionalProperties: false,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const BASE_CHAT_TOOLS: any[] = [
|
||||
{
|
||||
type: "function",
|
||||
function: {
|
||||
@@ -90,10 +182,34 @@ const CHAT_TOOLS: any[] = [
|
||||
},
|
||||
];
|
||||
|
||||
const CHAT_TOOLS: any[] = [
|
||||
...BASE_CHAT_TOOLS,
|
||||
...(env.CHAT_CODEX_TOOL_ENABLED ? [CODEX_EXEC_TOOL] : []),
|
||||
...(env.CHAT_SHELL_TOOL_ENABLED ? [SHELL_EXEC_TOOL] : []),
|
||||
];
|
||||
|
||||
const RESPONSES_CHAT_TOOLS: any[] = CHAT_TOOLS.map((tool) => {
|
||||
if (tool?.type !== "function") return tool;
|
||||
return {
|
||||
type: "function",
|
||||
name: tool.function.name,
|
||||
description: tool.function.description,
|
||||
parameters: tool.function.parameters,
|
||||
strict: false,
|
||||
};
|
||||
});
|
||||
|
||||
export const CHAT_TOOL_SYSTEM_PROMPT =
|
||||
"You can use tools to gather up-to-date web information when needed. " +
|
||||
"Use web_search for discovery and recent facts, and fetch_url to read the full content of a specific page. " +
|
||||
"Prefer tools when the user asks for current events, verification, sources, or details you do not already have. " +
|
||||
"When you decide tool use is needed, call the tool immediately in the same response; do not say you are running a tool unless you actually call it. " +
|
||||
(env.CHAT_CODEX_TOOL_ENABLED
|
||||
? "Use codex_exec when a request needs substantial coding work, repository inspection, shell commands, tests, debugging, or another complex task suited to a persistent Codex workspace. Provide codex_exec a complete prompt with the goal, constraints, assumptions, and expected report-back format. Never ask codex_exec to wait for user input or run interactive commands. "
|
||||
: "") +
|
||||
(env.CHAT_SHELL_TOOL_ENABLED
|
||||
? "Use shell_exec for direct non-interactive command-line work on the remote devbox, including quick Python programs, calculations, file inspection, running tests, and small scripts. "
|
||||
: "") +
|
||||
"Do not fabricate tool outputs; reason only from provided tool results.";
|
||||
|
||||
type ToolRunOutcome = {
|
||||
@@ -187,6 +303,24 @@ function buildToolSummary(name: string, args: Record<string, unknown>, status: "
|
||||
return url ? `Fetching URL ${toSingleLine(url, 140)} failed.${errSuffix}` : `Fetching URL failed.${errSuffix}`;
|
||||
}
|
||||
|
||||
if (name === "codex_exec") {
|
||||
const prompt = typeof args.prompt === "string" ? args.prompt.trim() : "";
|
||||
if (status === "completed") {
|
||||
return prompt ? `Ran Codex task: '${toSingleLine(prompt, 120)}'.` : "Ran Codex task.";
|
||||
}
|
||||
return prompt ? `Codex task '${toSingleLine(prompt, 120)}' failed.${errSuffix}` : `Codex task failed.${errSuffix}`;
|
||||
}
|
||||
|
||||
if (name === "shell_exec") {
|
||||
const command = typeof args.command === "string" ? args.command.trim() : "";
|
||||
if (status === "completed") {
|
||||
return command ? `Ran devbox shell command: '${toSingleLine(command, 120)}'.` : "Ran devbox shell command.";
|
||||
}
|
||||
return command
|
||||
? `Devbox shell command '${toSingleLine(command, 120)}' failed.${errSuffix}`
|
||||
: `Devbox shell command failed.${errSuffix}`;
|
||||
}
|
||||
|
||||
if (status === "completed") {
|
||||
return `Ran tool '${name}'.`;
|
||||
}
|
||||
@@ -246,29 +380,18 @@ function extractHtmlTitle(html: string) {
|
||||
}
|
||||
|
||||
function normalizeIncomingMessages(messages: ChatMessage[]) {
|
||||
const normalized = messages.map((m) => {
|
||||
if (m.role === "tool") {
|
||||
const name = m.name?.trim() || "tool";
|
||||
return {
|
||||
role: "user",
|
||||
content: `Tool output (${name}):\n${m.content}`,
|
||||
};
|
||||
}
|
||||
if (m.role === "assistant" || m.role === "system" || m.role === "user") {
|
||||
const out: any = { role: m.role, content: m.content };
|
||||
if (m.name && (m.role === "assistant" || m.role === "user")) {
|
||||
out.name = m.name;
|
||||
}
|
||||
return out;
|
||||
}
|
||||
return { role: "user", content: m.content };
|
||||
});
|
||||
const normalized = messages.map((message) => buildOpenAIConversationMessage(message));
|
||||
|
||||
return [{ role: "system", content: CHAT_TOOL_SYSTEM_PROMPT }, ...normalized];
|
||||
}
|
||||
|
||||
async function runWebSearchTool(input: unknown): Promise<ToolRunOutcome> {
|
||||
const args = WebSearchArgsSchema.parse(input);
|
||||
function normalizeIncomingResponsesInput(messages: ChatMessage[]) {
|
||||
const normalized = messages.map((message) => buildOpenAIResponsesInputMessage(message));
|
||||
|
||||
return [{ role: "system", content: CHAT_TOOL_SYSTEM_PROMPT }, ...normalized];
|
||||
}
|
||||
|
||||
async function runExaWebSearchTool(args: WebSearchArgs): Promise<ToolRunOutcome> {
|
||||
const exa = exaClient();
|
||||
const response = await exa.search(args.query, {
|
||||
type: args.type ?? "auto",
|
||||
@@ -292,6 +415,7 @@ async function runWebSearchTool(input: unknown): Promise<ToolRunOutcome> {
|
||||
const results = Array.isArray(response?.results) ? response.results : [];
|
||||
return {
|
||||
ok: true,
|
||||
searchEngine: "exa",
|
||||
query: args.query,
|
||||
requestId: response?.requestId ?? null,
|
||||
results: results.map((result: any, index: number) => ({
|
||||
@@ -309,6 +433,40 @@ async function runWebSearchTool(input: unknown): Promise<ToolRunOutcome> {
|
||||
};
|
||||
}
|
||||
|
||||
async function runSearxngWebSearchTool(args: WebSearchArgs): Promise<ToolRunOutcome> {
|
||||
const response = await searchSearxng(args.query, {
|
||||
numResults: args.numResults ?? DEFAULT_WEB_RESULTS,
|
||||
includeDomains: args.includeDomains,
|
||||
excludeDomains: args.excludeDomains,
|
||||
});
|
||||
|
||||
return {
|
||||
ok: true,
|
||||
searchEngine: "searxng",
|
||||
query: args.query,
|
||||
requestId: response.requestId,
|
||||
results: response.results.map((result, index) => ({
|
||||
rank: index + 1,
|
||||
title: result.title,
|
||||
url: result.url,
|
||||
publishedDate: result.publishedDate,
|
||||
author: null,
|
||||
summary: result.summary,
|
||||
text: result.text,
|
||||
highlights: result.summary ? [clipText(result.summary, 280)] : [],
|
||||
engines: result.engines,
|
||||
})),
|
||||
};
|
||||
}
|
||||
|
||||
async function runWebSearchTool(input: unknown): Promise<ToolRunOutcome> {
|
||||
const args = WebSearchArgsSchema.parse(input);
|
||||
if (env.CHAT_WEB_SEARCH_ENGINE === "searxng") {
|
||||
return runSearxngWebSearchTool(args);
|
||||
}
|
||||
return runExaWebSearchTool(args);
|
||||
}
|
||||
|
||||
function assertSafeFetchUrl(urlRaw: string) {
|
||||
const parsed = new URL(urlRaw);
|
||||
if (parsed.protocol !== "http:" && parsed.protocol !== "https:") {
|
||||
@@ -379,9 +537,228 @@ async function runFetchUrlTool(input: unknown): Promise<ToolRunOutcome> {
|
||||
};
|
||||
}
|
||||
|
||||
function shellQuote(value: string) {
|
||||
return `'${value.replace(/'/g, `'\\''`)}'`;
|
||||
}
|
||||
|
||||
function buildDevboxSshTarget() {
|
||||
const host = env.CHAT_CODEX_REMOTE_HOST;
|
||||
if (!host) {
|
||||
throw new Error("CHAT_CODEX_REMOTE_HOST not set");
|
||||
}
|
||||
if (!env.CHAT_CODEX_REMOTE_USER || host.includes("@")) {
|
||||
return host;
|
||||
}
|
||||
return `${env.CHAT_CODEX_REMOTE_USER}@${host}`;
|
||||
}
|
||||
|
||||
function buildRemoteCodexCommand(prompt: string) {
|
||||
const workdir = env.CHAT_CODEX_REMOTE_WORKDIR.trim();
|
||||
const wrappedPrompt = [
|
||||
"You are running in a non-interactive batch environment.",
|
||||
"",
|
||||
"Rules:",
|
||||
"- Do not ask questions or wait for user input.",
|
||||
"- Do not use interactive commands, editors, pagers, or prompts.",
|
||||
"- If details are ambiguous, make a reasonable assumption and continue.",
|
||||
"- Complete the task in one run, including any requested file edits, commands, and verification.",
|
||||
"- End with a concise final report that includes changed files, commands run, and outcomes.",
|
||||
"",
|
||||
"Task:",
|
||||
prompt,
|
||||
].join("\n");
|
||||
const codexCommand =
|
||||
`codex exec --dangerously-bypass-approvals-and-sandbox --skip-git-repo-check ${shellQuote(wrappedPrompt)} < /dev/null`;
|
||||
return `mkdir -p ${shellQuote(workdir)} && cd ${shellQuote(workdir)} && ${codexCommand}`;
|
||||
}
|
||||
|
||||
function buildRemoteShellCommand(command: string) {
|
||||
const workdir = env.CHAT_CODEX_REMOTE_WORKDIR.trim();
|
||||
const quotedCommand = shellQuote(command);
|
||||
return (
|
||||
`mkdir -p ${shellQuote(workdir)} && cd ${shellQuote(workdir)} && ` +
|
||||
`if command -v bash >/dev/null 2>&1; then bash -lc ${quotedCommand}; else sh -lc ${quotedCommand}; fi`
|
||||
);
|
||||
}
|
||||
|
||||
async function withDevboxSshKeyPath<T>(fn: (keyPath?: string) => Promise<T>) {
|
||||
if (env.CHAT_CODEX_SSH_KEY_PATH) {
|
||||
return fn(env.CHAT_CODEX_SSH_KEY_PATH);
|
||||
}
|
||||
|
||||
if (!env.CHAT_CODEX_SSH_PRIVATE_KEY_B64) {
|
||||
return fn(undefined);
|
||||
}
|
||||
|
||||
const tmpDir = await mkdtemp(path.join(os.tmpdir(), "sybil-codex-ssh-"));
|
||||
const keyPath = path.join(tmpDir, "id");
|
||||
try {
|
||||
await writeFile(keyPath, Buffer.from(env.CHAT_CODEX_SSH_PRIVATE_KEY_B64, "base64"), { mode: 0o600 });
|
||||
return await fn(keyPath);
|
||||
} finally {
|
||||
await rm(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
function clipRemoteOutput(value: string, maxCharacters: number) {
|
||||
if (value.length <= maxCharacters) {
|
||||
return { text: value, truncated: false };
|
||||
}
|
||||
return {
|
||||
text: `${value.slice(0, maxCharacters)}\n\n[truncated ${value.length - maxCharacters} characters]`,
|
||||
truncated: true,
|
||||
};
|
||||
}
|
||||
|
||||
function bufferOrStringToString(value: unknown) {
|
||||
if (typeof value === "string") return value;
|
||||
if (Buffer.isBuffer(value)) return value.toString("utf8");
|
||||
return "";
|
||||
}
|
||||
|
||||
async function runCodexExecTool(input: unknown): Promise<ToolRunOutcome> {
|
||||
if (!env.CHAT_CODEX_TOOL_ENABLED) {
|
||||
return { ok: false, error: "codex_exec is disabled." };
|
||||
}
|
||||
|
||||
const args: CodexExecArgs = CodexExecArgsSchema.parse(input);
|
||||
const maxCharacters = args.maxCharacters ?? DEFAULT_CODEX_MAX_OUTPUT_CHARACTERS;
|
||||
const sshTarget = buildDevboxSshTarget();
|
||||
const remoteCommand = buildRemoteCodexCommand(args.prompt);
|
||||
|
||||
const run = async (keyPath?: string) => {
|
||||
const sshArgs = [
|
||||
"-n",
|
||||
"-o",
|
||||
"BatchMode=yes",
|
||||
"-o",
|
||||
"StrictHostKeyChecking=accept-new",
|
||||
"-o",
|
||||
"UserKnownHostsFile=/tmp/sybil-codex-known-hosts",
|
||||
"-p",
|
||||
String(env.CHAT_CODEX_REMOTE_PORT),
|
||||
];
|
||||
|
||||
if (keyPath) {
|
||||
sshArgs.push("-i", keyPath);
|
||||
}
|
||||
|
||||
sshArgs.push(sshTarget, remoteCommand);
|
||||
|
||||
try {
|
||||
const result = await execFileAsync("ssh", sshArgs, {
|
||||
timeout: env.CHAT_CODEX_EXEC_TIMEOUT_MS,
|
||||
maxBuffer: REMOTE_EXEC_MAX_BUFFER_BYTES,
|
||||
});
|
||||
const stdout = clipRemoteOutput(bufferOrStringToString(result.stdout), maxCharacters);
|
||||
const stderr = clipRemoteOutput(bufferOrStringToString(result.stderr), Math.min(maxCharacters, 12_000));
|
||||
return {
|
||||
ok: true,
|
||||
host: env.CHAT_CODEX_REMOTE_HOST,
|
||||
workdir: env.CHAT_CODEX_REMOTE_WORKDIR,
|
||||
stdout: stdout.text,
|
||||
stderr: stderr.text,
|
||||
stdoutTruncated: stdout.truncated,
|
||||
stderrTruncated: stderr.truncated,
|
||||
};
|
||||
} catch (err: any) {
|
||||
const stdout = clipRemoteOutput(bufferOrStringToString(err?.stdout), maxCharacters);
|
||||
const stderr = clipRemoteOutput(bufferOrStringToString(err?.stderr), Math.min(maxCharacters, 12_000));
|
||||
return {
|
||||
ok: false,
|
||||
error: err?.killed
|
||||
? `Remote Codex command timed out after ${env.CHAT_CODEX_EXEC_TIMEOUT_MS}ms.`
|
||||
: err?.message ?? String(err),
|
||||
exitCode: typeof err?.code === "number" ? err.code : null,
|
||||
signal: typeof err?.signal === "string" ? err.signal : null,
|
||||
host: env.CHAT_CODEX_REMOTE_HOST,
|
||||
workdir: env.CHAT_CODEX_REMOTE_WORKDIR,
|
||||
stdout: stdout.text,
|
||||
stderr: stderr.text,
|
||||
stdoutTruncated: stdout.truncated,
|
||||
stderrTruncated: stderr.truncated,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
return withDevboxSshKeyPath(run);
|
||||
}
|
||||
|
||||
async function runShellExecTool(input: unknown): Promise<ToolRunOutcome> {
|
||||
if (!env.CHAT_SHELL_TOOL_ENABLED) {
|
||||
return { ok: false, error: "shell_exec is disabled." };
|
||||
}
|
||||
|
||||
const args: ShellExecArgs = ShellExecArgsSchema.parse(input);
|
||||
const maxCharacters = args.maxCharacters ?? DEFAULT_SHELL_MAX_OUTPUT_CHARACTERS;
|
||||
const sshTarget = buildDevboxSshTarget();
|
||||
const remoteCommand = buildRemoteShellCommand(args.command);
|
||||
|
||||
const run = async (keyPath?: string) => {
|
||||
const sshArgs = [
|
||||
"-n",
|
||||
"-o",
|
||||
"BatchMode=yes",
|
||||
"-o",
|
||||
"StrictHostKeyChecking=accept-new",
|
||||
"-o",
|
||||
"UserKnownHostsFile=/tmp/sybil-codex-known-hosts",
|
||||
"-p",
|
||||
String(env.CHAT_CODEX_REMOTE_PORT),
|
||||
];
|
||||
|
||||
if (keyPath) {
|
||||
sshArgs.push("-i", keyPath);
|
||||
}
|
||||
|
||||
sshArgs.push(sshTarget, remoteCommand);
|
||||
|
||||
try {
|
||||
const result = await execFileAsync("ssh", sshArgs, {
|
||||
timeout: env.CHAT_SHELL_EXEC_TIMEOUT_MS,
|
||||
maxBuffer: REMOTE_EXEC_MAX_BUFFER_BYTES,
|
||||
});
|
||||
const stdout = clipRemoteOutput(bufferOrStringToString(result.stdout), maxCharacters);
|
||||
const stderr = clipRemoteOutput(bufferOrStringToString(result.stderr), Math.min(maxCharacters, 12_000));
|
||||
return {
|
||||
ok: true,
|
||||
host: env.CHAT_CODEX_REMOTE_HOST,
|
||||
workdir: env.CHAT_CODEX_REMOTE_WORKDIR,
|
||||
command: args.command,
|
||||
stdout: stdout.text,
|
||||
stderr: stderr.text,
|
||||
stdoutTruncated: stdout.truncated,
|
||||
stderrTruncated: stderr.truncated,
|
||||
};
|
||||
} catch (err: any) {
|
||||
const stdout = clipRemoteOutput(bufferOrStringToString(err?.stdout), maxCharacters);
|
||||
const stderr = clipRemoteOutput(bufferOrStringToString(err?.stderr), Math.min(maxCharacters, 12_000));
|
||||
return {
|
||||
ok: false,
|
||||
error: err?.killed
|
||||
? `Remote shell command timed out after ${env.CHAT_SHELL_EXEC_TIMEOUT_MS}ms.`
|
||||
: err?.message ?? String(err),
|
||||
exitCode: typeof err?.code === "number" ? err.code : null,
|
||||
signal: typeof err?.signal === "string" ? err.signal : null,
|
||||
host: env.CHAT_CODEX_REMOTE_HOST,
|
||||
workdir: env.CHAT_CODEX_REMOTE_WORKDIR,
|
||||
command: args.command,
|
||||
stdout: stdout.text,
|
||||
stderr: stderr.text,
|
||||
stdoutTruncated: stdout.truncated,
|
||||
stderrTruncated: stderr.truncated,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
return withDevboxSshKeyPath(run);
|
||||
}
|
||||
|
||||
async function executeTool(name: string, args: unknown): Promise<ToolRunOutcome> {
|
||||
if (name === "web_search") return runWebSearchTool(args);
|
||||
if (name === "fetch_url") return runFetchUrlTool(args);
|
||||
if (name === "codex_exec") return runCodexExecTool(args);
|
||||
if (name === "shell_exec") return runShellExecTool(args);
|
||||
return { ok: false, error: `Unknown tool: ${name}` };
|
||||
}
|
||||
|
||||
@@ -396,6 +773,49 @@ function parseToolArgs(raw: unknown) {
|
||||
}
|
||||
}
|
||||
|
||||
function buildEventArgs(name: string, args: Record<string, unknown>) {
|
||||
if (name === "codex_exec" && typeof args.prompt === "string") {
|
||||
return {
|
||||
...args,
|
||||
prompt: clipText(args.prompt, 1_000),
|
||||
};
|
||||
}
|
||||
|
||||
if (name === "shell_exec" && typeof args.command === "string") {
|
||||
return {
|
||||
...args,
|
||||
command: clipText(args.command, 1_000),
|
||||
};
|
||||
}
|
||||
|
||||
return args;
|
||||
}
|
||||
|
||||
function looksLikeDanglingToolIntent(text: string) {
|
||||
const normalized = text
|
||||
.toLowerCase()
|
||||
.replace(/[`*_>#-]/g, " ")
|
||||
.replace(/\s+/g, " ")
|
||||
.trim();
|
||||
if (!normalized) return false;
|
||||
if (normalized.length > 800) return false;
|
||||
if (/\blet me know\b/.test(normalized) || /\bif you (want|would like)\b/.test(normalized)) return false;
|
||||
return (
|
||||
/\b(calling|running|executing|trying|checking|testing)\b.{0,80}\b(now|it|tool|command|shell_exec|codex_exec)\b/.test(normalized) ||
|
||||
/\b(let me|i'?ll|i will)\b.{0,120}\b(run|execute|call|try|check|test)\b/.test(normalized) ||
|
||||
/\b(stand by|hang on|one moment)\b/.test(normalized)
|
||||
);
|
||||
}
|
||||
|
||||
function appendDanglingToolIntentCorrection(conversation: any[], text: string) {
|
||||
conversation.push({ role: "assistant", content: text });
|
||||
conversation.push({
|
||||
role: "system",
|
||||
content:
|
||||
"Internal correction: the previous assistant message claimed it would run a tool, but no tool call was made. If the task needs an available tool, call it now. Otherwise provide the final answer directly without saying you will run a tool.",
|
||||
});
|
||||
}
|
||||
|
||||
function mergeUsage(acc: Required<ToolAwareUsage>, usage: any) {
|
||||
if (!usage) return false;
|
||||
acc.inputTokens += usage.prompt_tokens ?? 0;
|
||||
@@ -404,6 +824,58 @@ function mergeUsage(acc: Required<ToolAwareUsage>, usage: any) {
|
||||
return true;
|
||||
}
|
||||
|
||||
function mergeResponsesUsage(acc: Required<ToolAwareUsage>, usage: any) {
|
||||
if (!usage) return false;
|
||||
acc.inputTokens += usage.input_tokens ?? 0;
|
||||
acc.outputTokens += usage.output_tokens ?? 0;
|
||||
acc.totalTokens += usage.total_tokens ?? 0;
|
||||
return true;
|
||||
}
|
||||
|
||||
function getResponseOutputItems(response: any) {
|
||||
return Array.isArray(response?.output) ? response.output : [];
|
||||
}
|
||||
|
||||
function extractResponsesText(response: any, fallback = "") {
|
||||
if (typeof response?.output_text === "string") return response.output_text;
|
||||
|
||||
const parts: string[] = [];
|
||||
for (const item of getResponseOutputItems(response)) {
|
||||
if (item?.type !== "message" || !Array.isArray(item.content)) continue;
|
||||
for (const content of item.content) {
|
||||
if (content?.type === "output_text" && typeof content.text === "string") {
|
||||
parts.push(content.text);
|
||||
} else if (content?.type === "refusal" && typeof content.refusal === "string") {
|
||||
parts.push(content.refusal);
|
||||
}
|
||||
}
|
||||
}
|
||||
return parts.join("") || fallback;
|
||||
}
|
||||
|
||||
function getUnstreamedText(finalText: string, streamedText: string) {
|
||||
if (!finalText) return "";
|
||||
if (!streamedText) return finalText;
|
||||
return finalText.startsWith(streamedText) ? finalText.slice(streamedText.length) : "";
|
||||
}
|
||||
|
||||
function getResponseFailureMessage(response: any) {
|
||||
if (response?.status !== "failed" && response?.status !== "incomplete") return null;
|
||||
const errorMessage = typeof response?.error?.message === "string" ? response.error.message : null;
|
||||
const incompleteReason = typeof response?.incomplete_details?.reason === "string" ? response.incomplete_details.reason : null;
|
||||
return errorMessage ?? (incompleteReason ? `Response incomplete: ${incompleteReason}` : `Response ${response.status}.`);
|
||||
}
|
||||
|
||||
function normalizeResponsesToolCalls(outputItems: any[], round: number): NormalizedToolCall[] {
|
||||
return outputItems
|
||||
.filter((item) => item?.type === "function_call")
|
||||
.map((call: any, index: number) => ({
|
||||
id: call.call_id ?? call.id ?? `tool_call_${round}_${index}`,
|
||||
name: call.name ?? "unknown_tool",
|
||||
arguments: call.arguments ?? "{}",
|
||||
}));
|
||||
}
|
||||
|
||||
type NormalizedToolCall = {
|
||||
id: string;
|
||||
name: string;
|
||||
@@ -445,12 +917,13 @@ async function executeToolCallAndBuildEvent(
|
||||
: undefined;
|
||||
|
||||
const completedAtMs = Date.now();
|
||||
const eventArgs = buildEventArgs(call.name, parsedArgs);
|
||||
const event: ToolExecutionEvent = {
|
||||
toolCallId: call.id,
|
||||
name: call.name,
|
||||
status,
|
||||
summary: buildToolSummary(call.name, parsedArgs, status, error),
|
||||
args: parsedArgs,
|
||||
summary: buildToolSummary(call.name, eventArgs, status, error),
|
||||
args: eventArgs,
|
||||
startedAt,
|
||||
completedAt: new Date(completedAtMs).toISOString(),
|
||||
durationMs: completedAtMs - startedAtMs,
|
||||
@@ -466,12 +939,82 @@ async function executeToolCallAndBuildEvent(
|
||||
}
|
||||
|
||||
export async function runToolAwareOpenAIChat(params: ToolAwareCompletionParams): Promise<ToolAwareCompletionResult> {
|
||||
const input: any[] = normalizeIncomingResponsesInput(params.messages);
|
||||
const rawResponses: unknown[] = [];
|
||||
const toolEvents: ToolExecutionEvent[] = [];
|
||||
const usageAcc: Required<ToolAwareUsage> = { inputTokens: 0, outputTokens: 0, totalTokens: 0 };
|
||||
let sawUsage = false;
|
||||
let totalToolCalls = 0;
|
||||
let danglingToolIntentRetries = 0;
|
||||
|
||||
for (let round = 0; round < MAX_TOOL_ROUNDS; round += 1) {
|
||||
const response = await params.client.responses.create({
|
||||
model: params.model,
|
||||
input,
|
||||
temperature: params.temperature,
|
||||
max_output_tokens: params.maxTokens,
|
||||
tools: RESPONSES_CHAT_TOOLS,
|
||||
tool_choice: "auto",
|
||||
parallel_tool_calls: true,
|
||||
// Tool loops pass response output items back as input; reasoning items need persistence.
|
||||
store: true,
|
||||
} as any);
|
||||
rawResponses.push(response);
|
||||
sawUsage = mergeResponsesUsage(usageAcc, response?.usage) || sawUsage;
|
||||
|
||||
const failureMessage = getResponseFailureMessage(response);
|
||||
if (failureMessage) {
|
||||
throw new Error(failureMessage);
|
||||
}
|
||||
|
||||
const outputItems = getResponseOutputItems(response);
|
||||
const normalizedToolCalls = normalizeResponsesToolCalls(outputItems, round);
|
||||
if (!normalizedToolCalls.length) {
|
||||
const text = extractResponsesText(response);
|
||||
if (danglingToolIntentRetries < MAX_DANGLING_TOOL_INTENT_RETRIES && looksLikeDanglingToolIntent(text)) {
|
||||
danglingToolIntentRetries += 1;
|
||||
appendDanglingToolIntentCorrection(input, text);
|
||||
continue;
|
||||
}
|
||||
return {
|
||||
text,
|
||||
usage: sawUsage ? usageAcc : undefined,
|
||||
raw: { responses: rawResponses, toolCallsUsed: totalToolCalls, api: "responses" },
|
||||
toolEvents,
|
||||
};
|
||||
}
|
||||
|
||||
totalToolCalls += normalizedToolCalls.length;
|
||||
input.push(...outputItems);
|
||||
|
||||
for (const call of normalizedToolCalls) {
|
||||
const { event, toolResult } = await executeToolCallAndBuildEvent(call, params);
|
||||
toolEvents.push(event);
|
||||
|
||||
input.push({
|
||||
type: "function_call_output",
|
||||
call_id: call.id,
|
||||
output: JSON.stringify(toolResult),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
text: "I reached the tool-call limit while gathering information. Please narrow the request and try again.",
|
||||
usage: sawUsage ? usageAcc : undefined,
|
||||
raw: { responses: rawResponses, toolCallsUsed: totalToolCalls, toolCallLimitReached: true, api: "responses" },
|
||||
toolEvents,
|
||||
};
|
||||
}
|
||||
|
||||
export async function runToolAwareChatCompletions(params: ToolAwareCompletionParams): Promise<ToolAwareCompletionResult> {
|
||||
const conversation: any[] = normalizeIncomingMessages(params.messages);
|
||||
const rawResponses: unknown[] = [];
|
||||
const toolEvents: ToolExecutionEvent[] = [];
|
||||
const usageAcc: Required<ToolAwareUsage> = { inputTokens: 0, outputTokens: 0, totalTokens: 0 };
|
||||
let sawUsage = false;
|
||||
let totalToolCalls = 0;
|
||||
let danglingToolIntentRetries = 0;
|
||||
|
||||
for (let round = 0; round < MAX_TOOL_ROUNDS; round += 1) {
|
||||
const completion = await params.client.chat.completions.create({
|
||||
@@ -497,8 +1040,14 @@ export async function runToolAwareOpenAIChat(params: ToolAwareCompletionParams):
|
||||
|
||||
const toolCalls = Array.isArray(message.tool_calls) ? message.tool_calls : [];
|
||||
if (!toolCalls.length) {
|
||||
const text = typeof message.content === "string" ? message.content : "";
|
||||
if (danglingToolIntentRetries < MAX_DANGLING_TOOL_INTENT_RETRIES && looksLikeDanglingToolIntent(text)) {
|
||||
danglingToolIntentRetries += 1;
|
||||
appendDanglingToolIntentCorrection(conversation, text);
|
||||
continue;
|
||||
}
|
||||
return {
|
||||
text: typeof message.content === "string" ? message.content : "",
|
||||
text,
|
||||
usage: sawUsage ? usageAcc : undefined,
|
||||
raw: { responses: rawResponses, toolCallsUsed: totalToolCalls },
|
||||
toolEvents,
|
||||
@@ -546,6 +1095,132 @@ export async function runToolAwareOpenAIChat(params: ToolAwareCompletionParams):
|
||||
|
||||
export async function* runToolAwareOpenAIChatStream(
|
||||
params: ToolAwareCompletionParams
|
||||
): AsyncGenerator<ToolAwareStreamingEvent> {
|
||||
const input: any[] = normalizeIncomingResponsesInput(params.messages);
|
||||
const rawResponses: unknown[] = [];
|
||||
const toolEvents: ToolExecutionEvent[] = [];
|
||||
const usageAcc: Required<ToolAwareUsage> = { inputTokens: 0, outputTokens: 0, totalTokens: 0 };
|
||||
let sawUsage = false;
|
||||
let totalToolCalls = 0;
|
||||
let danglingToolIntentRetries = 0;
|
||||
|
||||
for (let round = 0; round < MAX_TOOL_ROUNDS; round += 1) {
|
||||
const stream = await params.client.responses.create({
|
||||
model: params.model,
|
||||
input,
|
||||
temperature: params.temperature,
|
||||
max_output_tokens: params.maxTokens,
|
||||
tools: RESPONSES_CHAT_TOOLS,
|
||||
tool_choice: "auto",
|
||||
parallel_tool_calls: true,
|
||||
// Tool loops pass response output items back as input; reasoning items need persistence.
|
||||
store: true,
|
||||
stream: true,
|
||||
} as any);
|
||||
|
||||
let roundText = "";
|
||||
let streamedRoundText = "";
|
||||
let roundHasToolCalls = false;
|
||||
let canStreamRoundText = false;
|
||||
let completedResponse: any | null = null;
|
||||
const completedOutputItems: any[] = [];
|
||||
|
||||
for await (const event of stream as any as AsyncIterable<any>) {
|
||||
rawResponses.push(event);
|
||||
|
||||
if (event?.type === "response.output_text.delta" && typeof event.delta === "string") {
|
||||
roundText += event.delta;
|
||||
if (canStreamRoundText && !roundHasToolCalls && event.delta.length) {
|
||||
streamedRoundText += event.delta;
|
||||
yield { type: "delta", text: event.delta };
|
||||
}
|
||||
} else if (event?.type === "response.output_item.added" && event.item) {
|
||||
if (event.item.type === "function_call") {
|
||||
roundHasToolCalls = true;
|
||||
canStreamRoundText = false;
|
||||
} else if (event.item.type === "message" && !roundHasToolCalls) {
|
||||
canStreamRoundText = true;
|
||||
}
|
||||
} else if (event?.type === "response.output_item.done" && event.item) {
|
||||
completedOutputItems[event.output_index ?? completedOutputItems.length] = event.item;
|
||||
if (event.item.type === "function_call") {
|
||||
roundHasToolCalls = true;
|
||||
canStreamRoundText = false;
|
||||
}
|
||||
} else if (event?.type === "response.completed") {
|
||||
completedResponse = event.response;
|
||||
sawUsage = mergeResponsesUsage(usageAcc, event.response?.usage) || sawUsage;
|
||||
} else if (event?.type === "response.failed" || event?.type === "response.incomplete") {
|
||||
completedResponse = event.response;
|
||||
sawUsage = mergeResponsesUsage(usageAcc, event.response?.usage) || sawUsage;
|
||||
} else if (event?.type === "error") {
|
||||
throw new Error(event.message ?? "OpenAI Responses stream failed.");
|
||||
}
|
||||
}
|
||||
|
||||
const failureMessage = getResponseFailureMessage(completedResponse);
|
||||
if (failureMessage) {
|
||||
throw new Error(failureMessage);
|
||||
}
|
||||
|
||||
const outputItems = getResponseOutputItems(completedResponse);
|
||||
const responseOutputItems = outputItems.length ? outputItems : completedOutputItems.filter(Boolean);
|
||||
const normalizedToolCalls = normalizeResponsesToolCalls(responseOutputItems, round);
|
||||
if (!normalizedToolCalls.length) {
|
||||
const text = extractResponsesText(completedResponse, roundText);
|
||||
if (
|
||||
!streamedRoundText &&
|
||||
danglingToolIntentRetries < MAX_DANGLING_TOOL_INTENT_RETRIES &&
|
||||
looksLikeDanglingToolIntent(text)
|
||||
) {
|
||||
danglingToolIntentRetries += 1;
|
||||
appendDanglingToolIntentCorrection(input, text);
|
||||
continue;
|
||||
}
|
||||
const unstreamedText = getUnstreamedText(text, streamedRoundText);
|
||||
if (unstreamedText) {
|
||||
yield { type: "delta", text: unstreamedText };
|
||||
}
|
||||
yield {
|
||||
type: "done",
|
||||
result: {
|
||||
text,
|
||||
usage: sawUsage ? usageAcc : undefined,
|
||||
raw: { streamed: true, responses: rawResponses, toolCallsUsed: totalToolCalls, api: "responses" },
|
||||
toolEvents,
|
||||
},
|
||||
};
|
||||
return;
|
||||
}
|
||||
|
||||
totalToolCalls += normalizedToolCalls.length;
|
||||
input.push(...responseOutputItems);
|
||||
|
||||
for (const call of normalizedToolCalls) {
|
||||
const { event, toolResult } = await executeToolCallAndBuildEvent(call, params);
|
||||
toolEvents.push(event);
|
||||
yield { type: "tool_call", event };
|
||||
input.push({
|
||||
type: "function_call_output",
|
||||
call_id: call.id,
|
||||
output: JSON.stringify(toolResult),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
yield {
|
||||
type: "done",
|
||||
result: {
|
||||
text: "I reached the tool-call limit while gathering information. Please narrow the request and try again.",
|
||||
usage: sawUsage ? usageAcc : undefined,
|
||||
raw: { streamed: true, responses: rawResponses, toolCallsUsed: totalToolCalls, toolCallLimitReached: true, api: "responses" },
|
||||
toolEvents,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export async function* runToolAwareChatCompletionsStream(
|
||||
params: ToolAwareCompletionParams
|
||||
): AsyncGenerator<ToolAwareStreamingEvent> {
|
||||
const conversation: any[] = normalizeIncomingMessages(params.messages);
|
||||
const rawResponses: unknown[] = [];
|
||||
@@ -553,6 +1228,7 @@ export async function* runToolAwareOpenAIChatStream(
|
||||
const usageAcc: Required<ToolAwareUsage> = { inputTokens: 0, outputTokens: 0, totalTokens: 0 };
|
||||
let sawUsage = false;
|
||||
let totalToolCalls = 0;
|
||||
let danglingToolIntentRetries = 0;
|
||||
|
||||
for (let round = 0; round < MAX_TOOL_ROUNDS; round += 1) {
|
||||
const stream = await params.client.chat.completions.create({
|
||||
@@ -567,6 +1243,8 @@ export async function* runToolAwareOpenAIChatStream(
|
||||
} as any);
|
||||
|
||||
let roundText = "";
|
||||
let streamedRoundText = "";
|
||||
let roundHasToolCalls = false;
|
||||
const roundToolCalls = new Map<number, { id?: string; name?: string; arguments: string }>();
|
||||
|
||||
for await (const chunk of stream as any as AsyncIterable<any>) {
|
||||
@@ -577,12 +1255,16 @@ export async function* runToolAwareOpenAIChatStream(
|
||||
const deltaText = choice?.delta?.content ?? "";
|
||||
if (typeof deltaText === "string" && deltaText.length) {
|
||||
roundText += deltaText;
|
||||
if (roundToolCalls.size === 0) {
|
||||
if (!roundHasToolCalls) {
|
||||
streamedRoundText += deltaText;
|
||||
yield { type: "delta", text: deltaText };
|
||||
}
|
||||
}
|
||||
|
||||
const deltaToolCalls = Array.isArray(choice?.delta?.tool_calls) ? choice.delta.tool_calls : [];
|
||||
if (deltaToolCalls.length) {
|
||||
roundHasToolCalls = true;
|
||||
}
|
||||
for (const toolCall of deltaToolCalls) {
|
||||
const idx = typeof toolCall?.index === "number" ? toolCall.index : 0;
|
||||
const entry = roundToolCalls.get(idx) ?? { arguments: "" };
|
||||
@@ -608,6 +1290,19 @@ export async function* runToolAwareOpenAIChatStream(
|
||||
}));
|
||||
|
||||
if (!normalizedToolCalls.length) {
|
||||
if (
|
||||
!streamedRoundText &&
|
||||
danglingToolIntentRetries < MAX_DANGLING_TOOL_INTENT_RETRIES &&
|
||||
looksLikeDanglingToolIntent(roundText)
|
||||
) {
|
||||
danglingToolIntentRetries += 1;
|
||||
appendDanglingToolIntentCorrection(conversation, roundText);
|
||||
continue;
|
||||
}
|
||||
const unstreamedText = getUnstreamedText(roundText, streamedRoundText);
|
||||
if (unstreamedText) {
|
||||
yield { type: "delta", text: unstreamedText };
|
||||
}
|
||||
yield {
|
||||
type: "done",
|
||||
result: {
|
||||
@@ -621,7 +1316,7 @@ export async function* runToolAwareOpenAIChatStream(
|
||||
}
|
||||
|
||||
totalToolCalls += normalizedToolCalls.length;
|
||||
conversation.push({
|
||||
const assistantToolCallMessage: any = {
|
||||
role: "assistant",
|
||||
tool_calls: normalizedToolCalls.map((call) => ({
|
||||
id: call.id,
|
||||
@@ -631,7 +1326,11 @@ export async function* runToolAwareOpenAIChatStream(
|
||||
arguments: call.arguments,
|
||||
},
|
||||
})),
|
||||
});
|
||||
};
|
||||
if (roundText) {
|
||||
assistantToolCallMessage.content = roundText;
|
||||
}
|
||||
conversation.push(assistantToolCallMessage);
|
||||
|
||||
for (const call of normalizedToolCalls) {
|
||||
const { event, toolResult } = await executeToolCallAndBuildEvent(call, params);
|
||||
|
||||
268
server/src/llm/message-content.ts
Normal file
268
server/src/llm/message-content.ts
Normal file
@@ -0,0 +1,268 @@
|
||||
import type { ChatAttachment, ChatImageAttachment, ChatMessage, ChatTextAttachment } from "./types.js";
|
||||
|
||||
function escapeAttribute(value: string) {
|
||||
return value.replace(/"/g, """);
|
||||
}
|
||||
|
||||
function getImageAttachments(message: ChatMessage) {
|
||||
return (message.attachments ?? []).filter((attachment): attachment is ChatImageAttachment => attachment.kind === "image");
|
||||
}
|
||||
|
||||
function getTextAttachments(message: ChatMessage) {
|
||||
return (message.attachments ?? []).filter((attachment): attachment is ChatTextAttachment => attachment.kind === "text");
|
||||
}
|
||||
|
||||
function buildImageSummaryText(attachments: ChatImageAttachment[]) {
|
||||
if (!attachments.length) return null;
|
||||
const label = attachments.length === 1 ? "Attached image" : "Attached images";
|
||||
return `${label}: ${attachments.map((attachment) => attachment.filename).join(", ")}.`;
|
||||
}
|
||||
|
||||
function buildTextAttachmentPrompt(attachment: ChatTextAttachment) {
|
||||
const truncationNote = attachment.truncated ? ' truncated="true"' : "";
|
||||
return [
|
||||
`Attached text file: ${attachment.filename}${attachment.truncated ? " (content truncated)" : ""}`,
|
||||
`<attached_file filename="${escapeAttribute(attachment.filename)}" mime_type="${escapeAttribute(attachment.mimeType)}"${truncationNote}>`,
|
||||
attachment.text,
|
||||
"</attached_file>",
|
||||
].join("\n");
|
||||
}
|
||||
|
||||
function toOpenAIContent(message: ChatMessage) {
|
||||
const imageAttachments = getImageAttachments(message);
|
||||
const textAttachments = getTextAttachments(message);
|
||||
if (!imageAttachments.length && !textAttachments.length) {
|
||||
return message.content;
|
||||
}
|
||||
|
||||
const parts: Array<Record<string, unknown>> = [];
|
||||
|
||||
for (const attachment of imageAttachments) {
|
||||
parts.push({
|
||||
type: "image_url",
|
||||
image_url: {
|
||||
url: attachment.dataUrl,
|
||||
detail: "auto",
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
const imageSummary = buildImageSummaryText(imageAttachments);
|
||||
if (imageSummary) {
|
||||
parts.push({ type: "text", text: imageSummary });
|
||||
}
|
||||
|
||||
for (const attachment of textAttachments) {
|
||||
parts.push({ type: "text", text: buildTextAttachmentPrompt(attachment) });
|
||||
}
|
||||
|
||||
if (message.content.trim()) {
|
||||
parts.push({ type: "text", text: message.content });
|
||||
}
|
||||
|
||||
if (parts.length === 1 && parts[0]?.type === "text" && typeof parts[0].text === "string") {
|
||||
return parts[0].text;
|
||||
}
|
||||
|
||||
return parts;
|
||||
}
|
||||
|
||||
function toOpenAIResponsesContent(message: ChatMessage) {
|
||||
const imageAttachments = getImageAttachments(message);
|
||||
const textAttachments = getTextAttachments(message);
|
||||
if (!imageAttachments.length && !textAttachments.length) {
|
||||
return message.content;
|
||||
}
|
||||
|
||||
const parts: Array<Record<string, unknown>> = [];
|
||||
|
||||
for (const attachment of imageAttachments) {
|
||||
parts.push({
|
||||
type: "input_image",
|
||||
image_url: attachment.dataUrl,
|
||||
detail: "auto",
|
||||
});
|
||||
}
|
||||
|
||||
const imageSummary = buildImageSummaryText(imageAttachments);
|
||||
if (imageSummary) {
|
||||
parts.push({ type: "input_text", text: imageSummary });
|
||||
}
|
||||
|
||||
for (const attachment of textAttachments) {
|
||||
parts.push({ type: "input_text", text: buildTextAttachmentPrompt(attachment) });
|
||||
}
|
||||
|
||||
if (message.content.trim()) {
|
||||
parts.push({ type: "input_text", text: message.content });
|
||||
}
|
||||
|
||||
if (parts.length === 1 && parts[0]?.type === "input_text" && typeof parts[0].text === "string") {
|
||||
return parts[0].text;
|
||||
}
|
||||
|
||||
return parts;
|
||||
}
|
||||
|
||||
function parseImageDataUrl(attachment: ChatImageAttachment) {
|
||||
const match = attachment.dataUrl.match(/^data:(image\/(?:png|jpeg));base64,([a-z0-9+/=\s]+)$/i);
|
||||
if (!match) {
|
||||
throw new Error(`Invalid image attachment data URL for '${attachment.filename}'.`);
|
||||
}
|
||||
|
||||
const mediaType = match[1].toLowerCase();
|
||||
if (mediaType !== attachment.mimeType) {
|
||||
throw new Error(`Image attachment MIME type mismatch for '${attachment.filename}'.`);
|
||||
}
|
||||
|
||||
return {
|
||||
mediaType,
|
||||
data: match[2].replace(/\s+/g, ""),
|
||||
};
|
||||
}
|
||||
|
||||
function toAnthropicContent(message: ChatMessage) {
|
||||
const imageAttachments = getImageAttachments(message);
|
||||
const textAttachments = getTextAttachments(message);
|
||||
if (!imageAttachments.length && !textAttachments.length) {
|
||||
return message.content;
|
||||
}
|
||||
|
||||
const blocks: Array<Record<string, unknown>> = [];
|
||||
|
||||
for (const attachment of imageAttachments) {
|
||||
const source = parseImageDataUrl(attachment);
|
||||
blocks.push({
|
||||
type: "image",
|
||||
source: {
|
||||
type: "base64",
|
||||
media_type: source.mediaType,
|
||||
data: source.data,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
const imageSummary = buildImageSummaryText(imageAttachments);
|
||||
if (imageSummary) {
|
||||
blocks.push({ type: "text", text: imageSummary });
|
||||
}
|
||||
|
||||
for (const attachment of textAttachments) {
|
||||
blocks.push({ type: "text", text: buildTextAttachmentPrompt(attachment) });
|
||||
}
|
||||
|
||||
if (message.content.trim()) {
|
||||
blocks.push({ type: "text", text: message.content });
|
||||
}
|
||||
|
||||
if (blocks.length === 1 && blocks[0]?.type === "text" && typeof blocks[0].text === "string") {
|
||||
return blocks[0].text;
|
||||
}
|
||||
|
||||
return blocks;
|
||||
}
|
||||
|
||||
export function buildOpenAIConversationMessage(message: ChatMessage) {
|
||||
if (message.role === "tool") {
|
||||
const name = message.name?.trim() || "tool";
|
||||
return {
|
||||
role: "user",
|
||||
content: `Tool output (${name}):\n${message.content}`,
|
||||
};
|
||||
}
|
||||
|
||||
const out: Record<string, unknown> = {
|
||||
role: message.role,
|
||||
content: toOpenAIContent(message),
|
||||
};
|
||||
|
||||
if (message.name && (message.role === "assistant" || message.role === "user")) {
|
||||
out.name = message.name;
|
||||
}
|
||||
|
||||
return out;
|
||||
}
|
||||
|
||||
export function buildOpenAIResponsesInputMessage(message: ChatMessage) {
|
||||
if (message.role === "tool") {
|
||||
const name = message.name?.trim() || "tool";
|
||||
return {
|
||||
role: "user",
|
||||
content: `Tool output (${name}):\n${message.content}`,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
role: message.role,
|
||||
content: toOpenAIResponsesContent(message),
|
||||
};
|
||||
}
|
||||
|
||||
const ANTHROPIC_NO_SERVER_TOOLS_PROMPT =
|
||||
"This Anthropic backend path does not have server-managed tool calls. Do not claim to run shell commands, Codex tasks, web searches, or fetch URLs. If the user asks for tool execution, explain that they should switch to OpenAI or xAI in this app for tool-enabled chat.";
|
||||
|
||||
export function getAnthropicSystemPrompt(messages: ChatMessage[]) {
|
||||
return [ANTHROPIC_NO_SERVER_TOOLS_PROMPT, messages.find((message) => message.role === "system")?.content]
|
||||
.filter(Boolean)
|
||||
.join("\n\n");
|
||||
}
|
||||
|
||||
export function buildAnthropicConversationMessage(message: ChatMessage) {
|
||||
if (message.role === "system") {
|
||||
throw new Error("System messages must be handled separately for Anthropic.");
|
||||
}
|
||||
|
||||
if (message.role === "tool") {
|
||||
const name = message.name?.trim() || "tool";
|
||||
return {
|
||||
role: "user",
|
||||
content: `Tool output (${name}):\n${message.content}`,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
role: message.role === "assistant" ? "assistant" : "user",
|
||||
content: toAnthropicContent(message),
|
||||
};
|
||||
}
|
||||
|
||||
export function buildComparableAttachments(input: unknown): ChatAttachment[] {
|
||||
if (!Array.isArray(input)) return [];
|
||||
|
||||
const attachments: ChatAttachment[] = [];
|
||||
for (const entry of input) {
|
||||
if (!entry || typeof entry !== "object" || Array.isArray(entry)) continue;
|
||||
const record = entry as Record<string, unknown>;
|
||||
const kind = record.kind;
|
||||
const id = typeof record.id === "string" ? record.id : "";
|
||||
const filename = typeof record.filename === "string" ? record.filename : "";
|
||||
const mimeType = typeof record.mimeType === "string" ? record.mimeType : "";
|
||||
const sizeBytes = typeof record.sizeBytes === "number" ? record.sizeBytes : 0;
|
||||
|
||||
if (kind === "image" && typeof record.dataUrl === "string") {
|
||||
attachments.push({
|
||||
kind,
|
||||
id,
|
||||
filename,
|
||||
mimeType: mimeType === "image/png" ? "image/png" : "image/jpeg",
|
||||
sizeBytes,
|
||||
dataUrl: record.dataUrl,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
if (kind === "text" && typeof record.text === "string") {
|
||||
attachments.push({
|
||||
kind,
|
||||
id,
|
||||
filename,
|
||||
mimeType,
|
||||
sizeBytes,
|
||||
text: record.text,
|
||||
truncated: record.truncated === true,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return attachments;
|
||||
}
|
||||
@@ -23,6 +23,15 @@ function uniqSorted(models: string[]) {
|
||||
return [...new Set(models.map((value) => value.trim()).filter(Boolean))].sort((a, b) => a.localeCompare(b));
|
||||
}
|
||||
|
||||
function isLikelyOpenAIResponsesModel(model: string) {
|
||||
const id = model.toLowerCase();
|
||||
if (id.includes("embedding") || id.includes("moderation")) return false;
|
||||
if (id.includes("audio") || id.includes("realtime") || id.includes("transcribe") || id.includes("tts")) return false;
|
||||
if (id.includes("image") || id.includes("dall-e") || id.includes("sora")) return false;
|
||||
if (id.includes("search") || id.includes("computer-use")) return false;
|
||||
return /^(gpt-|o\d|chatgpt-)/.test(id);
|
||||
}
|
||||
|
||||
async function withTimeout<T>(promise: Promise<T>, timeoutMs: number, label: string) {
|
||||
let timeoutId: NodeJS.Timeout | null = null;
|
||||
try {
|
||||
@@ -42,7 +51,7 @@ async function withTimeout<T>(promise: Promise<T>, timeoutMs: number, label: str
|
||||
async function fetchProviderModels(provider: Provider) {
|
||||
if (provider === "openai") {
|
||||
const page = await openaiClient().models.list();
|
||||
return uniqSorted(page.data.map((model) => model.id));
|
||||
return uniqSorted(page.data.map((model) => model.id).filter(isLikelyOpenAIResponsesModel));
|
||||
}
|
||||
|
||||
if (provider === "anthropic") {
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import { performance } from "node:perf_hooks";
|
||||
import { prisma } from "../db.js";
|
||||
import { anthropicClient, openaiClient, xaiClient } from "./providers.js";
|
||||
import { buildToolLogMessageData, runToolAwareOpenAIChat } from "./chat-tools.js";
|
||||
import { buildToolLogMessageData, runToolAwareChatCompletions, runToolAwareOpenAIChat } from "./chat-tools.js";
|
||||
import { buildAnthropicConversationMessage, getAnthropicSystemPrompt } from "./message-content.js";
|
||||
import type { MultiplexRequest, MultiplexResponse, Provider } from "./types.js";
|
||||
|
||||
function asProviderEnum(p: Provider) {
|
||||
@@ -47,8 +48,8 @@ export async function runMultiplex(req: MultiplexRequest): Promise<MultiplexResp
|
||||
let raw: unknown;
|
||||
let toolMessages: ReturnType<typeof buildToolLogMessageData>[] = [];
|
||||
|
||||
if (req.provider === "openai" || req.provider === "xai") {
|
||||
const client = req.provider === "openai" ? openaiClient() : xaiClient();
|
||||
if (req.provider === "openai") {
|
||||
const client = openaiClient();
|
||||
const r = await runToolAwareOpenAIChat({
|
||||
client,
|
||||
model: req.model,
|
||||
@@ -65,14 +66,29 @@ export async function runMultiplex(req: MultiplexRequest): Promise<MultiplexResp
|
||||
outText = r.text;
|
||||
usage = r.usage;
|
||||
toolMessages = r.toolEvents.map((event) => buildToolLogMessageData(call.chatId, event));
|
||||
} else if (req.provider === "xai") {
|
||||
const client = xaiClient();
|
||||
const r = await runToolAwareChatCompletions({
|
||||
client,
|
||||
model: req.model,
|
||||
messages: req.messages,
|
||||
temperature: req.temperature,
|
||||
maxTokens: req.maxTokens,
|
||||
logContext: {
|
||||
provider: req.provider,
|
||||
model: req.model,
|
||||
chatId,
|
||||
},
|
||||
});
|
||||
raw = r.raw;
|
||||
outText = r.text;
|
||||
usage = r.usage;
|
||||
toolMessages = r.toolEvents.map((event) => buildToolLogMessageData(call.chatId, event));
|
||||
} else if (req.provider === "anthropic") {
|
||||
const client = anthropicClient();
|
||||
|
||||
// Anthropic splits system prompt. We'll convert first system message into system string.
|
||||
const system = req.messages.find((m) => m.role === "system")?.content;
|
||||
const msgs = req.messages
|
||||
.filter((m) => m.role !== "system")
|
||||
.map((m) => ({ role: m.role === "assistant" ? "assistant" : "user", content: m.content }));
|
||||
const system = getAnthropicSystemPrompt(req.messages);
|
||||
const msgs = req.messages.filter((message) => message.role !== "system").map((message) => buildAnthropicConversationMessage(message));
|
||||
|
||||
const r = await client.messages.create({
|
||||
model: req.model,
|
||||
|
||||
@@ -1,14 +1,26 @@
|
||||
import { performance } from "node:perf_hooks";
|
||||
import { prisma } from "../db.js";
|
||||
import { anthropicClient, openaiClient, xaiClient } from "./providers.js";
|
||||
import { buildToolLogMessageData, runToolAwareOpenAIChatStream, type ToolExecutionEvent } from "./chat-tools.js";
|
||||
import {
|
||||
buildToolLogMessageData,
|
||||
runToolAwareChatCompletionsStream,
|
||||
runToolAwareOpenAIChatStream,
|
||||
type ToolExecutionEvent,
|
||||
} from "./chat-tools.js";
|
||||
import { buildAnthropicConversationMessage, getAnthropicSystemPrompt } from "./message-content.js";
|
||||
import type { MultiplexRequest, Provider } from "./types.js";
|
||||
|
||||
type StreamUsage = {
|
||||
inputTokens?: number;
|
||||
outputTokens?: number;
|
||||
totalTokens?: number;
|
||||
};
|
||||
|
||||
export type StreamEvent =
|
||||
| { type: "meta"; chatId: string; callId: string; provider: Provider; model: string }
|
||||
| { type: "meta"; chatId: string | null; callId: string | null; provider: Provider; model: string }
|
||||
| { type: "tool_call"; event: ToolExecutionEvent }
|
||||
| { type: "delta"; text: string }
|
||||
| { type: "done"; text: string; usage?: { inputTokens?: number; outputTokens?: number; totalTokens?: number } }
|
||||
| { type: "done"; text: string; usage?: StreamUsage }
|
||||
| { type: "error"; message: string };
|
||||
|
||||
function getChatIdOrCreate(chatId?: string) {
|
||||
@@ -18,57 +30,77 @@ function getChatIdOrCreate(chatId?: string) {
|
||||
|
||||
export async function* runMultiplexStream(req: MultiplexRequest): AsyncGenerator<StreamEvent> {
|
||||
const t0 = performance.now();
|
||||
const chatId = await getChatIdOrCreate(req.chatId);
|
||||
const shouldPersist = req.persist !== false;
|
||||
const chatId = shouldPersist ? await getChatIdOrCreate(req.chatId) : null;
|
||||
|
||||
const call = await prisma.llmCall.create({
|
||||
data: {
|
||||
chatId,
|
||||
provider: req.provider as any,
|
||||
model: req.model,
|
||||
request: req as any,
|
||||
},
|
||||
select: { id: true },
|
||||
});
|
||||
const call =
|
||||
shouldPersist && chatId
|
||||
? await prisma.llmCall.create({
|
||||
data: {
|
||||
chatId,
|
||||
provider: req.provider as any,
|
||||
model: req.model,
|
||||
request: req as any,
|
||||
},
|
||||
select: { id: true },
|
||||
})
|
||||
: null;
|
||||
|
||||
await prisma.$transaction([
|
||||
prisma.chat.update({
|
||||
where: { id: chatId },
|
||||
data: {
|
||||
lastUsedProvider: req.provider as any,
|
||||
lastUsedModel: req.model,
|
||||
},
|
||||
}),
|
||||
prisma.chat.updateMany({
|
||||
where: { id: chatId, initiatedProvider: null },
|
||||
data: {
|
||||
initiatedProvider: req.provider as any,
|
||||
initiatedModel: req.model,
|
||||
},
|
||||
}),
|
||||
]);
|
||||
if (shouldPersist && chatId) {
|
||||
await prisma.$transaction([
|
||||
prisma.chat.update({
|
||||
where: { id: chatId },
|
||||
data: {
|
||||
lastUsedProvider: req.provider as any,
|
||||
lastUsedModel: req.model,
|
||||
},
|
||||
}),
|
||||
prisma.chat.updateMany({
|
||||
where: { id: chatId, initiatedProvider: null },
|
||||
data: {
|
||||
initiatedProvider: req.provider as any,
|
||||
initiatedModel: req.model,
|
||||
},
|
||||
}),
|
||||
]);
|
||||
}
|
||||
|
||||
yield { type: "meta", chatId, callId: call.id, provider: req.provider, model: req.model };
|
||||
yield { type: "meta", chatId, callId: call?.id ?? null, provider: req.provider, model: req.model };
|
||||
|
||||
let text = "";
|
||||
let usage: StreamEvent extends any ? any : never;
|
||||
let usage: StreamUsage | undefined;
|
||||
let raw: unknown = { streamed: true };
|
||||
let toolMessages: ReturnType<typeof buildToolLogMessageData>[] = [];
|
||||
|
||||
try {
|
||||
if (req.provider === "openai" || req.provider === "xai") {
|
||||
const client = req.provider === "openai" ? openaiClient() : xaiClient();
|
||||
for await (const ev of runToolAwareOpenAIChatStream({
|
||||
client,
|
||||
model: req.model,
|
||||
messages: req.messages,
|
||||
temperature: req.temperature,
|
||||
maxTokens: req.maxTokens,
|
||||
logContext: {
|
||||
provider: req.provider,
|
||||
model: req.model,
|
||||
chatId,
|
||||
},
|
||||
})) {
|
||||
const streamEvents =
|
||||
req.provider === "openai"
|
||||
? runToolAwareOpenAIChatStream({
|
||||
client,
|
||||
model: req.model,
|
||||
messages: req.messages,
|
||||
temperature: req.temperature,
|
||||
maxTokens: req.maxTokens,
|
||||
logContext: {
|
||||
provider: req.provider,
|
||||
model: req.model,
|
||||
chatId: chatId ?? undefined,
|
||||
},
|
||||
})
|
||||
: runToolAwareChatCompletionsStream({
|
||||
client,
|
||||
model: req.model,
|
||||
messages: req.messages,
|
||||
temperature: req.temperature,
|
||||
maxTokens: req.maxTokens,
|
||||
logContext: {
|
||||
provider: req.provider,
|
||||
model: req.model,
|
||||
chatId: chatId ?? undefined,
|
||||
},
|
||||
});
|
||||
for await (const ev of streamEvents) {
|
||||
if (ev.type === "delta") {
|
||||
text += ev.text;
|
||||
yield { type: "delta", text: ev.text };
|
||||
@@ -76,7 +108,18 @@ export async function* runMultiplexStream(req: MultiplexRequest): AsyncGenerator
|
||||
}
|
||||
|
||||
if (ev.type === "tool_call") {
|
||||
toolMessages.push(buildToolLogMessageData(chatId, ev.event));
|
||||
if (shouldPersist && chatId) {
|
||||
const toolMessage = buildToolLogMessageData(chatId, ev.event);
|
||||
await prisma.message.create({
|
||||
data: {
|
||||
chatId: toolMessage.chatId,
|
||||
role: toolMessage.role as any,
|
||||
content: toolMessage.content,
|
||||
name: toolMessage.name,
|
||||
metadata: toolMessage.metadata as any,
|
||||
},
|
||||
});
|
||||
}
|
||||
yield { type: "tool_call", event: ev.event };
|
||||
continue;
|
||||
}
|
||||
@@ -88,10 +131,8 @@ export async function* runMultiplexStream(req: MultiplexRequest): AsyncGenerator
|
||||
} else if (req.provider === "anthropic") {
|
||||
const client = anthropicClient();
|
||||
|
||||
const system = req.messages.find((m) => m.role === "system")?.content;
|
||||
const msgs = req.messages
|
||||
.filter((m) => m.role !== "system")
|
||||
.map((m) => ({ role: m.role === "assistant" ? "assistant" : "user", content: m.content }));
|
||||
const system = getAnthropicSystemPrompt(req.messages);
|
||||
const msgs = req.messages.filter((message) => message.role !== "system").map((message) => buildAnthropicConversationMessage(message));
|
||||
|
||||
const stream = await client.messages.create({
|
||||
model: req.model,
|
||||
@@ -129,43 +170,36 @@ export async function* runMultiplexStream(req: MultiplexRequest): AsyncGenerator
|
||||
|
||||
const latencyMs = Math.round(performance.now() - t0);
|
||||
|
||||
await prisma.$transaction(async (tx) => {
|
||||
if (toolMessages.length) {
|
||||
await tx.message.createMany({
|
||||
data: toolMessages.map((message) => ({
|
||||
chatId: message.chatId,
|
||||
role: message.role as any,
|
||||
content: message.content,
|
||||
name: message.name,
|
||||
metadata: message.metadata as any,
|
||||
})),
|
||||
if (shouldPersist && chatId && call) {
|
||||
await prisma.$transaction(async (tx) => {
|
||||
await tx.message.create({
|
||||
data: { chatId, role: "assistant" as any, content: text },
|
||||
});
|
||||
await tx.llmCall.update({
|
||||
where: { id: call.id },
|
||||
data: {
|
||||
response: raw as any,
|
||||
latencyMs,
|
||||
inputTokens: usage?.inputTokens,
|
||||
outputTokens: usage?.outputTokens,
|
||||
totalTokens: usage?.totalTokens,
|
||||
},
|
||||
});
|
||||
}
|
||||
await tx.message.create({
|
||||
data: { chatId, role: "assistant" as any, content: text },
|
||||
});
|
||||
await tx.llmCall.update({
|
||||
where: { id: call.id },
|
||||
data: {
|
||||
response: raw as any,
|
||||
latencyMs,
|
||||
inputTokens: usage?.inputTokens,
|
||||
outputTokens: usage?.outputTokens,
|
||||
totalTokens: usage?.totalTokens,
|
||||
},
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
yield { type: "done", text, usage };
|
||||
} catch (e: any) {
|
||||
const latencyMs = Math.round(performance.now() - t0);
|
||||
await prisma.llmCall.update({
|
||||
where: { id: call.id },
|
||||
data: {
|
||||
error: e?.message ?? String(e),
|
||||
latencyMs,
|
||||
},
|
||||
});
|
||||
if (shouldPersist && call) {
|
||||
await prisma.llmCall.update({
|
||||
where: { id: call.id },
|
||||
data: {
|
||||
error: e?.message ?? String(e),
|
||||
latencyMs,
|
||||
},
|
||||
});
|
||||
}
|
||||
yield { type: "error", message: e?.message ?? String(e) };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +1,36 @@
|
||||
export type Provider = "openai" | "anthropic" | "xai";
|
||||
|
||||
export type ChatImageAttachment = {
|
||||
kind: "image";
|
||||
id: string;
|
||||
filename: string;
|
||||
mimeType: "image/png" | "image/jpeg";
|
||||
sizeBytes: number;
|
||||
dataUrl: string;
|
||||
};
|
||||
|
||||
export type ChatTextAttachment = {
|
||||
kind: "text";
|
||||
id: string;
|
||||
filename: string;
|
||||
mimeType: string;
|
||||
sizeBytes: number;
|
||||
text: string;
|
||||
truncated?: boolean;
|
||||
};
|
||||
|
||||
export type ChatAttachment = ChatImageAttachment | ChatTextAttachment;
|
||||
|
||||
export type ChatMessage = {
|
||||
role: "system" | "user" | "assistant" | "tool";
|
||||
content: string;
|
||||
name?: string;
|
||||
attachments?: ChatAttachment[];
|
||||
};
|
||||
|
||||
export type MultiplexRequest = {
|
||||
chatId?: string;
|
||||
persist?: boolean;
|
||||
provider: Provider;
|
||||
model: string;
|
||||
messages: ChatMessage[];
|
||||
|
||||
@@ -4,23 +4,33 @@ import type { FastifyInstance } from "fastify";
|
||||
import { prisma } from "./db.js";
|
||||
import { requireAdmin } from "./auth.js";
|
||||
import { env } from "./env.js";
|
||||
import { buildComparableAttachments } from "./llm/message-content.js";
|
||||
import { runMultiplex } from "./llm/multiplexer.js";
|
||||
import { runMultiplexStream } from "./llm/streaming.js";
|
||||
import { getModelCatalogSnapshot } from "./llm/model-catalog.js";
|
||||
import { openaiClient } from "./llm/providers.js";
|
||||
import { exaClient } from "./search/exa.js";
|
||||
import type { ChatAttachment } from "./llm/types.js";
|
||||
|
||||
type IncomingChatMessage = {
|
||||
role: "system" | "user" | "assistant" | "tool";
|
||||
content: string;
|
||||
name?: string;
|
||||
attachments?: ChatAttachment[];
|
||||
};
|
||||
|
||||
function sameMessage(
|
||||
a: { role: string; content: string; name?: string | null },
|
||||
b: { role: string; content: string; name?: string | null }
|
||||
a: { role: string; content: string; name?: string | null; metadata?: unknown },
|
||||
b: { role: string; content: string; name?: string | null; attachments?: ChatAttachment[] }
|
||||
) {
|
||||
return a.role === b.role && a.content === b.content && (a.name ?? null) === (b.name ?? null);
|
||||
const existingAttachments = JSON.stringify(buildComparableAttachments((a.metadata as Record<string, unknown> | null)?.attachments ?? null));
|
||||
const incomingAttachments = JSON.stringify(b.attachments ?? []);
|
||||
return (
|
||||
a.role === b.role &&
|
||||
a.content === b.content &&
|
||||
(a.name ?? null) === (b.name ?? null) &&
|
||||
existingAttachments === incomingAttachments
|
||||
);
|
||||
}
|
||||
|
||||
function isToolCallLogMetadata(value: unknown) {
|
||||
@@ -60,10 +70,67 @@ async function storeNonAssistantMessages(chatId: string, messages: IncomingChatM
|
||||
role: m.role as any,
|
||||
content: m.content,
|
||||
name: m.name,
|
||||
metadata: m.attachments?.length ? ({ attachments: m.attachments } as any) : undefined,
|
||||
})),
|
||||
});
|
||||
}
|
||||
|
||||
const MAX_CHAT_ATTACHMENTS = 8;
|
||||
const MAX_IMAGE_ATTACHMENT_BYTES = 6 * 1024 * 1024;
|
||||
const MAX_TEXT_ATTACHMENT_CHARS = 200_000;
|
||||
const MAX_IMAGE_DATA_URL_CHARS = 8_500_000;
|
||||
|
||||
const ChatAttachmentSchema = z.discriminatedUnion("kind", [
|
||||
z.object({
|
||||
kind: z.literal("image"),
|
||||
id: z.string().trim().min(1).max(128),
|
||||
filename: z.string().trim().min(1).max(255),
|
||||
mimeType: z.enum(["image/png", "image/jpeg"]),
|
||||
sizeBytes: z.number().int().positive().max(MAX_IMAGE_ATTACHMENT_BYTES),
|
||||
dataUrl: z
|
||||
.string()
|
||||
.max(MAX_IMAGE_DATA_URL_CHARS)
|
||||
.regex(/^data:image\/(?:png|jpeg);base64,[a-z0-9+/=\s]+$/i, "Invalid image data URL"),
|
||||
}),
|
||||
z.object({
|
||||
kind: z.literal("text"),
|
||||
id: z.string().trim().min(1).max(128),
|
||||
filename: z.string().trim().min(1).max(255),
|
||||
mimeType: z.string().trim().min(1).max(127),
|
||||
sizeBytes: z.number().int().positive().max(8 * 1024 * 1024),
|
||||
text: z.string().max(MAX_TEXT_ATTACHMENT_CHARS),
|
||||
truncated: z.boolean().optional(),
|
||||
}),
|
||||
]);
|
||||
|
||||
const CompletionMessageSchema = z
|
||||
.object({
|
||||
role: z.enum(["system", "user", "assistant", "tool"]),
|
||||
content: z.string(),
|
||||
name: z.string().optional(),
|
||||
attachments: z.array(ChatAttachmentSchema).max(MAX_CHAT_ATTACHMENTS).optional(),
|
||||
})
|
||||
.superRefine((value, ctx) => {
|
||||
if (value.attachments?.length && value.role === "tool") {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: "Tool messages cannot include attachments.",
|
||||
path: ["attachments"],
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
function mergeAttachmentsIntoMetadata(metadata: unknown, attachments?: ChatAttachment[]) {
|
||||
if (!attachments?.length) return metadata as any;
|
||||
if (!metadata || typeof metadata !== "object" || Array.isArray(metadata)) {
|
||||
return { attachments };
|
||||
}
|
||||
return {
|
||||
...(metadata as Record<string, unknown>),
|
||||
attachments,
|
||||
};
|
||||
}
|
||||
|
||||
const SearchRunBody = z.object({
|
||||
query: z.string().trim().min(1).optional(),
|
||||
title: z.string().trim().min(1).optional(),
|
||||
@@ -136,16 +203,15 @@ async function generateChatTitle(content: string) {
|
||||
const systemPrompt =
|
||||
"You create short chat titles. Return exactly one line, maximum 4 words, no quotes, no trailing punctuation.";
|
||||
const userPrompt = `User request:\n${content}\n\nTitle:`;
|
||||
const response = await openaiClient().chat.completions.create({
|
||||
const response = await openaiClient().responses.create({
|
||||
model: "gpt-4.1-mini",
|
||||
temperature: 0,
|
||||
max_completion_tokens: 20,
|
||||
messages: [
|
||||
{ role: "system", content: systemPrompt },
|
||||
{ role: "user", content: userPrompt },
|
||||
],
|
||||
max_output_tokens: 20,
|
||||
instructions: systemPrompt,
|
||||
input: userPrompt,
|
||||
store: false,
|
||||
});
|
||||
return response.choices?.[0]?.message?.content ?? "";
|
||||
return response.output_text ?? "";
|
||||
}
|
||||
|
||||
function normalizeUrlForMatch(input: string | null | undefined) {
|
||||
@@ -261,10 +327,50 @@ export async function registerRoutes(app: FastifyInstance) {
|
||||
|
||||
app.post("/v1/chats", async (req) => {
|
||||
requireAdmin(req);
|
||||
const Body = z.object({ title: z.string().optional() });
|
||||
const body = Body.parse(req.body ?? {});
|
||||
const Body = z
|
||||
.object({
|
||||
title: z.string().optional(),
|
||||
provider: z.enum(["openai", "anthropic", "xai"]).optional(),
|
||||
model: z.string().trim().min(1).optional(),
|
||||
messages: z.array(CompletionMessageSchema).optional(),
|
||||
})
|
||||
.superRefine((value, ctx) => {
|
||||
if (value.provider && !value.model) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: "model is required when provider is supplied",
|
||||
path: ["model"],
|
||||
});
|
||||
}
|
||||
if (!value.provider && value.model) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: "provider is required when model is supplied",
|
||||
path: ["provider"],
|
||||
});
|
||||
}
|
||||
});
|
||||
const parsed = Body.safeParse(req.body ?? {});
|
||||
if (!parsed.success) return app.httpErrors.badRequest(parsed.error.message);
|
||||
const body = parsed.data;
|
||||
const chat = await prisma.chat.create({
|
||||
data: { title: body.title },
|
||||
data: {
|
||||
title: body.title,
|
||||
initiatedProvider: body.provider as any,
|
||||
initiatedModel: body.model,
|
||||
lastUsedProvider: body.provider as any,
|
||||
lastUsedModel: body.model,
|
||||
messages: body.messages?.length
|
||||
? {
|
||||
create: body.messages.map((message) => ({
|
||||
role: message.role as any,
|
||||
content: message.content,
|
||||
name: message.name,
|
||||
metadata: message.attachments?.length ? ({ attachments: message.attachments } as any) : undefined,
|
||||
})),
|
||||
}
|
||||
: undefined,
|
||||
},
|
||||
select: {
|
||||
id: true,
|
||||
title: true,
|
||||
@@ -768,10 +874,13 @@ export async function registerRoutes(app: FastifyInstance) {
|
||||
content: z.string(),
|
||||
name: z.string().optional(),
|
||||
metadata: z.unknown().optional(),
|
||||
attachments: z.array(ChatAttachmentSchema).max(MAX_CHAT_ATTACHMENTS).optional(),
|
||||
});
|
||||
|
||||
const { chatId } = Params.parse(req.params);
|
||||
const body = Body.parse(req.body);
|
||||
const parsed = Body.safeParse(req.body);
|
||||
if (!parsed.success) return app.httpErrors.badRequest(parsed.error.message);
|
||||
const body = parsed.data;
|
||||
|
||||
const msg = await prisma.message.create({
|
||||
data: {
|
||||
@@ -779,7 +888,7 @@ export async function registerRoutes(app: FastifyInstance) {
|
||||
role: body.role as any,
|
||||
content: body.content,
|
||||
name: body.name,
|
||||
metadata: body.metadata as any,
|
||||
metadata: mergeAttachmentsIntoMetadata(body.metadata, body.attachments) as any,
|
||||
},
|
||||
});
|
||||
|
||||
@@ -794,18 +903,14 @@ export async function registerRoutes(app: FastifyInstance) {
|
||||
chatId: z.string().optional(),
|
||||
provider: z.enum(["openai", "anthropic", "xai"]),
|
||||
model: z.string().min(1),
|
||||
messages: z.array(
|
||||
z.object({
|
||||
role: z.enum(["system", "user", "assistant", "tool"]),
|
||||
content: z.string(),
|
||||
name: z.string().optional(),
|
||||
})
|
||||
),
|
||||
messages: z.array(CompletionMessageSchema),
|
||||
temperature: z.number().min(0).max(2).optional(),
|
||||
maxTokens: z.number().int().positive().optional(),
|
||||
});
|
||||
|
||||
const body = Body.parse(req.body);
|
||||
const parsed = Body.safeParse(req.body);
|
||||
if (!parsed.success) return app.httpErrors.badRequest(parsed.error.message);
|
||||
const body = parsed.data;
|
||||
|
||||
// ensure chat exists if provided
|
||||
if (body.chatId) {
|
||||
@@ -830,22 +935,29 @@ export async function registerRoutes(app: FastifyInstance) {
|
||||
app.post("/v1/chat-completions/stream", async (req, reply) => {
|
||||
requireAdmin(req);
|
||||
|
||||
const Body = z.object({
|
||||
chatId: z.string().optional(),
|
||||
provider: z.enum(["openai", "anthropic", "xai"]),
|
||||
model: z.string().min(1),
|
||||
messages: z.array(
|
||||
z.object({
|
||||
role: z.enum(["system", "user", "assistant", "tool"]),
|
||||
content: z.string(),
|
||||
name: z.string().optional(),
|
||||
})
|
||||
),
|
||||
temperature: z.number().min(0).max(2).optional(),
|
||||
maxTokens: z.number().int().positive().optional(),
|
||||
});
|
||||
const Body = z
|
||||
.object({
|
||||
chatId: z.string().optional(),
|
||||
persist: z.boolean().optional(),
|
||||
provider: z.enum(["openai", "anthropic", "xai"]),
|
||||
model: z.string().min(1),
|
||||
messages: z.array(CompletionMessageSchema),
|
||||
temperature: z.number().min(0).max(2).optional(),
|
||||
maxTokens: z.number().int().positive().optional(),
|
||||
})
|
||||
.superRefine((value, ctx) => {
|
||||
if (value.persist === false && value.chatId) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: "chatId must be omitted when persist is false",
|
||||
path: ["chatId"],
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const body = Body.parse(req.body);
|
||||
const parsed = Body.safeParse(req.body);
|
||||
if (!parsed.success) return app.httpErrors.badRequest(parsed.error.message);
|
||||
const body = parsed.data;
|
||||
|
||||
// ensure chat exists if provided
|
||||
if (body.chatId) {
|
||||
@@ -854,11 +966,12 @@ export async function registerRoutes(app: FastifyInstance) {
|
||||
}
|
||||
|
||||
// Store only new non-assistant messages to avoid duplicate history entries.
|
||||
if (body.chatId) {
|
||||
if (body.persist !== false && body.chatId) {
|
||||
await storeNonAssistantMessages(body.chatId, body.messages);
|
||||
}
|
||||
|
||||
reply.raw.writeHead(200, buildSseHeaders(typeof req.headers.origin === "string" ? req.headers.origin : undefined));
|
||||
reply.raw.flushHeaders();
|
||||
|
||||
const send = (event: string, data: any) => {
|
||||
reply.raw.write(`event: ${event}\n`);
|
||||
|
||||
160
server/src/search/searxng.ts
Normal file
160
server/src/search/searxng.ts
Normal file
@@ -0,0 +1,160 @@
|
||||
import { env } from "../env.js";
|
||||
|
||||
const SEARXNG_TIMEOUT_MS = 12_000;
|
||||
const DEFAULT_SEARXNG_CATEGORIES = "general";
|
||||
|
||||
export type SearxngSearchOptions = {
|
||||
numResults: number;
|
||||
includeDomains?: string[];
|
||||
excludeDomains?: string[];
|
||||
};
|
||||
|
||||
export type SearxngSearchResult = {
|
||||
title: string | null;
|
||||
url: string | null;
|
||||
publishedDate: string | null;
|
||||
summary: string | null;
|
||||
text: string | null;
|
||||
engines: string[];
|
||||
};
|
||||
|
||||
export type SearxngSearchResponse = {
|
||||
query: string;
|
||||
requestId: null;
|
||||
results: SearxngSearchResult[];
|
||||
};
|
||||
|
||||
function clipText(input: string, maxCharacters: number) {
|
||||
return input.length <= maxCharacters ? input : `${input.slice(0, maxCharacters)}...`;
|
||||
}
|
||||
|
||||
function compactWhitespace(input: string) {
|
||||
return input.replace(/\r/g, "").replace(/[ \t]+\n/g, "\n").replace(/\n{3,}/g, "\n\n").replace(/\s+/g, " ").trim();
|
||||
}
|
||||
|
||||
function requireSearxngBaseUrl() {
|
||||
if (!env.SEARXNG_BASE_URL) {
|
||||
throw new Error("SEARXNG_BASE_URL not set");
|
||||
}
|
||||
return env.SEARXNG_BASE_URL.endsWith("/") ? env.SEARXNG_BASE_URL : `${env.SEARXNG_BASE_URL}/`;
|
||||
}
|
||||
|
||||
function normalizeDomain(input: string) {
|
||||
const trimmed = input.trim().toLowerCase();
|
||||
if (!trimmed) return null;
|
||||
|
||||
try {
|
||||
const parsed = new URL(trimmed.includes("://") ? trimmed : `https://${trimmed}`);
|
||||
return parsed.hostname.replace(/^www\./, "");
|
||||
} catch {
|
||||
return trimmed.split(/[/?#]/, 1)[0]?.replace(/^www\./, "") || null;
|
||||
}
|
||||
}
|
||||
|
||||
function normalizeDomains(input: string[] | undefined) {
|
||||
return Array.from(new Set((input ?? []).map(normalizeDomain).filter((domain): domain is string => Boolean(domain))));
|
||||
}
|
||||
|
||||
function hostnameMatchesDomain(urlRaw: string | null, domain: string) {
|
||||
if (!urlRaw) return false;
|
||||
try {
|
||||
const hostname = new URL(urlRaw).hostname.toLowerCase().replace(/^www\./, "");
|
||||
return hostname === domain || hostname.endsWith(`.${domain}`);
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function filterResultsByDomains(results: SearxngSearchResult[], options: SearxngSearchOptions) {
|
||||
const includeDomains = normalizeDomains(options.includeDomains);
|
||||
const excludeDomains = normalizeDomains(options.excludeDomains);
|
||||
return results.filter((result) => {
|
||||
if (includeDomains.length && !includeDomains.some((domain) => hostnameMatchesDomain(result.url, domain))) return false;
|
||||
if (excludeDomains.some((domain) => hostnameMatchesDomain(result.url, domain))) return false;
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
function buildSearxngQuery(query: string, options: SearxngSearchOptions) {
|
||||
const includeDomains = normalizeDomains(options.includeDomains);
|
||||
const excludeDomains = normalizeDomains(options.excludeDomains);
|
||||
const includeClause =
|
||||
includeDomains.length === 0
|
||||
? ""
|
||||
: includeDomains.length === 1
|
||||
? `site:${includeDomains[0]}`
|
||||
: `(${includeDomains.map((domain) => `site:${domain}`).join(" OR ")})`;
|
||||
const excludeClause = excludeDomains.map((domain) => `-site:${domain}`).join(" ");
|
||||
return [query, includeClause, excludeClause].filter(Boolean).join(" ");
|
||||
}
|
||||
|
||||
function buildSearchUrl(query: string, options: SearxngSearchOptions) {
|
||||
const url = new URL("search", requireSearxngBaseUrl());
|
||||
url.searchParams.set("q", buildSearxngQuery(query, options));
|
||||
url.searchParams.set("categories", DEFAULT_SEARXNG_CATEGORIES);
|
||||
url.searchParams.set("language", "auto");
|
||||
url.searchParams.set("safesearch", "1");
|
||||
url.searchParams.set("format", "json");
|
||||
return url;
|
||||
}
|
||||
|
||||
async function fetchSearxng(url: URL, accept: string) {
|
||||
const controller = new AbortController();
|
||||
const timeout = setTimeout(() => controller.abort(), SEARXNG_TIMEOUT_MS);
|
||||
|
||||
try {
|
||||
return await fetch(url, {
|
||||
redirect: "follow",
|
||||
signal: controller.signal,
|
||||
headers: {
|
||||
"User-Agent": "SybilBot/1.0 (+https://sybil.local)",
|
||||
Accept: accept,
|
||||
},
|
||||
});
|
||||
} finally {
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
}
|
||||
|
||||
function stringOrNull(value: unknown) {
|
||||
if (typeof value !== "string") return null;
|
||||
const normalized = compactWhitespace(value);
|
||||
return normalized || null;
|
||||
}
|
||||
|
||||
function stringArray(value: unknown) {
|
||||
if (!Array.isArray(value)) return [];
|
||||
return value.filter((item): item is string => typeof item === "string").map(compactWhitespace).filter(Boolean);
|
||||
}
|
||||
|
||||
function mapJsonResult(result: any): SearxngSearchResult {
|
||||
const summary = stringOrNull(result?.content) ?? stringOrNull(result?.snippet);
|
||||
const text = summary ? clipText(summary, 700) : null;
|
||||
return {
|
||||
title: stringOrNull(result?.title),
|
||||
url: stringOrNull(result?.url),
|
||||
publishedDate: stringOrNull(result?.publishedDate) ?? stringOrNull(result?.published_date),
|
||||
summary: summary ? clipText(summary, 1_400) : null,
|
||||
text,
|
||||
engines: stringArray(result?.engines ?? (typeof result?.engine === "string" ? [result.engine] : [])),
|
||||
};
|
||||
}
|
||||
|
||||
export async function searchSearxng(query: string, options: SearxngSearchOptions): Promise<SearxngSearchResponse> {
|
||||
const url = buildSearchUrl(query, options);
|
||||
const response = await fetchSearxng(url, "application/json");
|
||||
if (!response.ok) {
|
||||
await response.arrayBuffer();
|
||||
throw new Error(`SearXNG JSON search failed with status ${response.status}. Verify search.formats includes json.`);
|
||||
}
|
||||
|
||||
const contentType = response.headers.get("content-type")?.toLowerCase() ?? "";
|
||||
if (!contentType.includes("application/json")) {
|
||||
await response.arrayBuffer();
|
||||
throw new Error(`SearXNG JSON search returned ${contentType || "unknown content type"}.`);
|
||||
}
|
||||
|
||||
const data: any = await response.json();
|
||||
const results = Array.isArray(data?.results) ? data.results.map(mapJsonResult) : [];
|
||||
return { query, requestId: null, results: filterResultsByDomains(results, options).slice(0, options.numResults) };
|
||||
}
|
||||
107
server/tests/chat-tools-streaming.test.ts
Normal file
107
server/tests/chat-tools-streaming.test.ts
Normal file
@@ -0,0 +1,107 @@
|
||||
import assert from "node:assert/strict";
|
||||
import test from "node:test";
|
||||
import {
|
||||
runToolAwareChatCompletionsStream,
|
||||
runToolAwareOpenAIChatStream,
|
||||
type ToolAwareStreamingEvent,
|
||||
} from "../src/llm/chat-tools.js";
|
||||
|
||||
async function* streamFrom(events: any[]) {
|
||||
for (const event of events) {
|
||||
await Promise.resolve();
|
||||
yield event;
|
||||
}
|
||||
}
|
||||
|
||||
async function collectEvents(iterable: AsyncIterable<ToolAwareStreamingEvent>) {
|
||||
const events: ToolAwareStreamingEvent[] = [];
|
||||
for await (const event of iterable) {
|
||||
events.push(event);
|
||||
}
|
||||
return events;
|
||||
}
|
||||
|
||||
test("OpenAI Responses stream emits text deltas as they arrive", async () => {
|
||||
const outputMessage = {
|
||||
id: "msg_1",
|
||||
type: "message",
|
||||
role: "assistant",
|
||||
status: "completed",
|
||||
content: [{ type: "output_text", text: "Hello" }],
|
||||
};
|
||||
const client = {
|
||||
responses: {
|
||||
create: async () =>
|
||||
streamFrom([
|
||||
{ type: "response.output_item.added", item: { ...outputMessage, content: [] }, output_index: 0 },
|
||||
{ type: "response.output_text.delta", delta: "Hel", output_index: 0, content_index: 0 },
|
||||
{ type: "response.output_text.delta", delta: "lo", output_index: 0, content_index: 0 },
|
||||
{ type: "response.output_item.done", item: outputMessage, output_index: 0 },
|
||||
{
|
||||
type: "response.completed",
|
||||
response: {
|
||||
status: "completed",
|
||||
output_text: "Hello",
|
||||
output: [outputMessage],
|
||||
usage: { input_tokens: 2, output_tokens: 1, total_tokens: 3 },
|
||||
},
|
||||
},
|
||||
]),
|
||||
},
|
||||
};
|
||||
|
||||
const events = await collectEvents(
|
||||
runToolAwareOpenAIChatStream({
|
||||
client: client as any,
|
||||
model: "gpt-test",
|
||||
messages: [{ role: "user", content: "Say hello" }],
|
||||
})
|
||||
);
|
||||
|
||||
assert.deepEqual(
|
||||
events.map((event) => event.type),
|
||||
["delta", "delta", "done"]
|
||||
);
|
||||
assert.deepEqual(
|
||||
events.filter((event) => event.type === "delta").map((event) => event.text),
|
||||
["Hel", "lo"]
|
||||
);
|
||||
assert.equal(events.at(-1)?.type === "done" ? events.at(-1)?.result.text : null, "Hello");
|
||||
});
|
||||
|
||||
test("OpenAI-compatible Chat Completions stream emits text deltas as they arrive", async () => {
|
||||
const client = {
|
||||
chat: {
|
||||
completions: {
|
||||
create: async () =>
|
||||
streamFrom([
|
||||
{ choices: [{ delta: { role: "assistant" } }] },
|
||||
{ choices: [{ delta: { content: "Hel" } }] },
|
||||
{ choices: [{ delta: { content: "lo" } }] },
|
||||
{
|
||||
choices: [{ delta: {}, finish_reason: "stop" }],
|
||||
usage: { prompt_tokens: 2, completion_tokens: 1, total_tokens: 3 },
|
||||
},
|
||||
]),
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const events = await collectEvents(
|
||||
runToolAwareChatCompletionsStream({
|
||||
client: client as any,
|
||||
model: "grok-test",
|
||||
messages: [{ role: "user", content: "Say hello" }],
|
||||
})
|
||||
);
|
||||
|
||||
assert.deepEqual(
|
||||
events.map((event) => event.type),
|
||||
["delta", "delta", "done"]
|
||||
);
|
||||
assert.deepEqual(
|
||||
events.filter((event) => event.type === "delta").map((event) => event.text),
|
||||
["Hel", "lo"]
|
||||
);
|
||||
assert.equal(events.at(-1)?.type === "done" ? events.at(-1)?.result.text : null, "Hello");
|
||||
});
|
||||
@@ -40,6 +40,10 @@ Default dev URL: `http://localhost:5173`
|
||||
- Composer adapts to the active item:
|
||||
- Chat sends `POST /v1/chat-completions/stream` (SSE).
|
||||
- Search sends `POST /v1/searches/:searchId/run/stream` (SSE).
|
||||
- Keyboard shortcuts:
|
||||
- `Cmd/Ctrl+J`: start a new chat.
|
||||
- `Shift+Cmd/Ctrl+J`: start a new search.
|
||||
- `Cmd/Ctrl+Up/Down`: move through the sidebar list.
|
||||
|
||||
Client API contract docs:
|
||||
- `../docs/api/rest.md`
|
||||
|
||||
983
web/src/App.tsx
983
web/src/App.tsx
File diff suppressed because it is too large
Load Diff
103
web/src/components/chat/chat-attachment-list.tsx
Normal file
103
web/src/components/chat/chat-attachment-list.tsx
Normal file
@@ -0,0 +1,103 @@
|
||||
import { FileText, Image as ImageIcon, X } from "lucide-preact";
|
||||
import type { ChatAttachment } from "@/lib/api";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
type Props = {
|
||||
attachments: ChatAttachment[];
|
||||
tone?: "composer" | "user" | "assistant";
|
||||
onRemove?: (id: string) => void;
|
||||
};
|
||||
|
||||
function getTextPreview(value: string) {
|
||||
const normalized = value.replace(/\r/g, "").trim();
|
||||
if (!normalized) return "(empty file)";
|
||||
return normalized.length <= 280 ? normalized : `${normalized.slice(0, 280).trimEnd()}...`;
|
||||
}
|
||||
|
||||
function getSurfaceClasses(tone: Props["tone"]) {
|
||||
if (tone === "user") {
|
||||
return "border-white/12 bg-black/16 text-fuchsia-50";
|
||||
}
|
||||
if (tone === "assistant") {
|
||||
return "border-violet-300/16 bg-violet-400/8 text-violet-50";
|
||||
}
|
||||
return "border-violet-300/18 bg-background/40 text-violet-50";
|
||||
}
|
||||
|
||||
export function ChatAttachmentList({ attachments, tone = "composer", onRemove }: Props) {
|
||||
if (!attachments.length) return null;
|
||||
|
||||
const surfaceClasses = getSurfaceClasses(tone);
|
||||
|
||||
return (
|
||||
<div className="space-y-2">
|
||||
{attachments.map((attachment) => {
|
||||
const isImage = attachment.kind === "image";
|
||||
return (
|
||||
<div key={attachment.id} className={cn("overflow-hidden rounded-xl border", surfaceClasses)}>
|
||||
{isImage ? (
|
||||
<div className="grid gap-0 md:grid-cols-[minmax(0,220px)_minmax(0,1fr)]">
|
||||
<div className="border-b border-white/10 bg-black/10 md:border-b-0 md:border-r">
|
||||
<img src={attachment.dataUrl} alt={attachment.filename} className="block max-h-56 w-full object-cover" />
|
||||
</div>
|
||||
<div className="flex min-w-0 flex-col gap-2 p-3">
|
||||
<div className="flex items-start gap-2">
|
||||
<span className="mt-0.5 rounded-md border border-white/12 bg-white/5 p-1.5">
|
||||
<ImageIcon className="h-3.5 w-3.5" />
|
||||
</span>
|
||||
<div className="min-w-0 flex-1">
|
||||
<p className="truncate text-sm font-medium">{attachment.filename}</p>
|
||||
<p className="text-xs text-muted-foreground">{attachment.mimeType}</p>
|
||||
</div>
|
||||
{onRemove ? (
|
||||
<button
|
||||
type="button"
|
||||
className="rounded-md border border-white/10 p-1 text-muted-foreground transition hover:bg-white/8 hover:text-foreground"
|
||||
onClick={() => onRemove(attachment.id)}
|
||||
aria-label={`Remove ${attachment.filename}`}
|
||||
>
|
||||
<X className="h-3.5 w-3.5" />
|
||||
</button>
|
||||
) : null}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<div className="p-3">
|
||||
<div className="flex items-start gap-2">
|
||||
<span className="mt-0.5 rounded-md border border-white/12 bg-white/5 p-1.5">
|
||||
<FileText className="h-3.5 w-3.5" />
|
||||
</span>
|
||||
<div className="min-w-0 flex-1">
|
||||
<div className="flex items-start gap-2">
|
||||
<div className="min-w-0 flex-1">
|
||||
<p className="truncate text-sm font-medium">{attachment.filename}</p>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
{attachment.mimeType}
|
||||
{attachment.truncated ? " · truncated" : ""}
|
||||
</p>
|
||||
</div>
|
||||
{onRemove ? (
|
||||
<button
|
||||
type="button"
|
||||
className="rounded-md border border-white/10 p-1 text-muted-foreground transition hover:bg-white/8 hover:text-foreground"
|
||||
onClick={() => onRemove(attachment.id)}
|
||||
aria-label={`Remove ${attachment.filename}`}
|
||||
>
|
||||
<X className="h-3.5 w-3.5" />
|
||||
</button>
|
||||
) : null}
|
||||
</div>
|
||||
<pre className="mt-2 overflow-x-auto rounded-lg border border-white/8 bg-black/16 p-3 text-xs leading-5 text-inherit whitespace-pre-wrap">
|
||||
{getTextPreview(attachment.text)}
|
||||
</pre>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
import { cn } from "@/lib/utils";
|
||||
import type { Message } from "@/lib/api";
|
||||
import { ChatAttachmentList } from "@/components/chat/chat-attachment-list";
|
||||
import { getMessageAttachments, type Message } from "@/lib/api";
|
||||
import { MarkdownContent } from "@/components/markdown/markdown-content";
|
||||
import { Globe2, Link2, Wrench } from "lucide-preact";
|
||||
|
||||
@@ -11,9 +12,16 @@ type Props = {
|
||||
|
||||
type ToolLogMetadata = {
|
||||
kind: "tool_call";
|
||||
toolCallId?: string;
|
||||
toolName?: string;
|
||||
status?: "completed" | "failed";
|
||||
summary?: string;
|
||||
args?: Record<string, unknown>;
|
||||
startedAt?: string;
|
||||
completedAt?: string;
|
||||
durationMs?: number;
|
||||
error?: string | null;
|
||||
resultPreview?: string | null;
|
||||
};
|
||||
|
||||
function asToolLogMetadata(value: unknown): ToolLogMetadata | null {
|
||||
@@ -25,10 +33,26 @@ function asToolLogMetadata(value: unknown): ToolLogMetadata | null {
|
||||
|
||||
function getToolSummary(message: Message, metadata: ToolLogMetadata) {
|
||||
if (typeof metadata.summary === "string" && metadata.summary.trim()) return metadata.summary.trim();
|
||||
if (metadata.status === "failed" && typeof metadata.error === "string" && metadata.error.trim()) {
|
||||
return `Tool failed: ${metadata.error.trim()}`;
|
||||
}
|
||||
if (typeof metadata.resultPreview === "string" && metadata.resultPreview.trim()) return metadata.resultPreview.trim();
|
||||
if (message.content.trim()) return message.content.trim();
|
||||
const toolName = metadata.toolName?.trim() || message.name?.trim() || "unknown_tool";
|
||||
return `Ran tool '${toolName}'.`;
|
||||
}
|
||||
|
||||
function getToolLabel(message: Message, metadata: ToolLogMetadata) {
|
||||
const raw = metadata.toolName?.trim() || message.name?.trim();
|
||||
if (!raw) return "Tool call";
|
||||
return raw
|
||||
.replace(/_/g, " ")
|
||||
.split(/\s+/)
|
||||
.filter(Boolean)
|
||||
.map((word) => `${word.slice(0, 1).toUpperCase()}${word.slice(1)}`)
|
||||
.join(" ");
|
||||
}
|
||||
|
||||
function getToolIconName(toolName: string | null | undefined) {
|
||||
const lowered = toolName?.toLowerCase() ?? "";
|
||||
if (lowered.includes("search")) return "search";
|
||||
@@ -36,6 +60,27 @@ function getToolIconName(toolName: string | null | undefined) {
|
||||
return "generic";
|
||||
}
|
||||
|
||||
function formatDuration(durationMs: unknown) {
|
||||
if (typeof durationMs !== "number" || !Number.isFinite(durationMs) || durationMs <= 0) return null;
|
||||
return `${Math.round(durationMs)} ms`;
|
||||
}
|
||||
|
||||
function formatToolTimestamp(...values: Array<string | null | undefined>) {
|
||||
const value = values.find((candidate) => candidate && !Number.isNaN(new Date(candidate).getTime()));
|
||||
if (!value) return null;
|
||||
return new Intl.DateTimeFormat(undefined, { hour: "numeric", minute: "2-digit" }).format(new Date(value));
|
||||
}
|
||||
|
||||
function getToolDetailLabel(message: Message, metadata: ToolLogMetadata, isFailed: boolean) {
|
||||
return [
|
||||
isFailed ? "Failed" : "Completed",
|
||||
formatDuration(metadata.durationMs),
|
||||
formatToolTimestamp(message.createdAt, metadata.completedAt, metadata.startedAt),
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(" • ");
|
||||
}
|
||||
|
||||
export function ChatMessagesPanel({ messages, isLoading, isSending }: Props) {
|
||||
const hasPendingAssistant = messages.some((message) => message.id.startsWith("temp-assistant-") && message.content.trim().length === 0);
|
||||
|
||||
@@ -49,18 +94,39 @@ export function ChatMessagesPanel({ messages, isLoading, isSending }: Props) {
|
||||
const iconKind = getToolIconName(toolLogMetadata.toolName ?? message.name);
|
||||
const Icon = iconKind === "search" ? Globe2 : iconKind === "fetch" ? Link2 : Wrench;
|
||||
const isFailed = toolLogMetadata.status === "failed";
|
||||
const toolSummary = getToolSummary(message, toolLogMetadata);
|
||||
const toolLabel = getToolLabel(message, toolLogMetadata);
|
||||
const toolDetailLabel = getToolDetailLabel(message, toolLogMetadata, isFailed);
|
||||
return (
|
||||
<div key={message.id} className="flex justify-start">
|
||||
<div
|
||||
className={cn(
|
||||
"inline-flex max-w-[85%] items-center gap-3 rounded-lg border px-3.5 py-2 text-sm leading-5 shadow-[inset_0_1px_0_hsl(180_100%_88%_/_0.06)]",
|
||||
"inline-flex max-w-[85%] min-w-0 items-start gap-3 overflow-hidden rounded-xl border px-3 py-2.5 shadow-[inset_0_1px_0_hsl(180_100%_88%_/_0.06)]",
|
||||
isFailed
|
||||
? "border-rose-500/40 bg-rose-950/18 text-rose-200"
|
||||
: "border-cyan-400/34 bg-cyan-950/18 text-cyan-100"
|
||||
? "border-rose-400/34 bg-[linear-gradient(90deg,hsl(350_72%_44%_/_0.18),hsl(342_66%_9%_/_0.72))]"
|
||||
: "border-cyan-400/34 bg-[linear-gradient(90deg,hsl(184_89%_21%_/_0.70),hsl(208_66%_12%_/_0.78))]"
|
||||
)}
|
||||
title={`${toolSummary}\n${toolLabel} • ${toolDetailLabel}`}
|
||||
>
|
||||
<Icon className="h-4 w-4 shrink-0 text-cyan-300" />
|
||||
<span>{getToolSummary(message, toolLogMetadata)}</span>
|
||||
<span
|
||||
className={cn(
|
||||
"mt-0.5 flex h-[30px] w-[30px] shrink-0 items-center justify-center rounded-lg border",
|
||||
isFailed ? "border-rose-400/34 bg-rose-400/13 text-rose-300" : "border-cyan-300/34 bg-cyan-300/13 text-cyan-300"
|
||||
)}
|
||||
>
|
||||
<Icon className="h-4 w-4" />
|
||||
</span>
|
||||
<span className="min-w-0 flex-1 space-y-1">
|
||||
<span className={cn("block truncate text-sm leading-5", isFailed ? "text-rose-200" : "text-violet-50/95")}>
|
||||
{toolSummary}
|
||||
</span>
|
||||
<span className="flex min-w-0 items-center gap-1.5 text-[11px] leading-4">
|
||||
<span className={cn("min-w-0 truncate font-semibold", isFailed ? "text-rose-300/85" : "text-cyan-200/90")}>
|
||||
{toolLabel}
|
||||
</span>
|
||||
<span className="min-w-0 truncate text-violet-200/64">{toolDetailLabel}</span>
|
||||
</span>
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
@@ -68,28 +134,30 @@ export function ChatMessagesPanel({ messages, isLoading, isSending }: Props) {
|
||||
|
||||
const isUser = message.role === "user";
|
||||
const isPendingAssistant = message.id.startsWith("temp-assistant-") && isSending && message.content.trim().length === 0;
|
||||
const attachments = getMessageAttachments(message.metadata);
|
||||
return (
|
||||
<div key={message.id} className={cn("flex", isUser ? "justify-end" : "justify-start")}>
|
||||
<div
|
||||
className={cn(
|
||||
"max-w-[85%]",
|
||||
"max-w-[85%] space-y-3",
|
||||
isUser
|
||||
? "rounded-xl border border-violet-300/24 bg-[linear-gradient(135deg,hsl(258_86%_48%_/_0.86),hsl(278_72%_29%_/_0.86))] px-4 py-3 text-sm leading-6 text-fuchsia-50 shadow-sm"
|
||||
: "text-base leading-7 text-violet-50"
|
||||
)}
|
||||
>
|
||||
{attachments.length ? <ChatAttachmentList attachments={attachments} tone={isUser ? "user" : "assistant"} /> : null}
|
||||
{isPendingAssistant ? (
|
||||
<span className="inline-flex items-center gap-1" aria-label="Assistant is typing" role="status">
|
||||
<span className="inline-block h-1.5 w-1.5 animate-bounce rounded-full bg-muted-foreground [animation-delay:0ms]" />
|
||||
<span className="inline-block h-1.5 w-1.5 animate-bounce rounded-full bg-muted-foreground [animation-delay:140ms]" />
|
||||
<span className="inline-block h-1.5 w-1.5 animate-bounce rounded-full bg-muted-foreground [animation-delay:280ms]" />
|
||||
</span>
|
||||
) : (
|
||||
) : message.content.trim() ? (
|
||||
<MarkdownContent
|
||||
markdown={message.content}
|
||||
className={cn("[&_a]:text-inherit [&_a]:underline", isUser ? "leading-[1.78] text-fuchsia-50" : "leading-[1.82] text-violet-50")}
|
||||
/>
|
||||
)}
|
||||
) : null}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { useMemo } from "preact/hooks";
|
||||
import DOMPurify from "dompurify";
|
||||
import { marked } from "marked";
|
||||
import { marked, Renderer } from "marked";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
type MarkdownMode = "default" | "citationTokens";
|
||||
@@ -21,8 +21,15 @@ function replaceMarkdownLinksWithCitationTokens(markdown: string, resolveCitatio
|
||||
});
|
||||
}
|
||||
|
||||
const markdownRenderer = new Renderer();
|
||||
const renderTable = markdownRenderer.table.bind(markdownRenderer);
|
||||
|
||||
markdownRenderer.table = (token) => {
|
||||
return `<div class="md-table-scroll">${renderTable(token)}</div>`;
|
||||
};
|
||||
|
||||
function renderMarkdown(markdown: string) {
|
||||
const rawHtml = marked.parse(markdown, { gfm: true, breaks: true }) as string;
|
||||
const rawHtml = marked.parse(markdown, { gfm: true, breaks: true, renderer: markdownRenderer }) as string;
|
||||
return DOMPurify.sanitize(rawHtml, { ADD_ATTR: ["class", "target", "rel"] });
|
||||
}
|
||||
|
||||
|
||||
@@ -83,6 +83,77 @@ textarea {
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
.md-table-scroll {
|
||||
max-width: 100%;
|
||||
margin: 0.35rem 0 1rem;
|
||||
overflow-x: auto;
|
||||
overflow-y: hidden;
|
||||
border: 1px solid hsl(var(--border) / 0.86);
|
||||
border-radius: 0.625rem;
|
||||
background: hsl(246 34% 10% / 0.76);
|
||||
box-shadow: inset 0 1px 0 hsl(258 80% 88% / 0.06);
|
||||
}
|
||||
|
||||
.md-content table {
|
||||
width: max-content;
|
||||
min-width: 100%;
|
||||
border-collapse: separate;
|
||||
border-spacing: 0;
|
||||
font-size: 0.94em;
|
||||
line-height: 1.48;
|
||||
}
|
||||
|
||||
.md-table-scroll::-webkit-scrollbar {
|
||||
height: 0.45rem;
|
||||
}
|
||||
|
||||
.md-table-scroll::-webkit-scrollbar-thumb {
|
||||
border-radius: 9999px;
|
||||
background: hsl(263 78% 72% / 0.34);
|
||||
}
|
||||
|
||||
.md-content th,
|
||||
.md-content td {
|
||||
padding: 0.48rem 0.7rem;
|
||||
border-right: 1px solid hsl(var(--border) / 0.72);
|
||||
border-bottom: 1px solid hsl(var(--border) / 0.7);
|
||||
text-align: left;
|
||||
vertical-align: top;
|
||||
word-break: normal;
|
||||
}
|
||||
|
||||
.md-content th:last-child,
|
||||
.md-content td:last-child {
|
||||
border-right: 0;
|
||||
}
|
||||
|
||||
.md-content tr:last-child td {
|
||||
border-bottom: 0;
|
||||
}
|
||||
|
||||
.md-content th {
|
||||
background: hsl(251 40% 15% / 0.92);
|
||||
color: hsl(258 36% 98%);
|
||||
font-weight: 700;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.md-content td {
|
||||
color: hsl(258 34% 94% / 0.96);
|
||||
}
|
||||
|
||||
.md-content tbody tr:nth-child(odd) td {
|
||||
background: hsl(242 32% 10% / 0.58);
|
||||
}
|
||||
|
||||
.md-content tbody tr:nth-child(even) td {
|
||||
background: hsl(252 36% 13% / 0.46);
|
||||
}
|
||||
|
||||
.md-content tbody tr:hover td {
|
||||
background: hsl(263 46% 20% / 0.48);
|
||||
}
|
||||
|
||||
.md-content p + p {
|
||||
margin-top: 0.85rem;
|
||||
}
|
||||
|
||||
@@ -90,6 +90,27 @@ export type SearchDetail = {
|
||||
results: SearchResultItem[];
|
||||
};
|
||||
|
||||
export type ChatImageAttachment = {
|
||||
kind: "image";
|
||||
id: string;
|
||||
filename: string;
|
||||
mimeType: "image/png" | "image/jpeg";
|
||||
sizeBytes: number;
|
||||
dataUrl: string;
|
||||
};
|
||||
|
||||
export type ChatTextAttachment = {
|
||||
kind: "text";
|
||||
id: string;
|
||||
filename: string;
|
||||
mimeType: string;
|
||||
sizeBytes: number;
|
||||
text: string;
|
||||
truncated?: boolean;
|
||||
};
|
||||
|
||||
export type ChatAttachment = ChatImageAttachment | ChatTextAttachment;
|
||||
|
||||
export type SearchRunRequest = {
|
||||
query?: string;
|
||||
title?: string;
|
||||
@@ -103,6 +124,7 @@ export type CompletionRequestMessage = {
|
||||
role: "system" | "user" | "assistant" | "tool";
|
||||
content: string;
|
||||
name?: string;
|
||||
attachments?: ChatAttachment[];
|
||||
};
|
||||
|
||||
export type Provider = "openai" | "anthropic" | "xai";
|
||||
@@ -126,13 +148,20 @@ type CompletionResponse = {
|
||||
};
|
||||
|
||||
type CompletionStreamHandlers = {
|
||||
onMeta?: (payload: { chatId: string; callId: string; provider: Provider; model: string }) => void;
|
||||
onMeta?: (payload: { chatId: string | null; callId: string | null; provider: Provider; model: string }) => void;
|
||||
onToolCall?: (payload: ToolCallEvent) => void;
|
||||
onDelta?: (payload: { text: string }) => void;
|
||||
onDone?: (payload: { text: string; usage?: { inputTokens?: number; outputTokens?: number; totalTokens?: number } }) => void;
|
||||
onError?: (payload: { message: string }) => void;
|
||||
};
|
||||
|
||||
type CreateChatRequest = {
|
||||
title?: string;
|
||||
provider?: Provider;
|
||||
model?: string;
|
||||
messages?: CompletionRequestMessage[];
|
||||
};
|
||||
|
||||
const API_BASE_URL = import.meta.env.VITE_API_BASE_URL ?? "/api";
|
||||
const ENV_ADMIN_TOKEN = (import.meta.env.VITE_ADMIN_TOKEN as string | undefined)?.trim() || null;
|
||||
let authToken: string | null = ENV_ADMIN_TOKEN;
|
||||
@@ -188,10 +217,11 @@ export async function listModels() {
|
||||
return api<ModelCatalogResponse>("/v1/models");
|
||||
}
|
||||
|
||||
export async function createChat(title?: string) {
|
||||
export async function createChat(input?: string | CreateChatRequest) {
|
||||
const body = typeof input === "string" ? { title: input } : input ?? {};
|
||||
const data = await api<{ chat: ChatSummary }>("/v1/chats", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ title }),
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
return data.chat;
|
||||
}
|
||||
@@ -251,6 +281,49 @@ export async function deleteSearch(searchId: string) {
|
||||
await api<{ deleted: true }>(`/v1/searches/${searchId}`, { method: "DELETE" });
|
||||
}
|
||||
|
||||
export function getMessageAttachments(metadata: unknown): ChatAttachment[] {
|
||||
if (!metadata || typeof metadata !== "object" || Array.isArray(metadata)) return [];
|
||||
const attachments = (metadata as Record<string, unknown>).attachments;
|
||||
if (!Array.isArray(attachments)) return [];
|
||||
|
||||
const parsed: ChatAttachment[] = [];
|
||||
for (const entry of attachments) {
|
||||
if (!entry || typeof entry !== "object" || Array.isArray(entry)) continue;
|
||||
const record = entry as Record<string, unknown>;
|
||||
const kind = record.kind;
|
||||
const id = typeof record.id === "string" ? record.id : "";
|
||||
const filename = typeof record.filename === "string" ? record.filename : "";
|
||||
const mimeType = typeof record.mimeType === "string" ? record.mimeType : "";
|
||||
const sizeBytes = typeof record.sizeBytes === "number" ? record.sizeBytes : 0;
|
||||
|
||||
if (kind === "image" && typeof record.dataUrl === "string" && (mimeType === "image/png" || mimeType === "image/jpeg")) {
|
||||
parsed.push({
|
||||
kind,
|
||||
id,
|
||||
filename,
|
||||
mimeType,
|
||||
sizeBytes,
|
||||
dataUrl: record.dataUrl,
|
||||
} satisfies ChatImageAttachment);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (kind === "text" && typeof record.text === "string") {
|
||||
parsed.push({
|
||||
kind,
|
||||
id,
|
||||
filename,
|
||||
mimeType,
|
||||
sizeBytes,
|
||||
text: record.text,
|
||||
truncated: record.truncated === true,
|
||||
} satisfies ChatTextAttachment);
|
||||
}
|
||||
}
|
||||
|
||||
return parsed;
|
||||
}
|
||||
|
||||
type RunSearchStreamHandlers = {
|
||||
onSearchResults?: (payload: { requestId: string | null; results: SearchResultItem[] }) => void;
|
||||
onSearchError?: (payload: { error: string }) => void;
|
||||
@@ -378,7 +451,8 @@ export async function runCompletion(body: {
|
||||
|
||||
export async function runCompletionStream(
|
||||
body: {
|
||||
chatId: string;
|
||||
chatId?: string | null;
|
||||
persist?: boolean;
|
||||
provider: Provider;
|
||||
model: string;
|
||||
messages: CompletionRequestMessage[];
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"root":["./src/App.tsx","./src/main.tsx","./src/root-router.tsx","./src/vite-env.d.ts","./src/components/auth/auth-screen.tsx","./src/components/chat/chat-messages-panel.tsx","./src/components/markdown/markdown-content.tsx","./src/components/search/search-results-panel.tsx","./src/components/ui/button.tsx","./src/components/ui/input.tsx","./src/components/ui/scroll-area.tsx","./src/components/ui/separator.tsx","./src/components/ui/textarea.tsx","./src/hooks/use-session-auth.ts","./src/lib/api.ts","./src/lib/utils.ts","./src/pages/search-route-page.tsx"],"version":"5.9.3"}
|
||||
{"root":["./src/app.tsx","./src/main.tsx","./src/root-router.tsx","./src/vite-env.d.ts","./src/components/auth/auth-screen.tsx","./src/components/chat/chat-attachment-list.tsx","./src/components/chat/chat-messages-panel.tsx","./src/components/markdown/markdown-content.tsx","./src/components/search/search-results-panel.tsx","./src/components/ui/button.tsx","./src/components/ui/input.tsx","./src/components/ui/scroll-area.tsx","./src/components/ui/separator.tsx","./src/components/ui/textarea.tsx","./src/hooks/use-session-auth.ts","./src/lib/api.ts","./src/lib/utils.ts","./src/pages/search-route-page.tsx"],"version":"5.9.3"}
|
||||
Reference in New Issue
Block a user