26 Commits

Author SHA1 Message Date
2313e560e8 fix streaming 2026-05-02 23:09:39 -07:00
94565298d8 better new chat animation 2026-05-02 22:51:59 -07:00
7360604136 ios: swipe to create new conversation 2026-05-02 22:46:25 -07:00
ca6b5e0807 web: keyboard shortcuts 2026-05-02 22:45:15 -07:00
4b0cc3fbf7 ios: better fix for scroll 2026-05-02 22:25:24 -07:00
2da73f802c ios: scroll without animation when clicking 2026-05-02 22:21:18 -07:00
4ad36d9bf6 ios: better backgrounding/resume 2026-05-02 22:18:45 -07:00
cf9832ca3b tool call in-flight resume 2026-05-02 22:03:43 -07:00
2c32ca66e2 codex no sandbox (its already sandboxed) 2026-05-02 21:50:17 -07:00
015253c0af oai responses api, tool call retries 2026-05-02 21:44:32 -07:00
8d6c069a33 Various fixes for tool calling 2026-05-02 21:19:52 -07:00
d579b5bf75 adds shell tool 2026-05-02 19:52:19 -07:00
01ee807991 ios: adds file uploading 2026-05-02 19:47:47 -07:00
fd9ee455fb experimental devbox support 2026-05-02 19:38:15 -07:00
38da3cea72 adds attachment support 2026-05-02 19:21:06 -07:00
11e6875de9 ios: proj bump 2026-05-02 18:26:20 -07:00
5a690b276f web: transcript improvements 2026-05-02 18:25:20 -07:00
d7967eaa75 Adds searxng support for tool calling 2026-05-02 18:15:14 -07:00
2125c5dfa4 web: tweak scrolling behavior 2026-05-02 18:15:14 -07:00
815655a73c ios: fix api endpoint change 2026-05-02 18:09:22 -07:00
b85409d977 ios: xcode proj changes 2026-05-02 17:40:41 -07:00
d9f27213e7 Merge pull request 'Add chat flow for search results' (#8) from codex/chat-with-search-results into master
Reviewed-on: #8
2026-05-03 00:23:30 +00:00
3a6c40cb3c Merge pull request 'ios: right-align user bubbles on iPad' (#7) from codex/ios-right-align-user-bubbles into master
Reviewed-on: #7
2026-05-03 00:20:22 +00:00
57a6287b2b Merge pull request 'ios: stop refocusing composer after send' (#5) from codex/ios-no-autofocus-after-send into master
Reviewed-on: #5
2026-05-02 23:53:32 +00:00
dc9336acf9 add chat flow for search results 2026-05-02 16:48:01 -07:00
85f8d6b5f3 ios: stop refocusing composer after send 2026-05-02 16:37:55 -07:00
42 changed files with 4828 additions and 267 deletions

View File

@@ -24,6 +24,10 @@ COPY server/package.json server/package-lock.json ./
COPY server/scripts ./scripts COPY server/scripts ./scripts
COPY server/prisma ./prisma COPY server/prisma ./prisma
RUN apt-get update \
&& apt-get install -y --no-install-recommends openssh-client \
&& rm -rf /var/lib/apt/lists/*
RUN npm ci --omit=dev --no-audit --no-fund RUN npm ci --omit=dev --no-audit --no-fund
COPY --from=server-build /app/server/dist ./dist COPY --from=server-build /app/server/dist ./dist

1
dist/default.conf vendored
View File

@@ -1,6 +1,7 @@
server { server {
listen 80; listen 80;
server_name _; server_name _;
client_max_body_size 32m;
root /usr/share/nginx/html; root /usr/share/nginx/html;
index index.html; index index.html;

View File

@@ -13,8 +13,24 @@ services:
ANTHROPIC_API_KEY: ${ANTHROPIC_API_KEY:-} ANTHROPIC_API_KEY: ${ANTHROPIC_API_KEY:-}
XAI_API_KEY: ${XAI_API_KEY:-} XAI_API_KEY: ${XAI_API_KEY:-}
EXA_API_KEY: ${EXA_API_KEY:-} EXA_API_KEY: ${EXA_API_KEY:-}
CHAT_WEB_SEARCH_ENGINE: ${CHAT_WEB_SEARCH_ENGINE:-exa}
SEARXNG_BASE_URL: ${SEARXNG_BASE_URL:-}
CHAT_MAX_TOOL_ROUNDS: ${CHAT_MAX_TOOL_ROUNDS:-100}
CHAT_CODEX_TOOL_ENABLED: ${CHAT_CODEX_TOOL_ENABLED:-false}
CHAT_CODEX_REMOTE_HOST: ${CHAT_CODEX_REMOTE_HOST:-}
CHAT_CODEX_REMOTE_USER: ${CHAT_CODEX_REMOTE_USER:-}
CHAT_CODEX_REMOTE_PORT: ${CHAT_CODEX_REMOTE_PORT:-22}
CHAT_CODEX_REMOTE_WORKDIR: ${CHAT_CODEX_REMOTE_WORKDIR:-/workspace/sybil-codex}
# Prefer mounting a private key read-only and pointing CHAT_CODEX_SSH_KEY_PATH at it.
CHAT_CODEX_SSH_KEY_PATH: ${CHAT_CODEX_SSH_KEY_PATH:-}
CHAT_CODEX_SSH_PRIVATE_KEY_B64: ${CHAT_CODEX_SSH_PRIVATE_KEY_B64:-}
CHAT_CODEX_EXEC_TIMEOUT_MS: ${CHAT_CODEX_EXEC_TIMEOUT_MS:-600000}
CHAT_SHELL_TOOL_ENABLED: ${CHAT_SHELL_TOOL_ENABLED:-false}
CHAT_SHELL_EXEC_TIMEOUT_MS: ${CHAT_SHELL_EXEC_TIMEOUT_MS:-120000}
volumes: volumes:
- sybil_data:/data - sybil_data:/data
# Example key mount for codex_exec:
# - ./secrets/devbox_id_ed25519:/run/secrets/codex_ssh_key:ro
expose: expose:
- "8787" - "8787"
restart: unless-stopped restart: unless-stopped

View File

@@ -10,6 +10,12 @@ Content type:
- Requests with bodies use `application/json`. - Requests with bodies use `application/json`.
- Responses are JSON unless noted otherwise. - Responses are JSON unless noted otherwise.
Chat upload limits:
- Chat completion and direct message payloads support inline attachments up to a 32 MB request body.
- Up to 8 attachments per message.
- Image attachments: PNG or JPEG only, max 6 MB each.
- Text attachments: up to 8 MB source size each; server accepts at most 200,000 characters of inlined text content per attachment.
## Health + Auth ## Health + Auth
### `GET /health` ### `GET /health`
@@ -31,6 +37,7 @@ Content type:
} }
} }
``` ```
- OpenAI model lists are filtered to models that are expected to work with the backend's Responses API implementation.
## Chats ## Chats
@@ -74,11 +81,34 @@ Behavior notes:
"role": "system|user|assistant|tool", "role": "system|user|assistant|tool",
"content": "string", "content": "string",
"name": "optional", "name": "optional",
"metadata": {} "metadata": {},
"attachments": [
{
"kind": "image",
"id": "attachment-id",
"filename": "photo.jpg",
"mimeType": "image/jpeg",
"sizeBytes": 12345,
"dataUrl": "data:image/jpeg;base64,..."
},
{
"kind": "text",
"id": "attachment-id",
"filename": "notes.md",
"mimeType": "text/markdown",
"sizeBytes": 4567,
"text": "# Notes\\n...",
"truncated": false
}
]
} }
``` ```
- Response: `{ "message": Message }` - Response: `{ "message": Message }`
Notes:
- `attachments` is optional and is merged into stored `message.metadata.attachments`.
- Tool messages should not include attachments.
## Chat Completions (non-streaming) ## Chat Completions (non-streaming)
### `POST /v1/chat-completions` ### `POST /v1/chat-completions`
@@ -89,7 +119,30 @@ Behavior notes:
"provider": "openai|anthropic|xai", "provider": "openai|anthropic|xai",
"model": "string", "model": "string",
"messages": [ "messages": [
{ "role": "system|user|assistant|tool", "content": "string", "name": "optional" } {
"role": "system|user|assistant|tool",
"content": "string",
"name": "optional",
"attachments": [
{
"kind": "image",
"id": "attachment-id",
"filename": "photo.jpg",
"mimeType": "image/jpeg",
"sizeBytes": 12345,
"dataUrl": "data:image/jpeg;base64,..."
},
{
"kind": "text",
"id": "attachment-id",
"filename": "notes.md",
"mimeType": "text/markdown",
"sizeBytes": 4567,
"text": "# Notes\\n...",
"truncated": false
}
]
}
], ],
"temperature": 0.2, "temperature": 0.2,
"maxTokens": 256 "maxTokens": 256
@@ -112,11 +165,33 @@ Behavior notes:
- For `chatId` calls, server stores only *new* non-assistant messages from provided history to avoid duplicates. - For `chatId` calls, server stores only *new* non-assistant messages from provided history to avoid duplicates.
- Server persists final assistant output and call metadata (`LlmCall`) in DB. - Server persists final assistant output and call metadata (`LlmCall`) in DB.
- Server updates chat-level model metadata on each call: `lastUsedProvider`/`lastUsedModel`; first successful/failed call also initializes `initiatedProvider`/`initiatedModel` if unset. - Server updates chat-level model metadata on each call: `lastUsedProvider`/`lastUsedModel`; first successful/failed call also initializes `initiatedProvider`/`initiatedModel` if unset.
- For `openai` and `xai`, backend enables tool use during chat completion with an internal system instruction. - Attachments are optional and currently apply to `user` messages. Persisted chat history stores them under `message.metadata.attachments`.
- Available tool calls for chat: `web_search` and `fetch_url`. - Images are forwarded inline to providers as multimodal image parts. Use PNG or JPEG for cross-provider compatibility.
- `web_search` uses Exa and returns ranked results with per-result summaries/snippets. - Text files are forwarded as explicit text blocks rather than provider-managed file references. Large text attachments should already be truncated client-side before submission.
- For `openai`, backend calls OpenAI's Responses API and enables internal tool use with an internal system instruction.
- For `xai`, backend calls xAI's OpenAI-compatible Chat Completions API and enables internal tool use with the same internal system instruction.
- For `openai`, image attachments are sent as Responses `input_image` items and text attachments are sent as `input_text` items.
- For `xai`, image attachments are sent as Chat Completions content parts alongside text.
- For `openai`, Responses calls that can enter the server-managed tool loop use `store: true` so reasoning and function-call items can be passed between tool rounds.
- For `anthropic`, image attachments are sent as Messages API `image` blocks using base64 source data; text attachments are added as `text` blocks.
- Available tool calls for chat: `web_search` and `fetch_url`. When `CHAT_CODEX_TOOL_ENABLED=true`, `codex_exec` is also available. When `CHAT_SHELL_TOOL_ENABLED=true`, `shell_exec` is also available.
- `web_search` returns ranked results with per-result summaries/snippets. Its backend engine is selected by `CHAT_WEB_SEARCH_ENGINE` (`exa` default, or `searxng` with `SEARXNG_BASE_URL` set). SearXNG mode requires the instance to allow `format=json`.
- `fetch_url` fetches a URL and returns plaintext page content (HTML converted to text server-side). - `fetch_url` fetches a URL and returns plaintext page content (HTML converted to text server-side).
- When a tool call is executed, backend stores a chat `Message` with `role: "tool"` and tool metadata (`metadata.kind = "tool_call"`), then stores the assistant output. - `codex_exec` delegates coding, shell, repository inspection, and other complex software tasks to a persistent remote Codex CLI workspace over SSH. The server runs `codex exec --dangerously-bypass-approvals-and-sandbox --skip-git-repo-check <non-interactive wrapped prompt>` on the configured devbox inside `CHAT_CODEX_REMOTE_WORKDIR`, with SSH stdin closed.
- `shell_exec` runs arbitrary non-interactive shell commands on the same configured devbox, starting in `CHAT_CODEX_REMOTE_WORKDIR`. It uses `bash -lc` when bash exists, otherwise `sh -lc`, closes SSH stdin, and does not run inside the Sybil server container.
- Devbox tool configuration:
- `CHAT_MAX_TOOL_ROUNDS=100` (optional; maximum model/tool result cycles before the backend returns a limit message)
- `CHAT_CODEX_TOOL_ENABLED=true`
- `CHAT_SHELL_TOOL_ENABLED=true`
- `CHAT_CODEX_REMOTE_HOST=<host-or-ip>` (required when enabled)
- `CHAT_CODEX_REMOTE_USER=<ssh-user>` (optional; omitted if `CHAT_CODEX_REMOTE_HOST` already contains `user@host`)
- `CHAT_CODEX_REMOTE_PORT=22` (optional)
- `CHAT_CODEX_REMOTE_WORKDIR=/workspace/sybil-codex` (optional; created on the remote host if missing)
- `CHAT_CODEX_SSH_KEY_PATH=/run/secrets/codex_ssh_key` (recommended private-key delivery via read-only volume mount)
- `CHAT_CODEX_SSH_PRIVATE_KEY_B64=<base64-private-key>` (optional fallback when a volume mount is not practical)
- `CHAT_CODEX_EXEC_TIMEOUT_MS=600000` (optional)
- `CHAT_SHELL_EXEC_TIMEOUT_MS=120000` (optional)
- When a tool call is executed, backend stores a chat `Message` with `role: "tool"` and tool metadata (`metadata.kind = "tool_call"`). Streaming requests persist each completed tool call as its SSE `tool_call` event is emitted, then store the assistant output when the completion finishes.
- `anthropic` currently runs without server-managed tool calls. - `anthropic` currently runs without server-managed tool calls.
## Searches ## Searches
@@ -135,6 +210,16 @@ Behavior notes:
### `GET /v1/searches/:searchId` ### `GET /v1/searches/:searchId`
- Response: `{ "search": SearchDetail }` - Response: `{ "search": SearchDetail }`
### `POST /v1/searches/:searchId/chat`
- Body: `{ "title"?: string }`
- Response: `{ "chat": ChatSummary }`
- Not found: `404 { "message": "search not found" }`
Behavior notes:
- Creates a new chat seeded with a hidden `system` message containing the search query, answer text, answer citations, and top search results.
- Clients should include existing `system` messages when sending the chat history to `/v1/chat-completions` or `/v1/chat-completions/stream`; they may hide those messages in the transcript UI.
- The default chat title is `Search: <query-or-title>`, unless `title` is supplied.
### `POST /v1/searches/:searchId/run` ### `POST /v1/searches/:searchId/run`
- Body: - Body:
```json ```json
@@ -151,6 +236,7 @@ Behavior notes:
Search run notes: Search run notes:
- Backend executes Exa search and Exa answer. - Backend executes Exa search and Exa answer.
- Search mode is independent from chat `web_search` tool configuration and remains Exa-only.
- Persists answer text/citations + ranked results. - Persists answer text/citations + ranked results.
- If both search and answer fail, endpoint returns an error. - If both search and answer fail, endpoint returns an error.
@@ -178,10 +264,32 @@ Search run notes:
"role": "system|user|assistant|tool", "role": "system|user|assistant|tool",
"content": "...", "content": "...",
"name": null, "name": null,
"metadata": null "metadata": {
"attachments": [
{
"kind": "image",
"id": "attachment-id",
"filename": "photo.jpg",
"mimeType": "image/jpeg",
"sizeBytes": 12345,
"dataUrl": "data:image/jpeg;base64,..."
},
{
"kind": "text",
"id": "attachment-id",
"filename": "notes.md",
"mimeType": "text/markdown",
"sizeBytes": 4567,
"text": "# Notes\\n...",
"truncated": false
}
]
}
} }
``` ```
`metadata` remains nullable. Tool-call log messages still use `metadata.kind = "tool_call"`; regular user messages with attachments use `metadata.attachments`.
`ChatDetail` `ChatDetail`
```json ```json
{ {

View File

@@ -9,6 +9,7 @@ Transport:
- HTTP response uses `Content-Type: text/event-stream; charset=utf-8` - HTTP response uses `Content-Type: text/event-stream; charset=utf-8`
- Events are emitted in SSE format (`event: ...`, `data: ...`) - Events are emitted in SSE format (`event: ...`, `data: ...`)
- Request body is JSON - Request body is JSON
- Request body supports the same inline attachment schema and limits documented in `docs/api/rest.md`.
Authentication: Authentication:
- Same as REST endpoints (`Authorization: Bearer <token>` when token mode is enabled) - Same as REST endpoints (`Authorization: Bearer <token>` when token mode is enabled)
@@ -21,7 +22,30 @@ Authentication:
"provider": "openai|anthropic|xai", "provider": "openai|anthropic|xai",
"model": "string", "model": "string",
"messages": [ "messages": [
{ "role": "system|user|assistant|tool", "content": "string", "name": "optional" } {
"role": "system|user|assistant|tool",
"content": "string",
"name": "optional",
"attachments": [
{
"kind": "image",
"id": "attachment-id",
"filename": "photo.jpg",
"mimeType": "image/jpeg",
"sizeBytes": 12345,
"dataUrl": "data:image/jpeg;base64,..."
},
{
"kind": "text",
"id": "attachment-id",
"filename": "notes.md",
"mimeType": "text/markdown",
"sizeBytes": 4567,
"text": "# Notes\\n...",
"truncated": false
}
]
}
], ],
"temperature": 0.2, "temperature": 0.2,
"maxTokens": 256 "maxTokens": 256
@@ -32,6 +56,7 @@ Notes:
- If `chatId` is omitted, backend creates a new chat. - If `chatId` is omitted, backend creates a new chat.
- If `chatId` is provided, backend validates it exists. - If `chatId` is provided, backend validates it exists.
- Backend stores only new non-assistant input history rows to avoid duplicates. - Backend stores only new non-assistant input history rows to avoid duplicates.
- Attachments are optional and are persisted under `message.metadata.attachments` on stored user messages.
## Event Stream Contract ## Event Stream Contract
@@ -102,14 +127,22 @@ Event order:
## Provider Streaming Behavior ## Provider Streaming Behavior
- `openai`: backend may execute internal tool calls (`web_search`, `fetch_url`) before producing final text. - `openai`: backend uses OpenAI's Responses API and may execute internal function tool calls (`web_search`, `fetch_url`, optional `codex_exec`, and optional `shell_exec`) before producing final text.
- `xai`: same tool-enabled behavior as OpenAI. - `xai`: backend uses xAI's OpenAI-compatible Chat Completions API and may execute the same internal tool calls before producing final text.
- `anthropic`: streamed via event stream; emits `delta` from `content_block_delta` with `text_delta`. - `openai`: image attachments are sent as Responses `input_image` items; text attachments are sent as `input_text` items.
- `xai`: image attachments are sent as Chat Completions content parts; text attachments are inlined as text parts.
- `openai`: Responses calls that can enter the server-managed tool loop use `store: true` so reasoning and function-call items can be passed between tool rounds.
- `anthropic`: streamed via event stream; emits `delta` from `content_block_delta` with `text_delta`. Image attachments are sent as base64 `image` blocks and text attachments are appended as `text` blocks.
- `web_search` uses `CHAT_WEB_SEARCH_ENGINE` (`exa` default, or `searxng` with `SEARXNG_BASE_URL` set). SearXNG mode requires the instance to allow `format=json`. This only affects chat-mode tool calls, not search-mode endpoints.
- `codex_exec` is available only when `CHAT_CODEX_TOOL_ENABLED=true`. It SSHes to `CHAT_CODEX_REMOTE_HOST`, creates/uses `CHAT_CODEX_REMOTE_WORKDIR`, and runs `codex exec --dangerously-bypass-approvals-and-sandbox --skip-git-repo-check <non-interactive wrapped prompt>` there with SSH stdin closed. Prefer `CHAT_CODEX_SSH_KEY_PATH` with a read-only mounted private key; `CHAT_CODEX_SSH_PRIVATE_KEY_B64` is also supported.
- `shell_exec` is available only when `CHAT_SHELL_TOOL_ENABLED=true`. It uses the same devbox SSH configuration, starts in `CHAT_CODEX_REMOTE_WORKDIR`, and runs non-interactive shell commands there with SSH stdin closed, not inside the Sybil server container.
- `CHAT_MAX_TOOL_ROUNDS` controls how many model/tool result cycles may occur before the backend returns a tool-call limit message; default is 100.
Tool-enabled streaming notes (`openai`/`xai`): Tool-enabled streaming notes (`openai`/`xai`):
- Stream still emits standard `meta`, `delta`, `done|error` events. - Stream still emits standard `meta`, `delta`, `done|error` events.
- Stream may emit `tool_call` events while tool calls are executed. - Stream may emit `tool_call` events while tool calls are executed.
- `delta` events stream incrementally as text is generated. - `delta` events carry assistant text and are emitted incrementally for normal text rounds. The backend may buffer model-native text briefly while determining whether a provider round contains tool calls.
- OpenAI Responses stream events are normalized by the backend into this SSE contract; clients do not consume OpenAI's raw Responses stream event names.
## Persistence + Consistency Model ## Persistence + Consistency Model
@@ -117,6 +150,7 @@ Backend database remains source of truth.
During stream: During stream:
- Client may optimistically render accumulated `delta` text. - Client may optimistically render accumulated `delta` text.
- Backend persists each completed tool call as a `tool` message before emitting its `tool_call` SSE event, so chat detail refreshes can show completed tool calls while the assistant response is still running.
On successful completion: On successful completion:
- Backend persists assistant `Message` and updates `LlmCall` usage/latency in a transaction. - Backend persists assistant `Message` and updates `LlmCall` usage/latency in a transaction.

View File

@@ -35,7 +35,7 @@ Instructions for work under `/Users/buzzert/src/sybil-2/ios`.
## Practical Notes ## Practical Notes
- Default API URL is `http://127.0.0.1:8787` (configurable in-app). - Default API URL is `http://127.0.0.1:8787` (configurable in-app).
- Previously saved `/api` API roots are normalized to the server root by the iOS client. - The iOS client preserves an explicit `/api` base path for proxied deployments.
- Provider fallback models: - Provider fallback models:
- OpenAI: `gpt-4.1-mini` - OpenAI: `gpt-4.1-mini`
- Anthropic: `claude-3-5-sonnet-latest` - Anthropic: `claude-3-5-sonnet-latest`

View File

@@ -19,9 +19,10 @@ targets:
TARGETED_DEVICE_FAMILY: "1,2" TARGETED_DEVICE_FAMILY: "1,2"
GENERATE_INFOPLIST_FILE: YES GENERATE_INFOPLIST_FILE: YES
ASSETCATALOG_COMPILER_APPICON_NAME: AppIcon ASSETCATALOG_COMPILER_APPICON_NAME: AppIcon
MARKETING_VERSION: 1.0 MARKETING_VERSION: 1.4
CURRENT_PROJECT_VERSION: 1 CURRENT_PROJECT_VERSION: 5
INFOPLIST_KEY_CFBundleDisplayName: Sybil INFOPLIST_KEY_CFBundleDisplayName: Sybil
INFOPLIST_KEY_ITSAppUsesNonExemptEncryption: NO
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents: YES INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents: YES
INFOPLIST_KEY_UILaunchScreen_Generation: YES INFOPLIST_KEY_UILaunchScreen_Generation: YES
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone: UIInterfaceOrientationPortrait INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone: UIInterfaceOrientationPortrait

View File

@@ -0,0 +1,33 @@
{
"originHash" : "a6321e2b291c1094ca66f749c90095f05aac7f8c6b4a6e54e0e77a1bb0e1a79f",
"pins" : [
{
"identity" : "networkimage",
"kind" : "remoteSourceControl",
"location" : "https://github.com/gonzalezreal/NetworkImage",
"state" : {
"revision" : "2849f5323265386e200484b0d0f896e73c3411b9",
"version" : "6.0.1"
}
},
{
"identity" : "swift-cmark",
"kind" : "remoteSourceControl",
"location" : "https://github.com/swiftlang/swift-cmark",
"state" : {
"revision" : "5d9bdaa4228b381639fff09403e39a04926e2dbe",
"version" : "0.7.1"
}
},
{
"identity" : "swift-markdown-ui",
"kind" : "remoteSourceControl",
"location" : "https://github.com/gonzalezreal/swift-markdown-ui.git",
"state" : {
"revision" : "5f613358148239d0292c0cef674a3c2314737f9e",
"version" : "2.4.1"
}
}
],
"version" : 3
}

View File

@@ -3,6 +3,9 @@ import SwiftUI
public struct SplitView: View { public struct SplitView: View {
@State private var viewModel = SybilViewModel() @State private var viewModel = SybilViewModel()
@Environment(\.horizontalSizeClass) private var horizontalSizeClass @Environment(\.horizontalSizeClass) private var horizontalSizeClass
@Environment(\.scenePhase) private var scenePhase
@State private var shouldRefreshOnForeground = false
@State private var composerFocusRequest = 0
@MainActor public init() { @MainActor public init() {
SybilFontRegistry.registerIfNeeded() SybilFontRegistry.registerIfNeeded()
@@ -27,7 +30,10 @@ public struct SplitView: View {
NavigationSplitView { NavigationSplitView {
SybilSidebarView(viewModel: viewModel) SybilSidebarView(viewModel: viewModel)
} detail: { } detail: {
SybilWorkspaceView(viewModel: viewModel) SybilWorkspaceView(viewModel: viewModel, composerFocusRequest: composerFocusRequest) {
viewModel.startNewChat()
composerFocusRequest += 1
}
} }
.navigationSplitViewStyle(.balanced) .navigationSplitViewStyle(.balanced)
.tint(SybilTheme.primary) .tint(SybilTheme.primary)
@@ -38,5 +44,26 @@ public struct SplitView: View {
.task { .task {
await viewModel.bootstrap() await viewModel.bootstrap()
} }
.onChange(of: scenePhase) { _, nextPhase in
switch nextPhase {
case .background:
shouldRefreshOnForeground = true
case .active:
guard shouldRefreshOnForeground, horizontalSizeClass != .compact else {
return
}
shouldRefreshOnForeground = false
Task {
await viewModel.refreshVisibleContent(
refreshCollections: true,
refreshSelection: viewModel.hasRefreshableSelection
)
}
case .inactive:
break
@unknown default:
break
}
}
} }
} }

View File

@@ -17,16 +17,18 @@ struct AnyEncodable: Encodable {
} }
} }
actor SybilAPIClient { actor SybilAPIClient: SybilAPIClienting {
private let configuration: APIConfiguration private let configuration: APIConfiguration
private let session: URLSession private let session: URLSession
@MainActor
private static let iso8601FormatterWithFractional: ISO8601DateFormatter = { private static let iso8601FormatterWithFractional: ISO8601DateFormatter = {
let formatter = ISO8601DateFormatter() let formatter = ISO8601DateFormatter()
formatter.formatOptions = [.withInternetDateTime, .withFractionalSeconds] formatter.formatOptions = [.withInternetDateTime, .withFractionalSeconds]
return formatter return formatter
}() }()
@MainActor
private static let iso8601Formatter: ISO8601DateFormatter = { private static let iso8601Formatter: ISO8601DateFormatter = {
let formatter = ISO8601DateFormatter() let formatter = ISO8601DateFormatter()
formatter.formatOptions = [.withInternetDateTime] formatter.formatOptions = [.withInternetDateTime]
@@ -96,6 +98,16 @@ actor SybilAPIClient {
return response.search return response.search
} }
func createChatFromSearch(searchID: String, title: String? = nil) async throws -> ChatSummary {
let response = try await request(
"/v1/searches/\(searchID)/chat",
method: "POST",
body: AnyEncodable(SearchChatCreateBody(title: title)),
responseType: ChatCreateResponse.self
)
return response.chat
}
func deleteSearch(searchID: String) async throws { func deleteSearch(searchID: String) async throws {
_ = try await request("/v1/searches/\(searchID)", method: "DELETE", responseType: DeleteResponse.self) _ = try await request("/v1/searches/\(searchID)", method: "DELETE", responseType: DeleteResponse.self)
} }
@@ -552,3 +564,7 @@ private struct SearchCreateBody: Encodable {
var title: String? var title: String?
var query: String? var query: String?
} }
private struct SearchChatCreateBody: Encodable {
var title: String?
}

View File

@@ -0,0 +1,25 @@
import Foundation
protocol SybilAPIClienting: Sendable {
func verifySession() async throws -> AuthSession
func listChats() async throws -> [ChatSummary]
func createChat(title: String?) async throws -> ChatSummary
func getChat(chatID: String) async throws -> ChatDetail
func deleteChat(chatID: String) async throws
func suggestChatTitle(chatID: String, content: String) async throws -> ChatSummary
func listSearches() async throws -> [SearchSummary]
func createSearch(title: String?, query: String?) async throws -> SearchSummary
func getSearch(searchID: String) async throws -> SearchDetail
func createChatFromSearch(searchID: String, title: String?) async throws -> ChatSummary
func deleteSearch(searchID: String) async throws
func listModels() async throws -> ModelCatalogResponse
func runCompletionStream(
body: CompletionStreamRequest,
onEvent: @escaping @Sendable (CompletionStreamEvent) async -> Void
) async throws
func runSearchStream(
searchID: String,
body: SearchRunRequest,
onEvent: @escaping @Sendable (SearchStreamEvent) async -> Void
) async throws
}

View File

@@ -0,0 +1,222 @@
import SwiftUI
enum SybilAttachmentTone {
case composer
case user
case assistant
}
struct SybilAttachmentListView: View {
var attachments: [ChatAttachment]
var tone: SybilAttachmentTone
var onRemove: ((String) -> Void)? = nil
var body: some View {
VStack(alignment: .leading, spacing: 8) {
ForEach(attachments) { attachment in
Group {
if attachment.kind == .image {
imageCard(attachment)
} else {
textCard(attachment)
}
}
}
}
}
@ViewBuilder
private func imageCard(_ attachment: ChatAttachment) -> some View {
VStack(alignment: .leading, spacing: 0) {
if let image = SybilChatAttachmentSupport.image(for: attachment) {
Image(uiImage: image)
.resizable()
.scaledToFill()
.frame(maxWidth: .infinity)
.frame(height: 180)
.clipped()
} else {
ZStack {
RoundedRectangle(cornerRadius: 14)
.fill(Color.black.opacity(0.18))
Image(systemName: "photo")
.font(.system(size: 22, weight: .medium))
.foregroundStyle(SybilTheme.textMuted)
}
.frame(height: 140)
}
HStack(alignment: .top, spacing: 10) {
Image(systemName: "photo")
.font(.system(size: 13, weight: .semibold))
.foregroundStyle(titleColor.opacity(0.92))
.frame(width: 26, height: 26)
.background(
RoundedRectangle(cornerRadius: 8)
.fill(Color.white.opacity(0.06))
.overlay(
RoundedRectangle(cornerRadius: 8)
.stroke(Color.white.opacity(0.08), lineWidth: 1)
)
)
VStack(alignment: .leading, spacing: 2) {
Text(attachment.filename)
.font(.sybil(.footnote, weight: .medium))
.foregroundStyle(titleColor)
.lineLimit(1)
Text(attachment.mimeType)
.font(.sybil(.caption2))
.foregroundStyle(SybilTheme.textMuted)
.lineLimit(1)
}
Spacer(minLength: 0)
if let onRemove {
removeButton(for: attachment.id, onRemove: onRemove)
}
}
.padding(12)
}
.background(cardBackground)
.clipShape(RoundedRectangle(cornerRadius: 14))
.overlay(
RoundedRectangle(cornerRadius: 14)
.stroke(cardBorder, lineWidth: 1)
)
}
@ViewBuilder
private func textCard(_ attachment: ChatAttachment) -> some View {
VStack(alignment: .leading, spacing: 10) {
HStack(alignment: .top, spacing: 10) {
Image(systemName: "doc.text")
.font(.system(size: 13, weight: .semibold))
.foregroundStyle(titleColor.opacity(0.92))
.frame(width: 26, height: 26)
.background(
RoundedRectangle(cornerRadius: 8)
.fill(Color.white.opacity(0.06))
.overlay(
RoundedRectangle(cornerRadius: 8)
.stroke(Color.white.opacity(0.08), lineWidth: 1)
)
)
VStack(alignment: .leading, spacing: 2) {
Text(attachment.filename)
.font(.sybil(.footnote, weight: .medium))
.foregroundStyle(titleColor)
.lineLimit(1)
Text(
attachment.truncated == true
? "\(attachment.mimeType) • truncated"
: attachment.mimeType
)
.font(.sybil(.caption2))
.foregroundStyle(SybilTheme.textMuted)
.lineLimit(1)
}
Spacer(minLength: 0)
if let onRemove {
removeButton(for: attachment.id, onRemove: onRemove)
}
}
Text(SybilChatAttachmentSupport.previewText(for: attachment))
.font(.system(.caption, design: .monospaced))
.foregroundStyle(bodyColor)
.frame(maxWidth: .infinity, alignment: .leading)
.padding(10)
.background(
RoundedRectangle(cornerRadius: 10)
.fill(Color.black.opacity(0.16))
.overlay(
RoundedRectangle(cornerRadius: 10)
.stroke(Color.white.opacity(0.05), lineWidth: 1)
)
)
}
.padding(12)
.background(cardBackground)
.clipShape(RoundedRectangle(cornerRadius: 14))
.overlay(
RoundedRectangle(cornerRadius: 14)
.stroke(cardBorder, lineWidth: 1)
)
}
private func removeButton(for attachmentID: String, onRemove: @escaping (String) -> Void) -> some View {
Button {
onRemove(attachmentID)
} label: {
Image(systemName: "xmark")
.font(.system(size: 11, weight: .bold))
.foregroundStyle(SybilTheme.textMuted)
.frame(width: 24, height: 24)
.background(
Circle()
.fill(Color.white.opacity(0.06))
)
}
.buttonStyle(.plain)
.accessibilityLabel("Remove attachment")
}
private var cardBackground: some ShapeStyle {
switch tone {
case .composer:
return AnyShapeStyle(
LinearGradient(
colors: [SybilTheme.surface.opacity(0.86), SybilTheme.surfaceStrong.opacity(0.78)],
startPoint: .topLeading,
endPoint: .bottomTrailing
)
)
case .user:
return AnyShapeStyle(Color.black.opacity(0.14))
case .assistant:
return AnyShapeStyle(
LinearGradient(
colors: [SybilTheme.surface.opacity(0.58), SybilTheme.surfaceStrong.opacity(0.42)],
startPoint: .topLeading,
endPoint: .bottomTrailing
)
)
}
}
private var cardBorder: Color {
switch tone {
case .composer:
return SybilTheme.border.opacity(0.82)
case .user:
return Color.white.opacity(0.12)
case .assistant:
return SybilTheme.border.opacity(0.58)
}
}
private var titleColor: Color {
switch tone {
case .composer, .assistant:
return SybilTheme.text
case .user:
return SybilTheme.text
}
}
private var bodyColor: Color {
switch tone {
case .composer, .assistant:
return SybilTheme.text.opacity(0.94)
case .user:
return SybilTheme.text.opacity(0.96)
}
}
}

View File

@@ -0,0 +1,36 @@
import UIKit
/// RAII-style wrapper around a UIKit background-task assertion.
/// Construction calls `beginBackgroundTask(withName:)`; initialization fails
/// (`init?` returns nil) when UIKit refuses to grant the assertion.
/// Callers must invoke `end()` when the protected work finishes.
@MainActor
final class SybilBackgroundTaskAssertion {
    private let name: String
    private var identifier: UIBackgroundTaskIdentifier = .invalid

    /// Acquires a background task named `name`.
    /// - Parameter onExpiration: Runs on the main actor if the system
    ///   reclaims the time budget before `end()` is called; the assertion is
    ///   released right after it runs.
    init?(name: String, onExpiration: @escaping @MainActor () -> Void = {}) {
        self.name = name
        identifier = UIApplication.shared.beginBackgroundTask(withName: name) { [weak self] in
            // Expiration handler may fire off the main actor; hop back before
            // touching state or the caller's closure.
            Task { @MainActor in
                guard let self else { return }
                SybilLog.warning(SybilLog.app, "Background task expired: \(self.name)")
                onExpiration()
                self.end()
            }
        }
        if identifier == .invalid {
            SybilLog.warning(SybilLog.app, "Failed to acquire background task: \(name)")
            return nil
        }
        SybilLog.debug(SybilLog.app, "Acquired background task: \(name)")
    }

    /// Releases the assertion. Idempotent: only the first call after
    /// acquisition reaches UIKit; later calls are no-ops.
    func end() {
        if identifier == .invalid {
            return
        }
        UIApplication.shared.endBackgroundTask(identifier)
        identifier = .invalid
        SybilLog.debug(SybilLog.app, "Ended background task: \(name)")
    }
}

View File

@@ -0,0 +1,354 @@
import Foundation
import UniformTypeIdentifiers
import UIKit
/// Failures that can occur while converting user-selected files or pasted
/// content into chat attachments. Each case carries the offending filename
/// (or, for `tooManyAttachments`, the per-message limit).
enum ChatAttachmentError: LocalizedError {
    case unsupportedType(String)
    case imageTooLarge(String)
    case textTooLarge(String)
    case unreadableFile(String)
    case unsupportedImageFormat(String)
    case tooManyAttachments(Int)

    /// User-facing message; surfaced directly in the UI via `LocalizedError`.
    var errorDescription: String? {
        switch self {
        case .unsupportedType(let filename):
            return "Unsupported file type for '\(filename)'. Use PNG/JPEG images or text-based files."
        case .imageTooLarge(let filename):
            return "Image '\(filename)' exceeds the 6 MB upload limit."
        case .textTooLarge(let filename):
            return "Text file '\(filename)' exceeds the 8 MB upload limit."
        case .unreadableFile(let filename):
            return "Could not read '\(filename)'."
        case .unsupportedImageFormat(let filename):
            return "Image '\(filename)' could not be converted to PNG or JPEG."
        case .tooManyAttachments(let limit):
            return "You can attach up to \(limit) files per message."
        }
    }
}
/// Caseless-enum namespace of helpers for building, validating, previewing,
/// and decoding chat attachments (PNG/JPEG images and text-like files).
/// All size/count limits enforced client-side live here.
enum SybilChatAttachmentSupport {
    // Client-side limits, checked before an attachment is accepted.
    static let maxAttachmentsPerMessage = 8
    static let maxImageBytes = 6 * 1024 * 1024   // 6 MB, matches the error copy in ChatAttachmentError
    static let maxTextBytes = 8 * 1024 * 1024    // 8 MB raw bytes for text files
    static let maxTextCharacters = 200_000       // text beyond this is truncated, not rejected
    // Extensions accepted as text when UTType/MIME detection is inconclusive.
    private static let supportedTextExtensions: Set<String> = [
        "txt", "md", "markdown", "csv", "tsv", "json", "jsonl", "xml", "yaml", "yml", "html", "htm",
        "css", "js", "jsx", "ts", "tsx", "py", "rb", "java", "c", "cc", "cpp", "h", "hpp", "go",
        "rs", "sh", "sql", "log", "toml", "ini", "cfg", "conf", "swift", "kt", "m", "mm"
    ]
    // Non-"text/*" MIME types that are still treated as text content.
    private static let supportedTextMimeTypes: Set<String> = [
        "application/json",
        "application/ld+json",
        "application/sql",
        "application/toml",
        "application/x-httpd-php",
        "application/x-javascript",
        "application/x-sh",
        "application/xml",
        "application/yaml",
        "application/x-yaml",
        "image/svg+xml"
    ]
    /// One-line summary of attached filenames, e.g. for chat-title seeding.
    /// NOTE(review): a single attachment yields just the filename with no
    /// "Attached:" prefix — confirm that asymmetry is intended.
    static func attachmentSummary(_ attachments: [ChatAttachment]) -> String {
        guard !attachments.isEmpty else { return "" }
        let names = attachments.map(\.filename).joined(separator: ", ")
        return attachments.count == 1 ? names : "Attached: \(names)"
    }
    /// Wraps attachments as `{"attachments": [...]}` for message metadata;
    /// nil when there is nothing to attach (so no metadata key is written).
    static func metadataValue(for attachments: [ChatAttachment]) -> JSONValue? {
        guard !attachments.isEmpty else { return nil }
        return .object([
            "attachments": .array(attachments.map(\.jsonValue))
        ])
    }
    /// Builds attachments from picked file URLs; throws on the first failure.
    static func buildAttachments(from urls: [URL]) throws -> [ChatAttachment] {
        try urls.map { try buildAttachment(fromFileURL: $0) }
    }
    /// Encodes a UIImage as an attachment: PNG when it fits the image byte
    /// cap, otherwise JPEG at 0.92 quality.
    static func buildImageAttachment(image: UIImage, filename: String = "pasted-image.jpg") throws -> ChatAttachment {
        if let pngData = image.pngData(), pngData.count <= maxImageBytes {
            return try buildImageAttachment(data: pngData, filename: filename, contentType: .png)
        }
        guard let jpegData = image.jpegData(compressionQuality: 0.92) else {
            throw ChatAttachmentError.unsupportedImageFormat(filename)
        }
        return try buildImageAttachment(data: jpegData, filename: filename, contentType: .jpeg)
    }
    /// Builds a text attachment from an in-memory string (e.g. a large paste).
    static func buildTextAttachment(text: String, filename: String = "pasted-text.txt", mimeType: String = "text/plain") throws -> ChatAttachment {
        let data = Data(text.utf8)
        return try buildTextAttachment(data: data, filename: filename, mimeType: mimeType)
    }
    /// Builds attachments from drag/paste item providers. File URLs are
    /// preferred; providers that only vend image data fall back to the image
    /// path. Providers matching neither are silently skipped.
    @MainActor
    static func buildAttachments(from itemProviders: [NSItemProvider]) async throws -> [ChatAttachment] {
        var attachments: [ChatAttachment] = []
        for provider in itemProviders {
            if let fileURL = try await loadFileURL(from: provider) {
                attachments.append(try buildAttachment(fromFileURL: fileURL))
                continue
            }
            if provider.hasItemConformingToTypeIdentifier(UTType.image.identifier) {
                if let attachment = try await loadImageAttachment(from: provider) {
                    attachments.append(attachment)
                }
            }
        }
        return attachments
    }
    /// Short (<= 280 chars + ellipsis) preview of a text attachment's content
    /// for display in attachment cards.
    static func previewText(for attachment: ChatAttachment) -> String {
        let normalized = (attachment.text ?? "")
            .replacingOccurrences(of: "\r", with: "")
            .trimmingCharacters(in: .whitespacesAndNewlines)
        if normalized.isEmpty {
            return "(empty file)"
        }
        if normalized.count <= 280 {
            return normalized
        }
        let endIndex = normalized.index(normalized.startIndex, offsetBy: 280)
        return normalized[..<endIndex].trimmingCharacters(in: .whitespacesAndNewlines) + "..."
    }
    /// Decodes an image attachment's data URL back into a UIImage; nil for
    /// text attachments or malformed data URLs.
    static func image(for attachment: ChatAttachment) -> UIImage? {
        guard attachment.kind == .image,
              let dataURL = attachment.dataUrl,
              let data = decodeDataURL(dataURL)
        else {
            return nil
        }
        return UIImage(data: data)
    }
    /// Reads a picked file and routes it to the image or text builder based
    /// on its UTType. Security-scoped access is taken for document-picker
    /// URLs and released on exit.
    private static func buildAttachment(fromFileURL url: URL) throws -> ChatAttachment {
        let accessed = url.startAccessingSecurityScopedResource()
        defer {
            if accessed {
                url.stopAccessingSecurityScopedResource()
            }
        }
        let filename = url.lastPathComponent.isEmpty ? "attachment" : url.lastPathComponent
        let resourceValues = try? url.resourceValues(forKeys: [.contentTypeKey])
        // Fall back to extension-based type lookup when resource values fail.
        let contentType = resourceValues?.contentType ?? UTType(filenameExtension: url.pathExtension)
        let data: Data
        do {
            data = try Data(contentsOf: url)
        } catch {
            throw ChatAttachmentError.unreadableFile(filename)
        }
        if contentType?.conforms(to: .image) == true {
            return try buildImageAttachment(data: data, filename: filename, contentType: contentType)
        }
        if isTextLike(contentType: contentType, mimeType: contentType?.preferredMIMEType, filename: filename) {
            return try buildTextAttachment(data: data, filename: filename, mimeType: contentType?.preferredMIMEType ?? "text/plain")
        }
        throw ChatAttachmentError.unsupportedType(filename)
    }
    /// Normalizes arbitrary image bytes into a PNG or JPEG attachment with a
    /// base64 data URL. Non-PNG/JPEG inputs (e.g. HEIC) are re-encoded via
    /// UIImage; the byte cap is enforced after any conversion.
    static func buildImageAttachment(data: Data, filename: String, contentType: UTType?) throws -> ChatAttachment {
        var mimeType = contentType?.preferredMIMEType
        var payload = data
        if mimeType != "image/png" && mimeType != "image/jpeg" {
            guard let image = UIImage(data: data) else {
                throw ChatAttachmentError.unsupportedImageFormat(filename)
            }
            if let pngData = image.pngData(), pngData.count <= maxImageBytes {
                payload = pngData
                mimeType = "image/png"
            } else if let jpegData = image.jpegData(compressionQuality: 0.92) {
                payload = jpegData
                mimeType = "image/jpeg"
            } else {
                throw ChatAttachmentError.unsupportedImageFormat(filename)
            }
        }
        if payload.count > maxImageBytes {
            throw ChatAttachmentError.imageTooLarge(filename)
        }
        // After the branch above mimeType is png or jpeg; anything else
        // (unexpected) is labeled jpeg.
        let normalizedMimeType = (mimeType == "image/png") ? "image/png" : "image/jpeg"
        let dataUrl = "data:\(normalizedMimeType);base64,\(payload.base64EncodedString())"
        return .image(
            filename: filename,
            mimeType: normalizedMimeType,
            sizeBytes: payload.count,
            dataUrl: dataUrl
        )
    }
    /// Decodes bytes as UTF-8 text (lossy for invalid sequences), normalizes
    /// CRLF to LF, strips NUL characters, and truncates to the character cap.
    /// `sizeBytes` reports the original byte count, not the trimmed length.
    private static func buildTextAttachment(data: Data, filename: String, mimeType: String) throws -> ChatAttachment {
        if data.count > maxTextBytes {
            throw ChatAttachmentError.textTooLarge(filename)
        }
        let normalized = String(decoding: data, as: UTF8.self)
            .replacingOccurrences(of: "\r\n", with: "\n")
            .replacingOccurrences(of: "\u{0000}", with: "")
        let truncated = normalized.count > maxTextCharacters
        let trimmedText: String
        if truncated {
            let endIndex = normalized.index(normalized.startIndex, offsetBy: maxTextCharacters)
            trimmedText = String(normalized[..<endIndex])
        } else {
            trimmedText = normalized
        }
        return .text(
            filename: filename,
            mimeType: mimeType,
            sizeBytes: data.count,
            text: trimmedText,
            truncated: truncated
        )
    }
    /// Three-stage text detection: UTType conformance, then MIME type, then
    /// the filename-extension allowlist as a last resort.
    private static func isTextLike(contentType: UTType?, mimeType: String?, filename: String) -> Bool {
        if let contentType {
            if contentType.conforms(to: .text) || contentType.conforms(to: .plainText) || contentType.conforms(to: .sourceCode) {
                return true
            }
            if contentType.conforms(to: .json) || contentType.conforms(to: .xml) {
                return true
            }
        }
        if let mimeType {
            if mimeType.hasPrefix("text/") {
                return true
            }
            if supportedTextMimeTypes.contains(mimeType.lowercased()) {
                return true
            }
        }
        let ext = URL(fileURLWithPath: filename).pathExtension.lowercased()
        return supportedTextExtensions.contains(ext)
    }
    /// Extracts and base64-decodes the payload after the first comma of a
    /// "data:<mime>;base64,<payload>" URL; nil if either step fails.
    private static func decodeDataURL(_ value: String) -> Data? {
        guard let separator = value.firstIndex(of: ",") else {
            return nil
        }
        let encoded = value[value.index(after: separator)...]
        return Data(base64Encoded: String(encoded))
    }
    /// Resolves a provider's file URL, if it vends one. The completion item
    /// may arrive as a URL, URL data representation, or string — all three
    /// are handled; nil means "not a file URL provider".
    @MainActor
    private static func loadFileURL(from provider: NSItemProvider) async throws -> URL? {
        guard provider.hasItemConformingToTypeIdentifier(UTType.fileURL.identifier) else {
            return nil
        }
        return try await withCheckedThrowingContinuation { continuation in
            provider.loadItem(forTypeIdentifier: UTType.fileURL.identifier, options: nil) { item, error in
                if let error {
                    continuation.resume(throwing: error)
                    return
                }
                if let url = item as? URL {
                    continuation.resume(returning: url)
                    return
                }
                if let data = item as? Data,
                   let url = URL(dataRepresentation: data, relativeTo: nil) {
                    continuation.resume(returning: url)
                    return
                }
                if let string = item as? String,
                   let url = URL(string: string) {
                    continuation.resume(returning: url)
                    return
                }
                continuation.resume(returning: nil)
            }
        }
    }
    /// Builds an image attachment from a provider, preferring raw PNG/JPEG
    /// data; falls back to loading a UIImage and re-encoding it as JPEG.
    @MainActor
    private static func loadImageAttachment(from provider: NSItemProvider) async throws -> ChatAttachment? {
        let preferredImageType: UTType = if provider.hasItemConformingToTypeIdentifier(UTType.png.identifier) {
            .png
        } else if provider.hasItemConformingToTypeIdentifier(UTType.jpeg.identifier) {
            .jpeg
        } else {
            .image
        }
        if let data = try await loadDataRepresentation(from: provider, type: preferredImageType) {
            let filenameExtension = preferredImageType.preferredFilenameExtension ?? "jpg"
            let filename = "pasted-image.\(filenameExtension)"
            return try buildImageAttachment(data: data, filename: filename, contentType: preferredImageType)
        }
        if let image = try await loadUIImage(from: provider),
           let jpegData = image.jpegData(compressionQuality: 0.92) {
            return try buildImageAttachment(data: jpegData, filename: "pasted-image.jpg", contentType: .jpeg)
        }
        return nil
    }
    /// Async wrapper over NSItemProvider.loadDataRepresentation; nil when the
    /// provider does not conform to `type`.
    @MainActor
    private static func loadDataRepresentation(from provider: NSItemProvider, type: UTType) async throws -> Data? {
        guard provider.hasItemConformingToTypeIdentifier(type.identifier) else {
            return nil
        }
        return try await withCheckedThrowingContinuation { continuation in
            provider.loadDataRepresentation(forTypeIdentifier: type.identifier) { data, error in
                if let error {
                    continuation.resume(throwing: error)
                    return
                }
                continuation.resume(returning: data)
            }
        }
    }
    /// Async wrapper over NSItemProvider.loadObject for UIImage; nil when the
    /// provider cannot vend a UIImage.
    @MainActor
    private static func loadUIImage(from provider: NSItemProvider) async throws -> UIImage? {
        guard provider.canLoadObject(ofClass: UIImage.self) else {
            return nil
        }
        return try await withCheckedThrowingContinuation { continuation in
            provider.loadObject(ofClass: UIImage.self) { object, error in
                if let error {
                    continuation.resume(throwing: error)
                    return
                }
                continuation.resume(returning: object as? UIImage)
            }
        }
    }
}

View File

@@ -5,6 +5,7 @@ struct SybilChatTranscriptView: View {
var messages: [Message] var messages: [Message]
var isLoading: Bool var isLoading: Bool
var isSending: Bool var isSending: Bool
@State private var hasHandledInitialTranscriptScroll = false
private var hasPendingAssistant: Bool { private var hasPendingAssistant: Bool {
messages.contains { message in messages.contains { message in
@@ -52,20 +53,27 @@ struct SybilChatTranscriptView: View {
.frame(maxWidth: .infinity, alignment: .leading) .frame(maxWidth: .infinity, alignment: .leading)
.scrollDismissesKeyboard(.interactively) .scrollDismissesKeyboard(.interactively)
.onAppear { .onAppear {
proxy.scrollTo("chat-bottom-anchor", anchor: .bottom) scrollToBottom(with: proxy, animated: false)
} }
.onChange(of: messages.map(\.id)) { _, _ in .onChange(of: messages.map(\.id)) { _, _ in
withAnimation(.easeOut(duration: 0.22)) { scrollToBottom(with: proxy, animated: hasHandledInitialTranscriptScroll && !isLoading)
proxy.scrollTo("chat-bottom-anchor", anchor: .bottom) hasHandledInitialTranscriptScroll = true
}
} }
.onChange(of: isSending) { _, _ in .onChange(of: isSending) { _, _ in
withAnimation(.easeOut(duration: 0.22)) { scrollToBottom(with: proxy, animated: hasHandledInitialTranscriptScroll)
proxy.scrollTo("chat-bottom-anchor", anchor: .bottom)
}
} }
} }
} }
/// Scrolls the transcript to its bottom anchor, optionally animating the jump.
private func scrollToBottom(with proxy: ScrollViewProxy, animated: Bool) {
    guard animated else {
        // Instant jump, e.g. on first appearance of the transcript.
        proxy.scrollTo("chat-bottom-anchor", anchor: .bottom)
        return
    }
    withAnimation(.easeOut(duration: 0.22)) {
        proxy.scrollTo("chat-bottom-anchor", anchor: .bottom)
    }
}
} }
private struct MessageBubble: View { private struct MessageBubble: View {
@@ -98,6 +106,13 @@ private struct MessageBubble: View {
) )
} else { } else {
VStack(alignment: .leading, spacing: 8) { VStack(alignment: .leading, spacing: 8) {
if !message.attachments.isEmpty {
SybilAttachmentListView(
attachments: message.attachments,
tone: isUser ? .user : .assistant
)
}
if isPendingAssistant { if isPendingAssistant {
HStack(spacing: 8) { HStack(spacing: 8) {
ProgressView() ProgressView()
@@ -108,7 +123,7 @@ private struct MessageBubble: View {
.foregroundStyle(SybilTheme.textMuted) .foregroundStyle(SybilTheme.textMuted)
} }
.padding(.vertical, 2) .padding(.vertical, 2)
} else { } else if !message.content.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty {
Markdown(message.content) Markdown(message.content)
.tint(SybilTheme.primary) .tint(SybilTheme.primary)
.foregroundStyle(isUser ? SybilTheme.text : SybilTheme.text.opacity(0.95)) .foregroundStyle(isUser ? SybilTheme.text : SybilTheme.text.opacity(0.95))

View File

@@ -21,6 +21,132 @@ public enum MessageRole: String, Codable, Hashable, Sendable {
case tool case tool
} }
/// A file attached to a chat message: either an inline image (carried as a
/// base64 data URL) or extracted text content. Round-trips through message
/// metadata via `jsonValue` / `attachments(from:)`.
public struct ChatAttachment: Codable, Hashable, Identifiable, Sendable {
    public enum Kind: String, Codable, Hashable, Sendable {
        case image
        case text
    }

    public var id: String
    public var kind: Kind
    public var filename: String
    public var mimeType: String
    public var sizeBytes: Int
    /// Present for `.image` attachments: "data:<mime>;base64,..." payload.
    public var dataUrl: String?
    /// Present for `.text` attachments: the (possibly truncated) content.
    public var text: String?
    /// Whether `text` was cut at the client-side character cap.
    public var truncated: Bool?

    public init(
        id: String,
        kind: Kind,
        filename: String,
        mimeType: String,
        sizeBytes: Int,
        dataUrl: String? = nil,
        text: String? = nil,
        truncated: Bool? = nil
    ) {
        self.id = id
        self.kind = kind
        self.filename = filename
        self.mimeType = mimeType
        self.sizeBytes = sizeBytes
        self.dataUrl = dataUrl
        self.text = text
        self.truncated = truncated
    }

    /// Convenience factory for an image attachment.
    public static func image(
        id: String = UUID().uuidString,
        filename: String,
        mimeType: String,
        sizeBytes: Int,
        dataUrl: String
    ) -> ChatAttachment {
        ChatAttachment(
            id: id,
            kind: .image,
            filename: filename,
            mimeType: mimeType,
            sizeBytes: sizeBytes,
            dataUrl: dataUrl
        )
    }

    /// Convenience factory for a text attachment.
    public static func text(
        id: String = UUID().uuidString,
        filename: String,
        mimeType: String,
        sizeBytes: Int,
        text: String,
        truncated: Bool
    ) -> ChatAttachment {
        ChatAttachment(
            id: id,
            kind: .text,
            filename: filename,
            mimeType: mimeType,
            sizeBytes: sizeBytes,
            text: text,
            truncated: truncated
        )
    }

    /// Serializes this attachment for storage in message metadata.
    /// Optional fields are written only when present.
    var jsonValue: JSONValue {
        var fields: [String: JSONValue] = [
            "kind": .string(kind.rawValue),
            "id": .string(id),
            "filename": .string(filename),
            "mimeType": .string(mimeType),
            "sizeBytes": .number(Double(sizeBytes))
        ]
        if let dataUrl {
            fields["dataUrl"] = .string(dataUrl)
        }
        if let text {
            fields["text"] = .string(text)
        }
        if let truncated {
            fields["truncated"] = .bool(truncated)
        }
        return .object(fields)
    }

    /// Reads attachments from a metadata object's "attachments" array.
    /// Entries missing any required field are dropped; never throws.
    static func attachments(from metadata: JSONValue?) -> [ChatAttachment] {
        let entries = metadata?.objectValue?["attachments"]?.arrayValue ?? []
        return entries.compactMap(decode(_:))
    }

    /// Decodes a single metadata entry; nil when any required field is absent
    /// or the kind is unrecognized.
    private static func decode(_ value: JSONValue) -> ChatAttachment? {
        guard
            let fields = value.objectValue,
            let rawKind = fields["kind"]?.stringValue,
            let kind = Kind(rawValue: rawKind),
            let id = fields["id"]?.stringValue,
            let filename = fields["filename"]?.stringValue,
            let mimeType = fields["mimeType"]?.stringValue,
            let size = fields["sizeBytes"]?.numberValue
        else {
            return nil
        }
        return ChatAttachment(
            id: id,
            kind: kind,
            filename: filename,
            mimeType: mimeType,
            sizeBytes: Int(size),
            dataUrl: fields["dataUrl"]?.stringValue,
            text: fields["text"]?.stringValue,
            truncated: fields["truncated"]?.boolValue
        )
    }
}
public struct ChatSummary: Codable, Identifiable, Hashable, Sendable { public struct ChatSummary: Codable, Identifiable, Hashable, Sendable {
public var id: String public var id: String
public var title: String? public var title: String?
@@ -48,6 +174,10 @@ public struct Message: Codable, Identifiable, Hashable, Sendable {
public var name: String? public var name: String?
public var metadata: JSONValue? = nil public var metadata: JSONValue? = nil
public var attachments: [ChatAttachment] {
ChatAttachment.attachments(from: metadata)
}
public var toolCallMetadata: ToolCallMetadata? { public var toolCallMetadata: ToolCallMetadata? {
guard role == .tool, guard role == .tool,
let object = metadata?.objectValue, let object = metadata?.objectValue,
@@ -155,6 +285,20 @@ public enum JSONValue: Codable, Hashable, Sendable {
} }
return nil return nil
} }
/// The wrapped array when this value is `.array`; otherwise nil.
public var arrayValue: [JSONValue]? {
    guard case let .array(items) = self else {
        return nil
    }
    return items
}
/// The wrapped boolean when this value is `.bool`; otherwise nil.
public var boolValue: Bool? {
    guard case let .bool(flag) = self else {
        return nil
    }
    return flag
}
} }
public struct ChatDetail: Codable, Identifiable, Hashable, Sendable { public struct ChatDetail: Codable, Identifiable, Hashable, Sendable {
@@ -239,11 +383,13 @@ public struct CompletionRequestMessage: Codable, Sendable {
public var role: MessageRole public var role: MessageRole
public var content: String public var content: String
public var name: String? public var name: String?
public var attachments: [ChatAttachment]?
public init(role: MessageRole, content: String, name: String? = nil) { public init(role: MessageRole, content: String, name: String? = nil, attachments: [ChatAttachment]? = nil) {
self.role = role self.role = role
self.content = content self.content = content
self.name = name self.name = name
self.attachments = attachments
} }
} }

View File

@@ -23,6 +23,9 @@ enum PhoneRoute: Hashable {
struct SybilPhoneShellView: View { struct SybilPhoneShellView: View {
@Bindable var viewModel: SybilViewModel @Bindable var viewModel: SybilViewModel
@State private var path: [PhoneRoute] = [] @State private var path: [PhoneRoute] = []
@Environment(\.scenePhase) private var scenePhase
@State private var shouldRefreshOnForeground = false
@State private var composerFocusRequest = 0
var body: some View { var body: some View {
NavigationStack(path: $path) { NavigationStack(path: $path) {
@@ -35,10 +38,36 @@ struct SybilPhoneShellView: View {
} }
} }
.navigationDestination(for: PhoneRoute.self) { route in .navigationDestination(for: PhoneRoute.self) { route in
SybilPhoneDestinationView(viewModel: viewModel, route: route) SybilPhoneDestinationView(
viewModel: viewModel,
path: $path,
composerFocusRequest: $composerFocusRequest,
route: route
)
} }
} }
.tint(SybilTheme.primary) .tint(SybilTheme.primary)
.onChange(of: scenePhase) { _, nextPhase in
switch nextPhase {
case .background:
shouldRefreshOnForeground = true
case .active:
guard shouldRefreshOnForeground else {
return
}
shouldRefreshOnForeground = false
Task {
await viewModel.refreshVisibleContent(
refreshCollections: path.isEmpty,
refreshSelection: !path.isEmpty && viewModel.hasRefreshableSelection
)
}
case .inactive:
break
@unknown default:
break
}
}
} }
} }
@@ -225,15 +254,25 @@ private struct SybilPhoneSidebarRow: View {
private struct SybilPhoneDestinationView: View { private struct SybilPhoneDestinationView: View {
@Bindable var viewModel: SybilViewModel @Bindable var viewModel: SybilViewModel
@Binding var path: [PhoneRoute]
@Binding var composerFocusRequest: Int
let route: PhoneRoute let route: PhoneRoute
var body: some View { var body: some View {
SybilWorkspaceView(viewModel: viewModel) SybilWorkspaceView(viewModel: viewModel, composerFocusRequest: composerFocusRequest) {
.frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .topLeading) viewModel.startNewChat()
.navigationBarTitleDisplayMode(.inline) composerFocusRequest += 1
.task(id: route) { if path.isEmpty {
applyRoute() path = [.draftChat]
} else {
path[path.index(before: path.endIndex)] = .draftChat
} }
}
.frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .topLeading)
.navigationBarTitleDisplayMode(.inline)
.task(id: route) {
applyRoute()
}
} }
private func applyRoute() { private func applyRoute() {

View File

@@ -5,23 +5,60 @@ struct SybilSearchResultsView: View {
var search: SearchDetail? var search: SearchDetail?
var isLoading: Bool var isLoading: Bool
var isRunning: Bool var isRunning: Bool
var isStartingChat: Bool = false
var onStartChat: (() -> Void)? = nil
var body: some View { var body: some View {
ScrollView { ScrollView {
VStack(alignment: .leading, spacing: 16) { VStack(alignment: .leading, spacing: 16) {
if let query = search?.query, !query.isEmpty { if let query = search?.query, !query.isEmpty {
VStack(alignment: .leading, spacing: 4) { VStack(alignment: .leading, spacing: 12) {
Text("Results for") VStack(alignment: .leading, spacing: 4) {
.font(.sybil(.footnote)) Text("Results for")
.foregroundStyle(SybilTheme.textMuted) .font(.sybil(.footnote))
Text(query) .foregroundStyle(SybilTheme.textMuted)
.font(.sybil(.title3, weight: .semibold)) Text(query)
.foregroundStyle(SybilTheme.text) .font(.sybil(.title3, weight: .semibold))
.fixedSize(horizontal: false, vertical: true) .foregroundStyle(SybilTheme.text)
.fixedSize(horizontal: false, vertical: true)
Text(resultCountLabel) Text(resultCountLabel)
.font(.sybil(.caption)) .font(.sybil(.caption))
.foregroundStyle(SybilTheme.textMuted) .foregroundStyle(SybilTheme.textMuted)
}
if let onStartChat {
Button {
onStartChat()
} label: {
HStack(spacing: 8) {
if isStartingChat {
ProgressView()
.controlSize(.small)
.tint(SybilTheme.text)
} else {
Image(systemName: "bubble.left.and.text.bubble.right")
.font(.system(size: 14, weight: .semibold))
}
Text(isStartingChat ? "Starting chat..." : "Chat with results")
.font(.sybil(.caption, weight: .semibold))
}
.foregroundStyle(SybilTheme.text)
.padding(.horizontal, 12)
.padding(.vertical, 9)
.background(
RoundedRectangle(cornerRadius: 10)
.fill(SybilTheme.primary.opacity(0.14))
.overlay(
RoundedRectangle(cornerRadius: 10)
.stroke(SybilTheme.primary.opacity(0.30), lineWidth: 1)
)
)
}
.buttonStyle(.plain)
.disabled(!canStartChat)
.opacity(canStartChat ? 1 : 0.55)
}
} }
} }
@@ -76,6 +113,13 @@ struct SybilSearchResultsView: View {
return "\(count) result\(count == 1 ? "" : "s")" return "\(count) result\(count == 1 ? "" : "s")"
} }
/// "Chat with results" is enabled only once a finished, idle search has
/// produced an answer or at least one result.
private var canStartChat: Bool {
    if isLoading || isRunning || isStartingChat {
        return false
    }
    guard let search else {
        return false
    }
    let hasAnswer = search.answerText.map { !$0.isEmpty } ?? false
    return hasAnswer || !search.results.isEmpty
}
@ViewBuilder @ViewBuilder
private var answerCard: some View { private var answerCard: some View {
VStack(alignment: .leading, spacing: 10) { VStack(alignment: .leading, spacing: 10) {

View File

@@ -72,11 +72,6 @@ final class SybilSettingsStore {
return nil return nil
} }
let path = components.path.trimmingCharacters(in: CharacterSet(charactersIn: "/"))
if path.lowercased() == "api" {
components.path = ""
}
return components.url return components.url
} }
} }

View File

@@ -87,16 +87,24 @@ final class SybilViewModel {
var isLoadingCollections = false var isLoadingCollections = false
var isLoadingSelection = false var isLoadingSelection = false
var isSending = false var isSending = false
var isCreatingSearchChat = false
var errorMessage: String? var errorMessage: String?
var composer = "" var composer = ""
var composerAttachments: [ChatAttachment] = []
var provider: Provider var provider: Provider
var modelCatalog: [Provider: ProviderModelInfo] = [:] var modelCatalog: [Provider: ProviderModelInfo] = [:]
var model: String var model: String
@ObservationIgnored
private var hasBootstrapped = false private var hasBootstrapped = false
private var pendingChatState: PendingChatState? private var pendingChatState: PendingChatState?
@ObservationIgnored
private var selectionTask: Task<Void, Never>? private var selectionTask: Task<Void, Never>?
@ObservationIgnored
private var chatBackgroundTask: SybilBackgroundTaskAssertion?
@ObservationIgnored
private let clientFactory: (APIConfiguration) -> any SybilAPIClienting
private let fallbackModels: [Provider: [String]] = [ private let fallbackModels: [Provider: [String]] = [
.openai: ["gpt-4.1-mini"], .openai: ["gpt-4.1-mini"],
@@ -104,8 +112,14 @@ final class SybilViewModel {
.xai: ["grok-3-mini"] .xai: ["grok-3-mini"]
] ]
init(settings: SybilSettingsStore = SybilSettingsStore()) { init(
settings: SybilSettingsStore = SybilSettingsStore(),
clientFactory: @escaping (APIConfiguration) -> any SybilAPIClienting = { configuration in
SybilAPIClient(configuration: configuration)
}
) {
self.settings = settings self.settings = settings
self.clientFactory = clientFactory
self.provider = settings.preferredProvider self.provider = settings.preferredProvider
self.model = settings.preferredModelByProvider[settings.preferredProvider] ?? "gpt-4.1-mini" self.model = settings.preferredModelByProvider[settings.preferredProvider] ?? "gpt-4.1-mini"
} }
@@ -201,21 +215,34 @@ final class SybilViewModel {
return draftKind != nil || selectedItem != nil return draftKind != nil || selectedItem != nil
} }
/// Whether the composer currently holds sendable input.
/// Search mode requires non-blank text; chat mode additionally allows
/// attachment-only messages with no text.
var canSendComposer: Bool {
    guard !isSending else {
        return false
    }
    let hasText = !composer.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty
    if isSearchMode {
        return hasText
    }
    return hasText || !composerAttachments.isEmpty
}
var displayedMessages: [Message] { var displayedMessages: [Message] {
let canonical = selectedChat?.messages ?? [] let canonical = displayableMessages(selectedChat?.messages ?? [])
guard let pending = pendingChatState else { guard let pending = pendingChatState else {
return canonical return canonical
} }
if let pendingID = pending.chatID { if let pendingID = pending.chatID {
if case let .chat(selectedID) = selectedItem, selectedID == pendingID { if case let .chat(selectedID) = selectedItem, selectedID == pendingID {
return pending.messages return displayableMessages(pending.messages)
} }
return canonical return canonical
} }
if draftKind == .chat { if draftKind == .chat {
return pending.messages return displayableMessages(pending.messages)
} }
return canonical return canonical
@@ -266,6 +293,19 @@ final class SybilViewModel {
return searches.first(where: { $0.id == searchID }) return searches.first(where: { $0.id == searchID })
} }
/// True when foregrounding should re-fetch the current selection:
/// a concrete chat or search is selected (not a draft, not settings).
var hasRefreshableSelection: Bool {
    guard draftKind == nil, let selection = selectedItem else {
        return false
    }
    switch selection {
    case .settings:
        return false
    case .chat, .search:
        return true
    }
}
func bootstrap() async { func bootstrap() async {
guard !hasBootstrapped else { guard !hasBootstrapped else {
return return
@@ -281,6 +321,7 @@ final class SybilViewModel {
authError = nil authError = nil
errorMessage = nil errorMessage = nil
pendingChatState = nil pendingChatState = nil
composerAttachments = []
settings.persist() settings.persist()
SybilLog.info( SybilLog.info(
@@ -357,6 +398,7 @@ final class SybilViewModel {
selectedSearch = nil selectedSearch = nil
errorMessage = nil errorMessage = nil
composer = "" composer = ""
composerAttachments = []
} }
func startNewSearch() { func startNewSearch() {
@@ -367,6 +409,7 @@ final class SybilViewModel {
selectedSearch = nil selectedSearch = nil
errorMessage = nil errorMessage = nil
composer = "" composer = ""
composerAttachments = []
} }
func openSettings() { func openSettings() {
@@ -376,6 +419,7 @@ final class SybilViewModel {
selectedChat = nil selectedChat = nil
selectedSearch = nil selectedSearch = nil
errorMessage = nil errorMessage = nil
composerAttachments = []
} }
func select(_ selection: SidebarSelection) { func select(_ selection: SidebarSelection) {
@@ -383,6 +427,9 @@ final class SybilViewModel {
draftKind = nil draftKind = nil
selectedItem = selection selectedItem = selection
errorMessage = nil errorMessage = nil
if case .search = selection {
composerAttachments = []
}
if case .settings = selection { if case .settings = selection {
selectedChat = nil selectedChat = nil
@@ -427,13 +474,46 @@ final class SybilViewModel {
await reconnect() await reconnect()
} }
/// Refreshes data when the app returns to the foreground. A collections
/// refresh subsumes the selection refresh (it forwards the flag); otherwise
/// only the selection is re-fetched. No-ops while unauthenticated, while a
/// session check is in flight, or when both flags are false.
func refreshVisibleContent(refreshCollections shouldRefreshCollections: Bool, refreshSelection shouldRefreshSelection: Bool) async {
    guard isAuthenticated, !isCheckingSession else {
        return
    }
    guard shouldRefreshCollections || shouldRefreshSelection else {
        return
    }
    SybilLog.info(
        SybilLog.ui,
        "Foreground refresh requested (collections=\(shouldRefreshCollections), selection=\(shouldRefreshSelection))"
    )
    if shouldRefreshCollections {
        await refreshCollections(preferredSelection: selectedItem, refreshSelection: shouldRefreshSelection)
    } else if shouldRefreshSelection {
        await refreshSelectionIfNeeded()
    }
}
func sendComposer() async { func sendComposer() async {
let content = composer.trimmingCharacters(in: .whitespacesAndNewlines) let content = composer.trimmingCharacters(in: .whitespacesAndNewlines)
guard !content.isEmpty, !isSending else { let attachments = composerAttachments
guard !isSending else {
return
}
if isSearchMode {
guard !content.isEmpty else { return }
} else if content.isEmpty && attachments.isEmpty {
return return
} }
composer = "" composer = ""
composerAttachments = []
errorMessage = nil errorMessage = nil
isSending = true isSending = true
@@ -443,7 +523,7 @@ final class SybilViewModel {
try await sendSearch(query: content) try await sendSearch(query: content)
} else { } else {
SybilLog.info(SybilLog.ui, "Sending chat prompt") SybilLog.info(SybilLog.ui, "Sending chat prompt")
try await sendChat(content: content) try await sendChat(content: content, attachments: attachments)
} }
} catch { } catch {
errorMessage = normalizeAPIError(error) errorMessage = normalizeAPIError(error)
@@ -467,13 +547,70 @@ final class SybilViewModel {
} }
} }
pendingChatState = nil if !isSearchMode {
composer = content
composerAttachments = attachments
pendingChatState = nil
}
} }
isSending = false isSending = false
} }
private func loadInitialData(using client: SybilAPIClient) async { func appendComposerAttachments(_ attachments: [ChatAttachment]) throws {
guard !attachments.isEmpty else {
return
}
guard !isSearchMode else {
errorMessage = "Attachments are only available in chat mode."
return
}
if composerAttachments.count + attachments.count > SybilChatAttachmentSupport.maxAttachmentsPerMessage {
throw ChatAttachmentError.tooManyAttachments(SybilChatAttachmentSupport.maxAttachmentsPerMessage)
}
composerAttachments += attachments
errorMessage = nil
}
/// Drops the staged composer attachment with the given identifier, if any.
func removeComposerAttachment(id: String) {
    composerAttachments = composerAttachments.filter { $0.id != id }
}
/// Promotes the currently selected search into a new chat via the API and
/// switches the UI to that conversation. Guarded against re-entry while a
/// creation or send is already in flight; API failures surface through
/// `errorMessage`.
func startChatFromSelectedSearch() async {
    guard let search = selectedSearch, !isCreatingSearchChat, !isSending else {
        return
    }
    isCreatingSearchChat = true
    defer { isCreatingSearchChat = false }
    errorMessage = nil
    do {
        let api = try client()
        let created = try await api.createChatFromSearch(searchID: search.id, title: nil)
        // Clear draft/composer state before switching over to the new chat.
        draftKind = nil
        pendingChatState = nil
        composer = ""
        composerAttachments = []
        // Surface the chat at the top of the sidebar, dropping any stale copy.
        chats.removeAll(where: { $0.id == created.id })
        chats.insert(created, at: 0)
        selectedItem = .chat(created.id)
        selectedSearch = nil
        await refreshCollections(preferredSelection: .chat(created.id))
    } catch {
        errorMessage = normalizeAPIError(error)
        SybilLog.error(SybilLog.ui, "Create chat from search failed", error: error)
    }
}
private func loadInitialData(using client: any SybilAPIClienting) async {
isLoadingCollections = true isLoadingCollections = true
errorMessage = nil errorMessage = nil
@@ -545,7 +682,10 @@ final class SybilViewModel {
settings.persist() settings.persist()
} }
private func refreshCollections(preferredSelection: SidebarSelection?) async { private func refreshCollections(
preferredSelection: SidebarSelection?,
refreshSelection: Bool = true
) async {
isLoadingCollections = true isLoadingCollections = true
do { do {
@@ -577,7 +717,7 @@ final class SybilViewModel {
selectedItem = sidebarItems.first?.selection selectedItem = sidebarItems.first?.selection
} }
if selectedItem != nil { if refreshSelection, selectedItem != nil {
await refreshSelectionIfNeeded() await refreshSelectionIfNeeded()
} }
} catch { } catch {
@@ -637,13 +777,14 @@ final class SybilViewModel {
selectedSearch = nil selectedSearch = nil
} }
private func sendChat(content: String) async throws { private func sendChat(content: String, attachments: [ChatAttachment]) async throws {
let optimisticUser = Message( let optimisticUser = Message(
id: "temp-user-\(UUID().uuidString)", id: "temp-user-\(UUID().uuidString)",
createdAt: Date(), createdAt: Date(),
role: .user, role: .user,
content: content, content: content,
name: nil name: nil,
metadata: SybilChatAttachmentSupport.metadataValue(for: attachments)
) )
let optimisticAssistant = Message( let optimisticAssistant = Message(
@@ -663,7 +804,7 @@ final class SybilViewModel {
var chatID = currentChatID var chatID = currentChatID
if chatID == nil { if chatID == nil {
let created = try await client.createChat() let created = try await client.createChat(title: nil)
chatID = created.id chatID = created.id
draftKind = nil draftKind = nil
selectedItem = .chat(created.id) selectedItem = .chat(created.id)
@@ -709,8 +850,8 @@ final class SybilViewModel {
baseChat.messages baseChat.messages
.filter { !$0.isToolCallLog } .filter { !$0.isToolCallLog }
.map { .map {
CompletionRequestMessage(role: $0.role, content: $0.content, name: $0.name) CompletionRequestMessage(role: $0.role, content: $0.content, name: $0.name, attachments: $0.attachments.isEmpty ? nil : $0.attachments)
} + [CompletionRequestMessage(role: .user, content: content)] } + [CompletionRequestMessage(role: .user, content: content, attachments: attachments.isEmpty ? nil : attachments)]
let streamStatus = CompletionStreamStatus() let streamStatus = CompletionStreamStatus()
@@ -718,7 +859,8 @@ final class SybilViewModel {
Task { [weak self] in Task { [weak self] in
guard let self else { return } guard let self else { return }
do { do {
let updated = try await client.suggestChatTitle(chatID: chatID, content: content) let titleSeed = !content.isEmpty ? content : SybilChatAttachmentSupport.attachmentSummary(attachments)
let updated = try await client.suggestChatTitle(chatID: chatID, content: titleSeed.isEmpty ? "Uploaded files" : titleSeed)
await MainActor.run { await MainActor.run {
self.chats = self.chats.map { existing in self.chats = self.chats.map { existing in
if existing.id == updated.id { if existing.id == updated.id {
@@ -738,6 +880,15 @@ final class SybilViewModel {
} }
} }
chatBackgroundTask?.end()
chatBackgroundTask = SybilBackgroundTaskAssertion(name: "Sybil Chat Response") {
SybilLog.warning(SybilLog.app, "Chat response background time expired")
}
defer {
chatBackgroundTask?.end()
chatBackgroundTask = nil
}
try await client.runCompletionStream( try await client.runCompletionStream(
body: CompletionStreamRequest( body: CompletionStreamRequest(
chatId: chatID, chatId: chatID,
@@ -974,6 +1125,10 @@ final class SybilViewModel {
} }
} }
private func displayableMessages(_ messages: [Message]) -> [Message] {
messages.filter { $0.role != .system }
}
private func chatTitle(title: String?, messages: [Message]?) -> String { private func chatTitle(title: String?, messages: [Message]?) -> String {
if let title = title?.trimmingCharacters(in: .whitespacesAndNewlines), !title.isEmpty { if let title = title?.trimmingCharacters(in: .whitespacesAndNewlines), !title.isEmpty {
return title return title
@@ -984,6 +1139,13 @@ final class SybilViewModel {
return String(firstUserMessage.prefix(48)) return String(firstUserMessage.prefix(48))
} }
if let firstUserMessage = messages?.first(where: { $0.role == .user }) {
let attachmentSummary = SybilChatAttachmentSupport.attachmentSummary(firstUserMessage.attachments)
if !attachmentSummary.isEmpty {
return String(attachmentSummary.prefix(48))
}
}
return "New chat" return "New chat"
} }
@@ -1070,7 +1232,7 @@ final class SybilViewModel {
return false return false
} }
private func client() throws -> SybilAPIClient { private func client() throws -> any SybilAPIClienting {
guard let baseURL = settings.normalizedAPIBaseURL else { guard let baseURL = settings.normalizedAPIBaseURL else {
throw APIError.invalidBaseURL throw APIError.invalidBaseURL
} }
@@ -1080,8 +1242,8 @@ final class SybilViewModel {
"Creating API client for \(baseURL.absoluteString) (token: \(settings.trimmedTokenOrNil == nil ? "none" : "set"))" "Creating API client for \(baseURL.absoluteString) (token: \(settings.trimmedTokenOrNil == nil ? "none" : "set"))"
) )
return SybilAPIClient( return clientFactory(
configuration: APIConfiguration( APIConfiguration(
baseURL: baseURL, baseURL: baseURL,
authToken: settings.trimmedTokenOrNil authToken: settings.trimmedTokenOrNil
) )

View File

@@ -1,9 +1,27 @@
import Observation import Observation
import PhotosUI
import SwiftUI import SwiftUI
import UniformTypeIdentifiers
import UIKit
struct SybilWorkspaceView: View { struct SybilWorkspaceView: View {
@Bindable var viewModel: SybilViewModel @Bindable var viewModel: SybilViewModel
var composerFocusRequest: Int = 0
var onRequestNewChat: (() -> Void)? = nil
@FocusState private var composerFocused: Bool @FocusState private var composerFocused: Bool
@State private var isShowingAttachmentOptions = false
@State private var isShowingFileImporter = false
@State private var isShowingPhotoPicker = false
@State private var photoPickerItems: [PhotosPickerItem] = []
@State private var isComposerDropTargeted = false
@State private var newChatSwipeOffset: CGFloat = 0
@State private var newChatSwipeCompletionOffset: CGFloat = 0
@State private var newChatSwipeContainerWidth: CGFloat = NewChatSwipeMetrics.referenceWidth
@State private var newChatSwipeIsActive = false
@State private var newChatSwipeIsCompleting = false
@State private var newChatSwipeHasLatched = false
@State private var newChatSwipeDidTriggerHaptic = false
@State private var newChatSwipeFeedbackGenerator: UIImpactFeedbackGenerator?
private var isSettingsSelected: Bool { private var isSettingsSelected: Bool {
if case .settings = viewModel.selectedItem { if case .settings = viewModel.selectedItem {
@@ -16,7 +34,83 @@ struct SybilWorkspaceView: View {
viewModel.errorMessage != nil viewModel.errorMessage != nil
} }
/// Stable identity for the transcript view so its scroll state resets when
/// the conversation context (draft vs. a specific chat) changes.
private var transcriptScrollContextID: String {
    guard viewModel.draftKind != .chat else {
        return "draft-chat"
    }
    guard case let .chat(chatID) = viewModel.selectedItem else {
        return "chat:none"
    }
    return "chat:\(chatID)"
}
/// Whether the swipe-left-for-new-chat gesture is currently available:
/// a handler exists, nothing is sending, no draft is open, and a chat is selected.
private var canSwipeToCreateChat: Bool {
    if onRequestNewChat == nil || viewModel.isSending || viewModel.draftKind != nil {
        return false
    }
    if case .chat = viewModel.selectedItem {
        return true
    }
    return false
}
// The pan recognizer stays disabled while the completion animation runs.
private var canRecognizeNewChatSwipe: Bool {
    canSwipeToCreateChat && !newChatSwipeIsCompleting
}

// The backdrop must remain visible through the completion animation even
// after `canSwipeToCreateChat` has flipped off.
private var showsNewChatSwipeBackdrop: Bool {
    canSwipeToCreateChat || newChatSwipeIsCompleting
}
// Workspace root: swipe backdrop behind the content, content offset/blurred
// while the new-chat swipe is in progress.
// NOTE(review): the next line appears duplicated by the diff renderer; verify
// the real file declares `var body` once.
var body: some View { var body: some View {
    ZStack(alignment: .trailing) {
        // Decorative new-chat indicator; never intercepts touches.
        if showsNewChatSwipeBackdrop {
            NewChatSwipeBackdrop(
                progress: NewChatSwipeMetrics.progress(for: newChatSwipeOffset, width: newChatSwipeContainerWidth),
                hasLatched: newChatSwipeHasLatched
            )
            .padding(.trailing, 18)
            .padding(.vertical, 20)
            .allowsHitTesting(false)
        }
        workspaceContent
            // Composite before offset/blur so the whole content moves as one layer.
            .compositingGroup()
            .offset(x: newChatSwipeOffset)
            .blur(radius: NewChatSwipeMetrics.blurRadius(for: newChatSwipeOffset, width: newChatSwipeContainerWidth))
    }
    // Separate offset used only by the slide-off completion animation.
    .offset(x: newChatSwipeCompletionOffset)
    .background(SybilTheme.background)
    .navigationTitle(viewModel.selectedTitle)
    .navigationBarTitleDisplayMode(.inline)
    .toolbarRole(.editor)
    .toolbar {
        if !isSettingsSelected {
            ToolbarItem(placement: .topBarTrailing) {
                if viewModel.isSearchMode {
                    searchModeChip
                } else {
                    providerModelMenu
                }
            }
        }
    }
    .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .topLeading)
    .onChange(of: canSwipeToCreateChat) { _, isEnabled in
        // If the gesture becomes unavailable mid-swipe, snap state back instantly.
        guard !isEnabled else {
            return
        }
        resetNewChatSwipe(animated: false)
    }
    // Each increment of composerFocusRequest re-runs the focus task.
    .task(id: composerFocusRequest) {
        await focusComposerIfRequested()
    }
}
private var workspaceContent: some View {
VStack(spacing: 0) { VStack(spacing: 0) {
if showsHeader { if showsHeader {
header header
@@ -32,17 +126,41 @@ struct SybilWorkspaceView: View {
SybilSearchResultsView( SybilSearchResultsView(
search: viewModel.selectedSearch, search: viewModel.selectedSearch,
isLoading: viewModel.isLoadingSelection, isLoading: viewModel.isLoadingSelection,
isRunning: viewModel.isSending isRunning: viewModel.isSending,
) isStartingChat: viewModel.isCreatingSearchChat
) {
Task {
await viewModel.startChatFromSelectedSearch()
}
}
} else { } else {
SybilChatTranscriptView( SybilChatTranscriptView(
messages: viewModel.displayedMessages, messages: viewModel.displayedMessages,
isLoading: viewModel.isLoadingSelection, isLoading: viewModel.isLoadingSelection,
isSending: viewModel.isSending isSending: viewModel.isSending
) )
.id(transcriptScrollContextID)
} }
} }
.frame(maxWidth: .infinity, maxHeight: .infinity) .frame(maxWidth: .infinity, maxHeight: .infinity)
.background {
NewChatSwipePanInstaller(
isEnabled: canRecognizeNewChatSwipe,
onBegan: { width in
beginNewChatSwipe(containerWidth: width)
},
onChanged: { translationX, width in
updateNewChatSwipe(with: translationX, containerWidth: width)
},
onEnded: { translationX, width, didFinish in
finishNewChatSwipe(
translationX: translationX,
containerWidth: width,
didFinish: didFinish
)
}
)
}
if viewModel.showsComposer { if viewModel.showsComposer {
Divider() Divider()
@@ -50,27 +168,114 @@ struct SybilWorkspaceView: View {
composerBar composerBar
} }
} }
.navigationTitle(viewModel.selectedTitle) }
.navigationBarTitleDisplayMode(.inline)
.toolbarRole(.editor) private func beginNewChatSwipe(containerWidth: CGFloat) {
.toolbar { let update = {
if !isSettingsSelected { newChatSwipeContainerWidth = max(containerWidth, 1)
ToolbarItem(placement: .topBarTrailing) { newChatSwipeIsActive = true
if viewModel.isSearchMode { newChatSwipeHasLatched = false
searchModeChip newChatSwipeDidTriggerHaptic = false
} else {
providerModelMenu
}
}
}
} }
.background(SybilTheme.background)
.frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .topLeading) var transaction = Transaction()
.onChange(of: viewModel.isSending) { _, isSending in transaction.disablesAnimations = true
if !isSending, viewModel.showsComposer { withTransaction(transaction, update)
composerFocused = true
} if newChatSwipeFeedbackGenerator == nil {
newChatSwipeFeedbackGenerator = UIImpactFeedbackGenerator(style: .rigid)
} }
newChatSwipeFeedbackGenerator?.prepare()
}
/// Applies a new pan translation: clamps the offset, recomputes latch state
/// (with hysteresis), and fires a single haptic on the first latch.
private func updateNewChatSwipe(with rawTranslation: CGFloat, containerWidth: CGFloat) {
    let nextOffset = NewChatSwipeMetrics.clampedOffset(for: rawTranslation, width: containerWidth)
    let wasLatched = newChatSwipeHasLatched
    // The latch threshold depends on current latch state (hysteresis).
    let nextLatched = NewChatSwipeMetrics.isLatched(
        offset: nextOffset,
        width: containerWidth,
        isCurrentlyLatched: newChatSwipeHasLatched
    )
    // Disable implicit animations so the content tracks the finger 1:1.
    var transaction = Transaction()
    transaction.disablesAnimations = true
    withTransaction(transaction) {
        newChatSwipeContainerWidth = max(containerWidth, 1)
        newChatSwipeOffset = nextOffset
        newChatSwipeHasLatched = nextLatched
    }
    // At most one haptic per gesture, on the unlatched -> latched transition.
    if nextLatched && !wasLatched && !newChatSwipeDidTriggerHaptic {
        newChatSwipeFeedbackGenerator?.impactOccurred(intensity: 0.95)
        newChatSwipeDidTriggerHaptic = true
    }
}
/// Ends the pan: completes the new-chat action when the gesture finished
/// while latched, otherwise springs the content back into place.
private func finishNewChatSwipe(translationX: CGFloat, containerWidth: CGFloat, didFinish: Bool) {
    guard newChatSwipeIsActive else {
        resetNewChatSwipe(animated: false)
        return
    }
    // Apply the final translation so latch state reflects the release point.
    updateNewChatSwipe(with: translationX, containerWidth: containerWidth)
    if didFinish && newChatSwipeHasLatched {
        Task {
            await completeNewChatSwipe(containerWidth: containerWidth)
        }
        return
    }
    // Cancelled/failed gestures, or releases short of the latch, snap back.
    resetNewChatSwipe(animated: true)
}
/// Animates the content fully off-screen, then asks the host to create a
/// new chat and resets swipe state once the animation has played out.
@MainActor
private func completeNewChatSwipe(containerWidth: CGFloat) async {
    newChatSwipeIsCompleting = true
    withAnimation(.easeIn(duration: NewChatSwipeMetrics.completionAnimationDuration)) {
        // Overshoot past the container edge so the content clears entirely.
        newChatSwipeCompletionOffset = -(containerWidth + NewChatSwipeMetrics.completionOvershoot)
    }
    // Wait for the slide-out to finish before swapping in the new chat.
    try? await Task.sleep(for: .milliseconds(NewChatSwipeMetrics.completionAnimationDelayMs))
    onRequestNewChat?()
    resetNewChatSwipe(animated: false)
}
/// Returns every piece of swipe-gesture state to its idle value, optionally
/// springing the content back. The haptic generator is always released so
/// the next gesture starts with a fresh one.
private func resetNewChatSwipe(animated: Bool) {
    func clearSwipeState() {
        newChatSwipeOffset = 0
        newChatSwipeCompletionOffset = 0
        newChatSwipeIsActive = false
        newChatSwipeIsCompleting = false
        newChatSwipeHasLatched = false
        newChatSwipeDidTriggerHaptic = false
    }
    if animated {
        withAnimation(.spring(response: 0.28, dampingFraction: 0.82)) {
            clearSwipeState()
        }
    } else {
        clearSwipeState()
    }
    newChatSwipeFeedbackGenerator = nil
}
/// Moves keyboard focus into the composer in response to an external focus
/// request; the initial value of 0 is ignored.
@MainActor
private func focusComposerIfRequested() async {
    guard composerFocusRequest > 0 else {
        return
    }
    // Brief delay lets layout/navigation settle before grabbing focus.
    await Task.yield()
    try? await Task.sleep(for: .milliseconds(80))
    guard viewModel.showsComposer, !viewModel.isSearchMode else {
        return
    }
    composerFocused = true
// NOTE(review): duplicated closing brace below looks like a diff-render
// artifact; verify the real file closes this function with a single brace.
} }
private var header: some View { private var header: some View {
@@ -148,54 +353,87 @@ struct SybilWorkspaceView: View {
} }
private var composerBar: some View { private var composerBar: some View {
HStack(alignment: .bottom, spacing: 10) { VStack(alignment: .leading, spacing: 10) {
TextField( if !viewModel.isSearchMode && !viewModel.composerAttachments.isEmpty {
viewModel.isSearchMode ? "Search the web" : "Message Sybil", SybilAttachmentListView(
text: $viewModel.composer, attachments: viewModel.composerAttachments,
axis: .vertical tone: .composer
) ) { attachmentID in
.focused($composerFocused) viewModel.removeComposerAttachment(id: attachmentID)
.textInputAutocapitalization(.sentences)
.autocorrectionDisabled(false)
.lineLimit(1 ... 6)
.submitLabel(.send)
.onSubmit {
Task {
await viewModel.sendComposer()
} }
} }
.padding(.horizontal, 12)
.padding(.vertical, 10)
.background(
RoundedRectangle(cornerRadius: 12)
.fill(SybilTheme.composerGradient)
.overlay(
RoundedRectangle(cornerRadius: 12)
.stroke(SybilTheme.primary.opacity(0.34), lineWidth: 1)
)
)
.foregroundStyle(SybilTheme.text)
Button { HStack(alignment: .bottom, spacing: 10) {
Task { if !viewModel.isSearchMode {
await viewModel.sendComposer() Button {
} isShowingAttachmentOptions = true
} label: { } label: {
Image(systemName: viewModel.isSearchMode ? "magnifyingglass" : "arrow.up") Image(systemName: "paperclip")
.font(.system(size: 17, weight: .semibold)) .font(.system(size: 17, weight: .semibold))
.frame(width: 40, height: 40) .frame(width: 40, height: 40)
.background( .background(
Circle() Circle()
.fill( .fill(SybilTheme.surface)
viewModel.composer.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty || viewModel.isSending
? AnyShapeStyle(SybilTheme.surface)
: AnyShapeStyle(SybilTheme.primaryGradient)
) )
) .overlay(
.foregroundStyle(viewModel.composer.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty || viewModel.isSending ? SybilTheme.textMuted : SybilTheme.text) Circle()
.stroke(SybilTheme.border.opacity(0.82), lineWidth: 1)
)
.foregroundStyle(viewModel.isSending ? SybilTheme.textMuted : SybilTheme.text)
}
.buttonStyle(.plain)
.disabled(viewModel.isSending)
.accessibilityLabel("Attach file")
}
TextField(
viewModel.isSearchMode ? "Search the web" : "Message Sybil",
text: $viewModel.composer,
axis: .vertical
)
.focused($composerFocused)
.textInputAutocapitalization(.sentences)
.autocorrectionDisabled(false)
.lineLimit(1 ... 6)
.submitLabel(.send)
.onSubmit {
Task {
await viewModel.sendComposer()
}
}
.padding(.horizontal, 12)
.padding(.vertical, 10)
.background(
RoundedRectangle(cornerRadius: 12)
.fill(SybilTheme.composerGradient)
.overlay(
RoundedRectangle(cornerRadius: 12)
.stroke(SybilTheme.primary.opacity(0.34), lineWidth: 1)
)
)
.foregroundStyle(SybilTheme.text)
Button {
Task {
await viewModel.sendComposer()
}
} label: {
Image(systemName: viewModel.isSearchMode ? "magnifyingglass" : "arrow.up")
.font(.system(size: 17, weight: .semibold))
.frame(width: 40, height: 40)
.background(
Circle()
.fill(
viewModel.canSendComposer
? AnyShapeStyle(SybilTheme.primaryGradient)
: AnyShapeStyle(SybilTheme.surface)
)
)
.foregroundStyle(viewModel.canSendComposer ? SybilTheme.text : SybilTheme.textMuted)
}
.buttonStyle(.plain)
.disabled(!viewModel.canSendComposer)
} }
.buttonStyle(.plain)
.disabled(viewModel.composer.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty || viewModel.isSending)
} }
.padding(.horizontal, 14) .padding(.horizontal, 14)
.padding(.vertical, 12) .padding(.vertical, 12)
@@ -209,5 +447,493 @@ struct SybilWorkspaceView: View {
endPoint: .bottom endPoint: .bottom
) )
) )
.overlay {
if isComposerDropTargeted && !viewModel.isSearchMode {
RoundedRectangle(cornerRadius: 18)
.stroke(SybilTheme.accent.opacity(0.78), style: StrokeStyle(lineWidth: 1.5, dash: [7, 5]))
.padding(.horizontal, 14)
.padding(.vertical, 10)
}
}
.onDrop(of: [UTType.fileURL.identifier, UTType.image.identifier], isTargeted: $isComposerDropTargeted) { providers in
if viewModel.isSearchMode || viewModel.isSending {
return false
}
Task {
await importAttachmentsFromItemProviders(providers)
}
return true
}
.confirmationDialog("Add attachment", isPresented: $isShowingAttachmentOptions, titleVisibility: .visible) {
Button("Photo Library") {
isShowingPhotoPicker = true
}
Button("Files") {
isShowingFileImporter = true
}
Button("Paste from Clipboard") {
Task {
await pasteAttachmentsFromClipboard()
}
}
Button("Cancel", role: .cancel) {}
}
.photosPicker(
isPresented: $isShowingPhotoPicker,
selection: $photoPickerItems,
maxSelectionCount: max(1, SybilChatAttachmentSupport.maxAttachmentsPerMessage - viewModel.composerAttachments.count),
matching: .images
)
.fileImporter(
isPresented: $isShowingFileImporter,
allowedContentTypes: [.item],
allowsMultipleSelection: true
) { result in
Task {
do {
let urls = try result.get()
let attachments = try SybilChatAttachmentSupport.buildAttachments(from: urls)
try await MainActor.run {
try viewModel.appendComposerAttachments(attachments)
}
composerFocused = true
} catch {
await MainActor.run {
viewModel.errorMessage = error.localizedDescription
}
SybilLog.error(SybilLog.ui, "File import failed", error: error)
}
}
}
.onChange(of: photoPickerItems) { _, items in
guard !items.isEmpty else { return }
Task {
do {
let attachments = try await loadAttachmentsFromPhotoPickerItems(items)
try await MainActor.run {
try viewModel.appendComposerAttachments(attachments)
photoPickerItems = []
}
composerFocused = true
} catch {
await MainActor.run {
viewModel.errorMessage = error.localizedDescription
photoPickerItems = []
}
SybilLog.error(SybilLog.ui, "Photo import failed", error: error)
}
}
}
}
/// Builds attachments from drag-and-drop or clipboard item providers and
/// appends them to the composer, surfacing failures as an error message.
@MainActor
private func importAttachmentsFromItemProviders(_ providers: [NSItemProvider]) async {
    do {
        let imported = try await SybilChatAttachmentSupport.buildAttachments(from: providers)
        try viewModel.appendComposerAttachments(imported)
        composerFocused = true
    } catch {
        viewModel.errorMessage = error.localizedDescription
        SybilLog.error(SybilLog.ui, "Clipboard/drop attachment import failed", error: error)
    }
}
/// Converts picked photo-library items into image attachments. Items that
/// fail to yield data are skipped; filename falls back to "photo.jpg" when
/// no image content type with a preferred extension is available.
private func loadAttachmentsFromPhotoPickerItems(_ items: [PhotosPickerItem]) async throws -> [ChatAttachment] {
    var built: [ChatAttachment] = []
    for pickerItem in items {
        guard let imageData = try await pickerItem.loadTransferable(type: Data.self) else {
            continue
        }
        let imageType = pickerItem.supportedContentTypes.first { $0.conforms(to: .image) }
        let filename: String
        if let ext = imageType?.preferredFilenameExtension {
            filename = "photo.\(ext)"
        } else {
            filename = "photo.jpg"
        }
        built.append(try SybilChatAttachmentSupport.buildImageAttachment(data: imageData, filename: filename, contentType: imageType))
    }
    return built
}
/// Imports whatever the system pasteboard holds — image, file URL, or plain
/// text — as composer attachments. An image can combine with a URL/text.
@MainActor
private func pasteAttachmentsFromClipboard() async {
    do {
        let pasteboard = UIPasteboard.general
        var attachments: [ChatAttachment] = []
        if let image = pasteboard.image {
            attachments.append(try SybilChatAttachmentSupport.buildImageAttachment(image: image))
        }
        // A non-file URL falls through to the plain-text branch below.
        if let url = pasteboard.url, url.isFileURL {
            attachments.append(contentsOf: try SybilChatAttachmentSupport.buildAttachments(from: [url]))
        } else if let text = pasteboard.string?.trimmingCharacters(in: .whitespacesAndNewlines), !text.isEmpty {
            attachments.append(try SybilChatAttachmentSupport.buildTextAttachment(text: text))
        }
        guard !attachments.isEmpty else {
            viewModel.errorMessage = "Clipboard does not contain a supported attachment."
            return
        }
        try viewModel.appendComposerAttachments(attachments)
        composerFocused = true
    } catch {
        viewModel.errorMessage = error.localizedDescription
        SybilLog.error(SybilLog.ui, "Clipboard attachment import failed", error: error)
    }
}
}
/// Pure geometry and thresholds for the swipe-left-to-create-chat gesture.
/// Distances are in points; content offsets are negative (leftward).
enum NewChatSwipeMetrics {
    /// Width assumed before the real container width is known.
    static let referenceWidth: CGFloat = 390
    static let horizontalActivationDistance: CGFloat = 18
    static let directionDominanceRatio: CGFloat = 1.22
    static let minimumLeftwardVelocity: CGFloat = 55
    static let latchHysteresis: CGFloat = 32
    static let completionOvershoot: CGFloat = 180
    static let completionAnimationDuration = 0.24
    static let completionAnimationDelayMs: UInt64 = 240

    /// Furthest the content may travel: 46% of width, clamped to 156...240.
    static func maxTravel(for width: CGFloat) -> CGFloat {
        min(240, max(156, width * 0.46))
    }

    /// Distance at which the gesture latches: 28% of width, clamped to 112...152.
    static func latchDistance(for width: CGFloat) -> CGFloat {
        min(152, max(112, width * 0.28))
    }

    /// Restricts a raw pan translation to the allowed range [-maxTravel, 0].
    static func clampedOffset(for rawTranslation: CGFloat, width: CGFloat) -> CGFloat {
        let travel = maxTravel(for: width)
        if rawTranslation > 0 {
            return 0
        }
        if rawTranslation < -travel {
            return -travel
        }
        return rawTranslation
    }

    /// Fraction of max travel covered so far, clamped to 0...1.
    static func progress(for offset: CGFloat, width: CGFloat) -> CGFloat {
        let travel = maxTravel(for: width)
        guard travel > 0 else {
            return 0
        }
        let fraction = abs(offset) / travel
        return min(1, max(0, fraction))
    }

    /// Content blur scales linearly with progress, peaking at 10pt.
    static func blurRadius(for offset: CGFloat, width: CGFloat) -> CGFloat {
        10 * progress(for: offset, width: width)
    }

    /// Whether a pan should begin: requires some leftward motion, then either
    /// enough leftward travel or enough leftward velocity, each dominating the
    /// vertical component by `directionDominanceRatio`.
    static func shouldBeginPan(
        leftwardTravel: CGFloat,
        verticalTravel: CGFloat,
        leftwardVelocity: CGFloat,
        verticalVelocity: CGFloat
    ) -> Bool {
        if leftwardTravel <= 0 && leftwardVelocity <= 0 {
            return false
        }
        let travelQualifies = leftwardTravel >= horizontalActivationDistance
            && leftwardTravel >= verticalTravel * directionDominanceRatio
        if travelQualifies {
            return true
        }
        return leftwardVelocity >= minimumLeftwardVelocity
            && leftwardVelocity >= verticalVelocity * directionDominanceRatio
    }

    /// Distance below which an already-latched gesture unlatches (hysteresis).
    static func latchReleaseDistance(for width: CGFloat) -> CGFloat {
        max(latchDistance(for: width) - latchHysteresis, horizontalActivationDistance)
    }

    /// Latch check with hysteresis: once latched, a shorter distance keeps it latched.
    static func isLatched(offset: CGFloat, width: CGFloat, isCurrentlyLatched: Bool = false) -> Bool {
        let distance = abs(offset)
        let threshold = isCurrentlyLatched
            ? latchReleaseDistance(for: width)
            : latchDistance(for: width)
        return distance >= threshold
    }
}
/// Installs a window-level `UIPanGestureRecognizer` that drives the
/// swipe-to-create-chat gesture. A non-interactive marker view anchors the
/// gesture's active region and bridges SwiftUI state into UIKit callbacks.
private struct NewChatSwipePanInstaller: UIViewRepresentable {
    var isEnabled: Bool
    var onBegan: (CGFloat) -> Void
    var onChanged: (CGFloat, CGFloat) -> Void
    var onEnded: (CGFloat, CGFloat, Bool) -> Void

    func makeCoordinator() -> Coordinator {
        Coordinator()
    }

    func makeUIView(context: Context) -> InstallerView {
        let view = InstallerView()
        // The marker never handles touches itself; it only defines the hit area.
        view.isUserInteractionEnabled = false
        view.coordinator = context.coordinator
        context.coordinator.markerView = view
        return view
    }

    func updateUIView(_ uiView: InstallerView, context: Context) {
        // Push the latest SwiftUI closures and enablement into the coordinator.
        context.coordinator.update(
            isEnabled: isEnabled,
            onBegan: onBegan,
            onChanged: onChanged,
            onEnded: onEnded
        )
        context.coordinator.markerView = uiView
        context.coordinator.installIfPossible()
    }

    static func dismantleUIView(_ uiView: InstallerView, coordinator: Coordinator) {
        coordinator.detach()
    }

    /// Invisible marker view; forwards lifecycle events so the window-level
    /// recognizer is (re)installed whenever the view (re)joins a window.
    final class InstallerView: UIView {
        weak var coordinator: Coordinator?

        override func didMoveToWindow() {
            super.didMoveToWindow()
            coordinator?.markerView = self
            coordinator?.installIfPossible()
        }

        override func layoutSubviews() {
            super.layoutSubviews()
            // Re-scan after layout: scroll views may have moved into the marker's frame.
            coordinator?.configureScrollViewFailureRequirements()
        }
    }

    final class Coordinator: NSObject, UIGestureRecognizerDelegate {
        weak var markerView: UIView?
        private weak var installedWindow: UIWindow?
        private let panGesture = UIPanGestureRecognizer()
        // Scroll-view pan recognizers already wired with a failure requirement.
        private var preparedScrollRecognizers: Set<ObjectIdentifier> = []
        private var isEnabled = false
        private var onBegan: (CGFloat) -> Void = { _ in }
        private var onChanged: (CGFloat, CGFloat) -> Void = { _, _ in }
        private var onEnded: (CGFloat, CGFloat, Bool) -> Void = { _, _, _ in }

        override init() {
            super.init()
            panGesture.addTarget(self, action: #selector(handlePan(_:)))
            panGesture.cancelsTouchesInView = true
            panGesture.delaysTouchesBegan = false
            panGesture.delaysTouchesEnded = false
            panGesture.delegate = self
        }

        /// Replaces the stored callbacks/state; called on every SwiftUI update.
        func update(
            isEnabled: Bool,
            onBegan: @escaping (CGFloat) -> Void,
            onChanged: @escaping (CGFloat, CGFloat) -> Void,
            onEnded: @escaping (CGFloat, CGFloat, Bool) -> Void
        ) {
            self.isEnabled = isEnabled
            self.onBegan = onBegan
            self.onChanged = onChanged
            self.onEnded = onEnded
            panGesture.isEnabled = isEnabled
            configureScrollViewFailureRequirements()
        }

        /// Attaches the pan recognizer to the marker's window, moving it if
        /// the marker has been re-hosted in a different window.
        func installIfPossible() {
            guard let window = markerView?.window else {
                detach()
                return
            }
            guard installedWindow !== window else {
                configureScrollViewFailureRequirements()
                return
            }
            installedWindow?.removeGestureRecognizer(panGesture)
            window.addGestureRecognizer(panGesture)
            installedWindow = window
            configureScrollViewFailureRequirements()
        }

        /// Removes the recognizer and forgets prepared scroll recognizers.
        func detach() {
            installedWindow?.removeGestureRecognizer(panGesture)
            installedWindow = nil
            preparedScrollRecognizers = []
        }

        /// Makes scroll views overlapping the marker yield to our pan
        /// recognizer (their pan only begins if ours fails). Each scroll
        /// recognizer is wired at most once.
        func configureScrollViewFailureRequirements() {
            guard isEnabled, let markerView, let window = markerView.window else {
                return
            }
            let markerFrame = markerView.convert(markerView.bounds, to: window)
            for scrollView in window.sybilDescendantScrollViews {
                let recognizerID = ObjectIdentifier(scrollView.panGestureRecognizer)
                guard !preparedScrollRecognizers.contains(recognizerID) else {
                    continue
                }
                let scrollFrame = scrollView.convert(scrollView.bounds, to: window)
                if scrollFrame.intersects(markerFrame) {
                    scrollView.panGestureRecognizer.require(toFail: panGesture)
                    preparedScrollRecognizers.insert(recognizerID)
                }
            }
        }

        /// Maps recognizer state changes onto the begin/change/end callbacks;
        /// cancellation and failure report `didFinish == false`.
        @objc private func handlePan(_ recognizer: UIPanGestureRecognizer) {
            guard let markerView else {
                return
            }
            let width = max(markerView.bounds.width, 1)
            let translationX = recognizer.translation(in: markerView).x
            switch recognizer.state {
            case .began:
                onBegan(width)
                onChanged(translationX, width)
            case .changed:
                onChanged(translationX, width)
            case .ended:
                onEnded(translationX, width, true)
            case .cancelled, .failed:
                onEnded(translationX, width, false)
            case .possible:
                break
            @unknown default:
                onEnded(translationX, width, false)
            }
        }

        // Only accept touches that land inside the marker's bounds.
        func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldReceive touch: UITouch) -> Bool {
            guard isEnabled, gestureRecognizer === panGesture, let markerView else {
                return false
            }
            return markerView.bounds.contains(touch.location(in: markerView))
        }

        // Delegate direction/threshold gating to the shared metrics.
        func gestureRecognizerShouldBegin(_ gestureRecognizer: UIGestureRecognizer) -> Bool {
            guard isEnabled, gestureRecognizer === panGesture, let markerView else {
                return false
            }
            let translation = panGesture.translation(in: markerView)
            let velocity = panGesture.velocity(in: markerView)
            return NewChatSwipeMetrics.shouldBeginPan(
                leftwardTravel: max(-translation.x, 0),
                verticalTravel: abs(translation.y),
                leftwardVelocity: max(-velocity.x, 0),
                verticalVelocity: abs(velocity.y)
            )
        }

        // Exclusive recognition: scrolling and swiping never run together.
        func gestureRecognizer(
            _ gestureRecognizer: UIGestureRecognizer,
            shouldRecognizeSimultaneouslyWith otherGestureRecognizer: UIGestureRecognizer
        ) -> Bool {
            false
        }
    }
}
private extension UIView {
    /// Every `UIScrollView` in this view's subtree (including self), in
    /// pre-order: a view precedes its descendants.
    var sybilDescendantScrollViews: [UIScrollView] {
        let selfMatch: [UIScrollView] = (self as? UIScrollView).map { [$0] } ?? []
        return selfMatch + subviews.flatMap { $0.sybilDescendantScrollViews }
    }

    /// Appends all scroll views in this subtree to `scrollViews`, pre-order.
    func collectSybilScrollViews(into scrollViews: inout [UIScrollView]) {
        if let scrollView = self as? UIScrollView {
            scrollViews.append(scrollView)
        }
        subviews.forEach { $0.collectSybilScrollViews(into: &scrollViews) }
    }
}
/// Decorative "new chat" indicator revealed behind the content as the user
/// swipes left. `progress` (0...1) drives opacity and slide-in; `hasLatched`
/// switches the tint and swaps the plus icon for a checkmark.
private struct NewChatSwipeBackdrop: View {
    var progress: CGFloat
    var hasLatched: Bool

    // Defensive clamp so out-of-range progress cannot over/under-shoot opacity.
    private var clampedProgress: CGFloat {
        min(max(progress, 0), 1)
    }

    var body: some View {
        ZStack(alignment: .trailing) {
            // Large soft glow bleeding off the trailing edge.
            Circle()
                .fill((hasLatched ? SybilTheme.accent : SybilTheme.primary).opacity(0.16 + (0.18 * clampedProgress)))
                .frame(width: 176, height: 176)
                .blur(radius: 44)
                .offset(x: 38, y: 18)
            ZStack {
                // Main badge with gradient fill, ring, and progress-scaled shadow.
                Circle()
                    .fill(
                        RadialGradient(
                            colors: [
                                (hasLatched ? SybilTheme.accent : SybilTheme.primary).opacity(0.28),
                                SybilTheme.surface.opacity(0.78)
                            ],
                            center: .topLeading,
                            startRadius: 8,
                            endRadius: 58
                        )
                    )
                    .overlay(
                        Circle()
                            .stroke(
                                (hasLatched ? SybilTheme.accent : SybilTheme.primary).opacity(0.44 + (0.24 * clampedProgress)),
                                lineWidth: 1
                            )
                    )
                    .shadow(
                        color: (hasLatched ? SybilTheme.accent : SybilTheme.primary).opacity(0.24 + (0.20 * clampedProgress)),
                        radius: 24,
                        x: 0,
                        y: 0
                    )
                // Subtle blurred angular sheen inside the badge.
                Circle()
                    .fill(
                        AngularGradient(
                            colors: [
                                (hasLatched ? SybilTheme.accent : SybilTheme.primary).opacity(0.20),
                                Color.clear,
                                (hasLatched ? SybilTheme.accent : SybilTheme.primary).opacity(0.34)
                            ],
                            center: .center
                        )
                    )
                    .frame(width: 72, height: 72)
                    .blur(radius: 10)
                // Plus becomes a checkmark (with a bounce) once latched.
                Image(systemName: hasLatched ? "checkmark" : "plus")
                    .font(.system(size: 31, weight: .bold, design: .rounded))
                    .foregroundStyle(SybilTheme.text)
                    .symbolEffect(.bounce, value: hasLatched)
                Image(systemName: "sparkle")
                    .font(.system(size: 11, weight: .semibold))
                    .foregroundStyle((hasLatched ? SybilTheme.accent : SybilTheme.primary).opacity(0.90))
                    .offset(x: -26, y: -25)
            }
            .frame(width: 92, height: 92)
            .background(
                Circle()
                    .fill(SybilTheme.surface.opacity(0.42))
                    .blur(radius: 16)
            )
        }
        .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .trailing)
        .opacity(clampedProgress)
        .offset(x: (1 - clampedProgress) * 28)
        .animation(.easeOut(duration: 0.16), value: hasLatched)
        // Purely decorative; hidden from assistive tech.
        .accessibilityHidden(true)
// NOTE(review): the duplicated closing braces below look like diff-render
// artifacts; verify the real file closes body/struct with single braces.
} }
} }

View File

@@ -1,6 +1,286 @@
import CoreGraphics
import Foundation
import Testing import Testing
@testable import Sybil @testable import Sybil
@Test func example() async throws { private struct MockClientCallSnapshot: Sendable {
// Write your test here and use APIs like `#expect(...)` to check expected conditions. var listChats = 0
var listSearches = 0
var getChat = 0
var getSearch = 0
}
private struct UnexpectedClientCall: Error {}
/// Scripted `SybilAPIClienting` for view-model tests: list/get endpoints
/// serve injected fixtures and record call counts; every mutating or
/// streaming endpoint throws `UnexpectedClientCall` so unintended traffic
/// fails the test.
private actor MockSybilClient: SybilAPIClienting {
    private let chatsResponse: [ChatSummary]
    private let searchesResponse: [SearchSummary]
    private let chatDetails: [String: ChatDetail]
    private let searchDetails: [String: SearchDetail]
    // Running call counts, readable via currentSnapshot().
    private var snapshot = MockClientCallSnapshot()

    init(
        chatsResponse: [ChatSummary] = [],
        searchesResponse: [SearchSummary] = [],
        chatDetails: [String: ChatDetail] = [:],
        searchDetails: [String: SearchDetail] = [:]
    ) {
        self.chatsResponse = chatsResponse
        self.searchesResponse = searchesResponse
        self.chatDetails = chatDetails
        self.searchDetails = searchDetails
    }

    /// Returns the call counts accumulated so far.
    func currentSnapshot() -> MockClientCallSnapshot {
        snapshot
    }

    // Always reports an authenticated open session; not counted.
    func verifySession() async throws -> AuthSession {
        AuthSession(authenticated: true, mode: "open")
    }

    func listChats() async throws -> [ChatSummary] {
        snapshot.listChats += 1
        return chatsResponse
    }

    func createChat(title: String?) async throws -> ChatSummary {
        throw UnexpectedClientCall()
    }

    // Throws when the requested chat has no injected fixture.
    func getChat(chatID: String) async throws -> ChatDetail {
        snapshot.getChat += 1
        guard let detail = chatDetails[chatID] else {
            throw UnexpectedClientCall()
        }
        return detail
    }

    func deleteChat(chatID: String) async throws {
        throw UnexpectedClientCall()
    }

    func suggestChatTitle(chatID: String, content: String) async throws -> ChatSummary {
        throw UnexpectedClientCall()
    }

    func listSearches() async throws -> [SearchSummary] {
        snapshot.listSearches += 1
        return searchesResponse
    }

    func createSearch(title: String?, query: String?) async throws -> SearchSummary {
        throw UnexpectedClientCall()
    }

    // Throws when the requested search has no injected fixture.
    func getSearch(searchID: String) async throws -> SearchDetail {
        snapshot.getSearch += 1
        guard let detail = searchDetails[searchID] else {
            throw UnexpectedClientCall()
        }
        return detail
    }

    func createChatFromSearch(searchID: String, title: String?) async throws -> ChatSummary {
        throw UnexpectedClientCall()
    }

    func deleteSearch(searchID: String) async throws {
        throw UnexpectedClientCall()
    }

    // Empty catalog; not counted.
    func listModels() async throws -> ModelCatalogResponse {
        ModelCatalogResponse(providers: [:])
    }

    func runCompletionStream(
        body: CompletionStreamRequest,
        onEvent: @escaping @Sendable (CompletionStreamEvent) async -> Void
    ) async throws {
        throw UnexpectedClientCall()
    }

    func runSearchStream(
        searchID: String,
        body: SearchRunRequest,
        onEvent: @escaping @Sendable (SearchStreamEvent) async -> Void
    ) async throws {
        throw UnexpectedClientCall()
    }
}
/// Creates an isolated settings store backed by a throwaway `UserDefaults`
/// suite, pre-pointed at the local dev API endpoint.
@MainActor
private func testSettings(named name: String) -> SybilSettingsStore {
    let suiteDefaults = UserDefaults(suiteName: name)!
    // Wipe any state left behind by a previous run of the same suite.
    suiteDefaults.removePersistentDomain(forName: name)
    let store = SybilSettingsStore(defaults: suiteDefaults)
    store.apiBaseURL = "http://127.0.0.1:8787"
    return store
}
// Fixture: a chat summary whose title embeds the ID and whose timestamps match.
private func makeChatSummary(id: String, date: Date) -> ChatSummary {
    let model = "gpt-4.1-mini"
    return ChatSummary(
        id: id,
        title: "Chat \(id)",
        createdAt: date,
        updatedAt: date,
        initiatedProvider: .openai,
        initiatedModel: model,
        lastUsedProvider: .openai,
        lastUsedModel: model
    )
}
// Fixture: a chat detail carrying a single assistant message with `body` as
// its content, so tests can assert on transcript text after a refresh.
private func makeChatDetail(id: String, date: Date, body: String) -> ChatDetail {
    let model = "gpt-4.1-mini"
    let assistantMessage = Message(
        id: "message-\(id)",
        createdAt: date,
        role: .assistant,
        content: body,
        name: nil
    )
    return ChatDetail(
        id: id,
        title: "Chat \(id)",
        createdAt: date,
        updatedAt: date,
        initiatedProvider: .openai,
        initiatedModel: model,
        lastUsedProvider: .openai,
        lastUsedModel: model,
        messages: [assistantMessage]
    )
}
// Fixture: a search summary whose title and query both embed the ID.
private func makeSearchSummary(id: String, date: Date) -> SearchSummary {
    return SearchSummary(
        id: id,
        title: "Search \(id)",
        query: "query-\(id)",
        createdAt: date,
        updatedAt: date
    )
}
// Fixture: a search detail with `answer` as the answer text and no results or
// errors, so tests can assert on the refreshed answer after a reload.
private func makeSearchDetail(id: String, date: Date, answer: String) -> SearchDetail {
    return SearchDetail(
        id: id,
        title: "Search \(id)",
        query: "query-\(id)",
        createdAt: date,
        updatedAt: date,
        requestId: "request-\(id)",
        latencyMs: 42,
        error: nil,
        answerText: answer,
        answerRequestId: "answer-\(id)",
        answerCitations: [],
        answerError: nil,
        results: []
    )
}
@MainActor
@Test func normalizedAPIBaseURLPreservesExplicitAPIPath() async throws {
    // Reuse the shared testSettings fixture instead of duplicating the
    // UserDefaults suite setup inline; the helper's default base URL is
    // immediately overwritten with the value under test.
    let settings = testSettings(named: #function)
    settings.apiBaseURL = "https://sybil.bajor.cloud/api/"
    // Normalization should keep an explicit /api path, dropping only the trailing slash.
    #expect(settings.normalizedAPIBaseURL?.absoluteString == "https://sybil.bajor.cloud/api")
}
@MainActor
@Test func normalizedAPIBaseURLTrimsWhitespaceAndTrailingSlashes() async throws {
    // Reuse the shared testSettings fixture instead of duplicating the
    // UserDefaults suite setup inline; the helper's default base URL is
    // immediately overwritten with the value under test.
    let settings = testSettings(named: #function)
    settings.apiBaseURL = " http://127.0.0.1:8787/// "
    // Normalization should strip surrounding whitespace and every trailing slash.
    #expect(settings.normalizedAPIBaseURL?.absoluteString == "http://127.0.0.1:8787")
}
// A collections-only refresh must reload the chat and search lists exactly
// once each, without re-fetching the currently selected (hidden) chat detail.
@MainActor
@Test func foregroundListRefreshDoesNotReloadHiddenSelection() async throws {
    let timestamp = Date(timeIntervalSince1970: 1_700_000_000)
    let chatSummary = makeChatSummary(id: "chat-1", date: timestamp)
    let searchSummary = makeSearchSummary(id: "search-1", date: timestamp)
    let mockClient = MockSybilClient(
        chatsResponse: [chatSummary],
        searchesResponse: [searchSummary],
        chatDetails: ["chat-1": makeChatDetail(id: "chat-1", date: timestamp, body: "fresh chat body")]
    )
    let viewModel = SybilViewModel(settings: testSettings(named: #function)) { _ in mockClient }
    viewModel.isAuthenticated = true
    viewModel.isCheckingSession = false
    viewModel.selectedItem = .chat("chat-1")
    await viewModel.refreshVisibleContent(refreshCollections: true, refreshSelection: false)
    let calls = await mockClient.currentSnapshot()
    #expect(calls.listChats == 1)
    #expect(calls.listSearches == 1)
    #expect(calls.getChat == 0)
    #expect(calls.getSearch == 0)
    // The selection itself stays untouched by the list refresh.
    #expect(viewModel.selectedItem == .chat("chat-1"))
}
// A selection-only refresh must fetch the selected chat's detail exactly once
// (no list reloads) and surface the refreshed transcript in the view model.
@MainActor
@Test func foregroundChatRefreshReloadsSelectedTranscript() async throws {
    let timestamp = Date(timeIntervalSince1970: 1_700_000_100)
    let chatDetail = makeChatDetail(id: "chat-2", date: timestamp, body: "refreshed transcript")
    let mockClient = MockSybilClient(chatDetails: ["chat-2": chatDetail])
    let viewModel = SybilViewModel(settings: testSettings(named: #function)) { _ in mockClient }
    viewModel.isAuthenticated = true
    viewModel.isCheckingSession = false
    viewModel.selectedItem = .chat("chat-2")
    await viewModel.refreshVisibleContent(refreshCollections: false, refreshSelection: true)
    let calls = await mockClient.currentSnapshot()
    #expect(calls.listChats == 0)
    #expect(calls.listSearches == 0)
    #expect(calls.getChat == 1)
    #expect(viewModel.selectedChat?.messages.first?.content == "refreshed transcript")
}
// A selection-only refresh must fetch the selected search's detail exactly
// once (no list reloads) and surface the refreshed answer in the view model.
@MainActor
@Test func foregroundSearchRefreshReloadsSelectedSearch() async throws {
    let timestamp = Date(timeIntervalSince1970: 1_700_000_200)
    let searchDetail = makeSearchDetail(id: "search-2", date: timestamp, answer: "fresh answer")
    let mockClient = MockSybilClient(searchDetails: ["search-2": searchDetail])
    let viewModel = SybilViewModel(settings: testSettings(named: #function)) { _ in mockClient }
    viewModel.isAuthenticated = true
    viewModel.isCheckingSession = false
    viewModel.selectedItem = .search("search-2")
    await viewModel.refreshVisibleContent(refreshCollections: false, refreshSelection: true)
    let calls = await mockClient.currentSnapshot()
    #expect(calls.listChats == 0)
    #expect(calls.listSearches == 0)
    #expect(calls.getSearch == 1)
    #expect(viewModel.selectedSearch?.answerText == "fresh answer")
}
// Exercises NewChatSwipeMetrics at a fixed 390pt width: offset clamping,
// progress/blur mapping, latch hysteresis, and pan-begin gating.
@Test func newChatSwipeMetricsClampProgressAndLatch() async throws {
    let screenWidth: CGFloat = 390
    let travelLimit = NewChatSwipeMetrics.maxTravel(for: screenWidth)
    let latchThreshold = NewChatSwipeMetrics.latchDistance(for: screenWidth)
    // Offsets beyond the travel limit clamp; half travel maps to 0.5 progress;
    // full travel maps to the maximum blur radius.
    #expect(NewChatSwipeMetrics.clampedOffset(for: -500, width: screenWidth) == -travelLimit)
    #expect(NewChatSwipeMetrics.progress(for: -travelLimit / 2, width: screenWidth) == 0.5)
    #expect(NewChatSwipeMetrics.blurRadius(for: -travelLimit, width: screenWidth) == 10)
    // Latching engages past the latch distance and, once latched, only
    // releases below the (smaller) release distance — hysteresis.
    #expect(NewChatSwipeMetrics.isLatched(offset: -(latchThreshold + 1), width: screenWidth))
    #expect(!NewChatSwipeMetrics.isLatched(offset: -(latchThreshold - 1), width: screenWidth))
    #expect(NewChatSwipeMetrics.isLatched(offset: -(latchThreshold - 1), width: screenWidth, isCurrentlyLatched: true))
    #expect(!NewChatSwipeMetrics.isLatched(offset: -(NewChatSwipeMetrics.latchReleaseDistance(for: screenWidth) - 1), width: screenWidth, isCurrentlyLatched: true))
    // Pan begins on dominant leftward travel or velocity, not vertical motion.
    #expect(NewChatSwipeMetrics.shouldBeginPan(leftwardTravel: 24, verticalTravel: 8, leftwardVelocity: 0, verticalVelocity: 0))
    #expect(NewChatSwipeMetrics.shouldBeginPan(leftwardTravel: 2, verticalTravel: 1, leftwardVelocity: 120, verticalVelocity: 30))
    #expect(!NewChatSwipeMetrics.shouldBeginPan(leftwardTravel: 8, verticalTravel: 24, leftwardVelocity: 20, verticalVelocity: 140))
    #expect(!NewChatSwipeMetrics.shouldBeginPan(leftwardTravel: 18, verticalTravel: 18, leftwardVelocity: 80, verticalVelocity: 90))
}
}

View File

@@ -1,7 +1,7 @@
# Sybil Server # Sybil Server
Backend API for: Backend API for:
- LLM multiplexer (OpenAI / Anthropic / xAI (Grok)) - LLM multiplexer (OpenAI Responses / Anthropic / xAI Chat Completions-compatible Grok)
- Personal chat database (chats/messages + LLM call log) - Personal chat database (chats/messages + LLM call log)
## Stack ## Stack
@@ -44,6 +44,19 @@ If `ADMIN_TOKEN` is not set, the server runs in open mode (dev).
- `ANTHROPIC_API_KEY` - `ANTHROPIC_API_KEY`
- `XAI_API_KEY` - `XAI_API_KEY`
- `EXA_API_KEY` - `EXA_API_KEY`
- `CHAT_WEB_SEARCH_ENGINE` (`exa` by default, or `searxng` for chat tool calls only)
- `SEARXNG_BASE_URL` (required when `CHAT_WEB_SEARCH_ENGINE=searxng`; instance must allow `format=json`)
- `CHAT_MAX_TOOL_ROUNDS` (`100` by default; maximum model/tool result cycles per chat completion)
- `CHAT_CODEX_TOOL_ENABLED` (`false` by default; enables the `codex_exec` chat tool for OpenAI/xAI)
- `CHAT_CODEX_REMOTE_HOST` (required when Codex tool is enabled; SSH host/IP or `user@host`)
- `CHAT_CODEX_REMOTE_USER` (optional SSH user when host does not include one)
- `CHAT_CODEX_REMOTE_PORT` (`22` by default)
- `CHAT_CODEX_REMOTE_WORKDIR` (`/workspace/sybil-codex` by default; created and reused on the devbox)
- `CHAT_CODEX_SSH_KEY_PATH` (recommended: path to a read-only mounted private key)
- `CHAT_CODEX_SSH_PRIVATE_KEY_B64` (optional fallback private key delivery)
- `CHAT_CODEX_EXEC_TIMEOUT_MS` (`600000` by default)
- `CHAT_SHELL_TOOL_ENABLED` (`false` by default; enables the `shell_exec` chat tool for OpenAI/xAI on the same devbox)
- `CHAT_SHELL_EXEC_TIMEOUT_MS` (`120000` by default)
## API ## API
- `GET /health` - `GET /health`

View File

@@ -11,6 +11,7 @@
"prebuild": "node scripts/ensure-prisma-client.mjs", "prebuild": "node scripts/ensure-prisma-client.mjs",
"dev": "node ./node_modules/tsx/dist/cli.mjs watch src/index.ts", "dev": "node ./node_modules/tsx/dist/cli.mjs watch src/index.ts",
"start": "node dist/index.js", "start": "node dist/index.js",
"test": "node --test --import tsx tests/**/*.test.ts",
"build": "node ./node_modules/typescript/bin/tsc -p tsconfig.json", "build": "node ./node_modules/typescript/bin/tsc -p tsconfig.json",
"prisma:generate": "node ./node_modules/prisma/build/index.js generate", "prisma:generate": "node ./node_modules/prisma/build/index.js generate",
"db:migrate": "node ./node_modules/prisma/build/index.js migrate dev", "db:migrate": "node ./node_modules/prisma/build/index.js migrate dev",

View File

@@ -1,5 +1,52 @@
import path from "node:path";
import { fileURLToPath } from "node:url";
import { config as loadDotenv } from "dotenv";
import { z } from "zod"; import { z } from "zod";
import "dotenv/config";
loadDotenv({ quiet: true });
loadDotenv({ path: path.resolve(path.dirname(fileURLToPath(import.meta.url)), "../../.env"), quiet: true });
const OptionalUrlSchema = z.preprocess(
(value) => (typeof value === "string" && value.trim() === "" ? undefined : value),
z.string().trim().url().optional()
);
const ChatWebSearchEngineSchema = z.preprocess(
(value) => {
if (typeof value !== "string") return value;
const trimmed = value.trim();
return trimmed ? trimmed.toLowerCase() : undefined;
},
z.enum(["exa", "searxng"]).default("exa")
);
const BooleanFlagSchema = z.preprocess((value) => {
if (typeof value !== "string") return value;
const normalized = value.trim().toLowerCase();
if (!normalized) return undefined;
if (["1", "true", "yes", "on"].includes(normalized)) return true;
if (["0", "false", "no", "off"].includes(normalized)) return false;
return value;
}, z.boolean().default(false));
const OptionalTrimmedStringSchema = z.preprocess(
(value) => (typeof value === "string" && value.trim() === "" ? undefined : value),
z.string().trim().min(1).optional()
);
function defaultedPositiveInt(defaultValue: number) {
return z.preprocess(
(value) => (typeof value === "string" && value.trim() === "" ? undefined : value),
z.coerce.number().int().positive().default(defaultValue)
);
}
function defaultedTrimmedString(defaultValue: string) {
return z.preprocess(
(value) => (typeof value === "string" && value.trim() === "" ? undefined : value),
z.string().trim().min(1).default(defaultValue)
);
}
const EnvSchema = z.object({ const EnvSchema = z.object({
PORT: z.coerce.number().int().positive().default(8787), PORT: z.coerce.number().int().positive().default(8787),
@@ -13,6 +60,42 @@ const EnvSchema = z.object({
ANTHROPIC_API_KEY: z.string().optional(), ANTHROPIC_API_KEY: z.string().optional(),
XAI_API_KEY: z.string().optional(), XAI_API_KEY: z.string().optional(),
EXA_API_KEY: z.string().optional(), EXA_API_KEY: z.string().optional(),
// Chat-mode web_search tool configuration. Search mode remains Exa-only for now.
CHAT_WEB_SEARCH_ENGINE: ChatWebSearchEngineSchema,
SEARXNG_BASE_URL: OptionalUrlSchema,
CHAT_MAX_TOOL_ROUNDS: defaultedPositiveInt(100),
// Optional chat-mode Codex tool. When enabled, the server SSHes into a remote
// devbox and runs `codex exec` in a persistent scratch directory there.
CHAT_CODEX_TOOL_ENABLED: BooleanFlagSchema,
CHAT_CODEX_REMOTE_HOST: OptionalTrimmedStringSchema,
CHAT_CODEX_REMOTE_USER: OptionalTrimmedStringSchema,
CHAT_CODEX_REMOTE_PORT: defaultedPositiveInt(22),
CHAT_CODEX_REMOTE_WORKDIR: defaultedTrimmedString("/workspace/sybil-codex"),
CHAT_CODEX_SSH_KEY_PATH: OptionalTrimmedStringSchema,
CHAT_CODEX_SSH_PRIVATE_KEY_B64: OptionalTrimmedStringSchema,
CHAT_CODEX_EXEC_TIMEOUT_MS: defaultedPositiveInt(600_000),
// Optional arbitrary shell tool that runs only on the configured devbox.
CHAT_SHELL_TOOL_ENABLED: BooleanFlagSchema,
CHAT_SHELL_EXEC_TIMEOUT_MS: defaultedPositiveInt(120_000),
}).superRefine((value, ctx) => {
if (value.CHAT_WEB_SEARCH_ENGINE === "searxng" && !value.SEARXNG_BASE_URL) {
ctx.addIssue({
code: "custom",
path: ["SEARXNG_BASE_URL"],
message: "SEARXNG_BASE_URL is required when CHAT_WEB_SEARCH_ENGINE=searxng",
});
}
if ((value.CHAT_CODEX_TOOL_ENABLED || value.CHAT_SHELL_TOOL_ENABLED) && !value.CHAT_CODEX_REMOTE_HOST) {
ctx.addIssue({
code: "custom",
path: ["CHAT_CODEX_REMOTE_HOST"],
message: "CHAT_CODEX_REMOTE_HOST is required when CHAT_CODEX_TOOL_ENABLED=true or CHAT_SHELL_TOOL_ENABLED=true",
});
}
}); });
export type Env = z.infer<typeof EnvSchema>; export type Env = z.infer<typeof EnvSchema>;

View File

@@ -9,6 +9,7 @@ import { warmModelCatalog } from "./llm/model-catalog.js";
import { registerRoutes } from "./routes.js"; import { registerRoutes } from "./routes.js";
const app = Fastify({ const app = Fastify({
bodyLimit: 32 * 1024 * 1024,
disableRequestLogging: true, disableRequestLogging: true,
logger: { logger: {
transport: { transport: {

View File

@@ -1,15 +1,33 @@
import { execFile } from "node:child_process";
import { mkdtemp, rm, writeFile } from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { promisify } from "node:util";
import { convert as htmlToText } from "html-to-text"; import { convert as htmlToText } from "html-to-text";
import type OpenAI from "openai"; import type OpenAI from "openai";
import { z } from "zod"; import { z } from "zod";
import { env } from "../env.js";
import { exaClient } from "../search/exa.js"; import { exaClient } from "../search/exa.js";
import { searchSearxng } from "../search/searxng.js";
import { buildOpenAIConversationMessage, buildOpenAIResponsesInputMessage } from "./message-content.js";
import type { ChatMessage } from "./types.js"; import type { ChatMessage } from "./types.js";
const MAX_TOOL_ROUNDS = 4; const MAX_TOOL_ROUNDS = env.CHAT_MAX_TOOL_ROUNDS;
const DEFAULT_WEB_RESULTS = 5; const DEFAULT_WEB_RESULTS = 5;
const MAX_WEB_RESULTS = 10; const MAX_WEB_RESULTS = 10;
const DEFAULT_FETCH_MAX_CHARACTERS = 12_000; const DEFAULT_FETCH_MAX_CHARACTERS = 12_000;
const MAX_FETCH_MAX_CHARACTERS = 50_000; const MAX_FETCH_MAX_CHARACTERS = 50_000;
const FETCH_TIMEOUT_MS = 12_000; const FETCH_TIMEOUT_MS = 12_000;
const MAX_CODEX_PROMPT_CHARACTERS = 60_000;
const DEFAULT_CODEX_MAX_OUTPUT_CHARACTERS = 24_000;
const MAX_CODEX_MAX_OUTPUT_CHARACTERS = 80_000;
const MAX_SHELL_COMMAND_CHARACTERS = 20_000;
const DEFAULT_SHELL_MAX_OUTPUT_CHARACTERS = 24_000;
const MAX_SHELL_MAX_OUTPUT_CHARACTERS = 80_000;
const REMOTE_EXEC_MAX_BUFFER_BYTES = 1_000_000;
const MAX_DANGLING_TOOL_INTENT_RETRIES = 1;
const execFileAsync = promisify(execFile);
const WebSearchArgsSchema = z const WebSearchArgsSchema = z
.object({ .object({
@@ -21,6 +39,8 @@ const WebSearchArgsSchema = z
}) })
.strict(); .strict();
type WebSearchArgs = z.infer<typeof WebSearchArgsSchema>;
const FetchUrlArgsSchema = z const FetchUrlArgsSchema = z
.object({ .object({
url: z.string().trim().url(), url: z.string().trim().url(),
@@ -28,7 +48,79 @@ const FetchUrlArgsSchema = z
}) })
.strict(); .strict();
const CHAT_TOOLS: any[] = [ const CodexExecArgsSchema = z
.object({
prompt: z.string().trim().min(1).max(MAX_CODEX_PROMPT_CHARACTERS),
maxCharacters: z.coerce.number().int().min(1_000).max(MAX_CODEX_MAX_OUTPUT_CHARACTERS).optional(),
})
.strict();
type CodexExecArgs = z.infer<typeof CodexExecArgsSchema>;
const ShellExecArgsSchema = z
.object({
command: z.string().trim().min(1).max(MAX_SHELL_COMMAND_CHARACTERS),
maxCharacters: z.coerce.number().int().min(1_000).max(MAX_SHELL_MAX_OUTPUT_CHARACTERS).optional(),
})
.strict();
type ShellExecArgs = z.infer<typeof ShellExecArgsSchema>;
const CODEX_EXEC_TOOL = {
type: "function",
function: {
name: "codex_exec",
description:
"Delegate a coding, terminal, or multi-step software task to a persistent remote Codex CLI workspace. Use for complex code changes, repository inspection, running programs/tests, debugging build failures, or other tasks that need a real shell. The task runs non-interactively; the remote Codex instance must make reasonable assumptions, complete the task, and return a final summary with relevant stdout/stderr.",
parameters: {
type: "object",
properties: {
prompt: {
type: "string",
description:
"A complete, self-contained instruction for the remote Codex instance. Include the goal, relevant context, constraints, and what result to report back.",
},
maxCharacters: {
type: "integer",
minimum: 1_000,
maximum: MAX_CODEX_MAX_OUTPUT_CHARACTERS,
description: "Maximum stdout/stderr characters returned to the model (default 24000).",
},
},
required: ["prompt"],
additionalProperties: false,
},
},
};
const SHELL_EXEC_TOOL = {
type: "function",
function: {
name: "shell_exec",
description:
"Run an arbitrary non-interactive shell command on the configured remote devbox, starting in the persistent scratch workspace. Use for quick Python scripts, calculations, file inspection, package/tool checks, tests, and command-line work that needs a real shell. This does not run inside the Sybil server container.",
parameters: {
type: "object",
properties: {
command: {
type: "string",
description:
"Shell command to run on the devbox. The command is executed with bash -lc when bash exists, otherwise sh -lc, starting in the persistent scratch workspace.",
},
maxCharacters: {
type: "integer",
minimum: 1_000,
maximum: MAX_SHELL_MAX_OUTPUT_CHARACTERS,
description: "Maximum stdout/stderr characters returned to the model (default 24000).",
},
},
required: ["command"],
additionalProperties: false,
},
},
};
const BASE_CHAT_TOOLS: any[] = [
{ {
type: "function", type: "function",
function: { function: {
@@ -90,10 +182,34 @@ const CHAT_TOOLS: any[] = [
}, },
]; ];
const CHAT_TOOLS: any[] = [
...BASE_CHAT_TOOLS,
...(env.CHAT_CODEX_TOOL_ENABLED ? [CODEX_EXEC_TOOL] : []),
...(env.CHAT_SHELL_TOOL_ENABLED ? [SHELL_EXEC_TOOL] : []),
];
const RESPONSES_CHAT_TOOLS: any[] = CHAT_TOOLS.map((tool) => {
if (tool?.type !== "function") return tool;
return {
type: "function",
name: tool.function.name,
description: tool.function.description,
parameters: tool.function.parameters,
strict: false,
};
});
export const CHAT_TOOL_SYSTEM_PROMPT = export const CHAT_TOOL_SYSTEM_PROMPT =
"You can use tools to gather up-to-date web information when needed. " + "You can use tools to gather up-to-date web information when needed. " +
"Use web_search for discovery and recent facts, and fetch_url to read the full content of a specific page. " + "Use web_search for discovery and recent facts, and fetch_url to read the full content of a specific page. " +
"Prefer tools when the user asks for current events, verification, sources, or details you do not already have. " + "Prefer tools when the user asks for current events, verification, sources, or details you do not already have. " +
"When you decide tool use is needed, call the tool immediately in the same response; do not say you are running a tool unless you actually call it. " +
(env.CHAT_CODEX_TOOL_ENABLED
? "Use codex_exec when a request needs substantial coding work, repository inspection, shell commands, tests, debugging, or another complex task suited to a persistent Codex workspace. Provide codex_exec a complete prompt with the goal, constraints, assumptions, and expected report-back format. Never ask codex_exec to wait for user input or run interactive commands. "
: "") +
(env.CHAT_SHELL_TOOL_ENABLED
? "Use shell_exec for direct non-interactive command-line work on the remote devbox, including quick Python programs, calculations, file inspection, running tests, and small scripts. "
: "") +
"Do not fabricate tool outputs; reason only from provided tool results."; "Do not fabricate tool outputs; reason only from provided tool results.";
type ToolRunOutcome = { type ToolRunOutcome = {
@@ -187,6 +303,24 @@ function buildToolSummary(name: string, args: Record<string, unknown>, status: "
return url ? `Fetching URL ${toSingleLine(url, 140)} failed.${errSuffix}` : `Fetching URL failed.${errSuffix}`; return url ? `Fetching URL ${toSingleLine(url, 140)} failed.${errSuffix}` : `Fetching URL failed.${errSuffix}`;
} }
if (name === "codex_exec") {
const prompt = typeof args.prompt === "string" ? args.prompt.trim() : "";
if (status === "completed") {
return prompt ? `Ran Codex task: '${toSingleLine(prompt, 120)}'.` : "Ran Codex task.";
}
return prompt ? `Codex task '${toSingleLine(prompt, 120)}' failed.${errSuffix}` : `Codex task failed.${errSuffix}`;
}
if (name === "shell_exec") {
const command = typeof args.command === "string" ? args.command.trim() : "";
if (status === "completed") {
return command ? `Ran devbox shell command: '${toSingleLine(command, 120)}'.` : "Ran devbox shell command.";
}
return command
? `Devbox shell command '${toSingleLine(command, 120)}' failed.${errSuffix}`
: `Devbox shell command failed.${errSuffix}`;
}
if (status === "completed") { if (status === "completed") {
return `Ran tool '${name}'.`; return `Ran tool '${name}'.`;
} }
@@ -246,29 +380,18 @@ function extractHtmlTitle(html: string) {
} }
function normalizeIncomingMessages(messages: ChatMessage[]) { function normalizeIncomingMessages(messages: ChatMessage[]) {
const normalized = messages.map((m) => { const normalized = messages.map((message) => buildOpenAIConversationMessage(message));
if (m.role === "tool") {
const name = m.name?.trim() || "tool";
return {
role: "user",
content: `Tool output (${name}):\n${m.content}`,
};
}
if (m.role === "assistant" || m.role === "system" || m.role === "user") {
const out: any = { role: m.role, content: m.content };
if (m.name && (m.role === "assistant" || m.role === "user")) {
out.name = m.name;
}
return out;
}
return { role: "user", content: m.content };
});
return [{ role: "system", content: CHAT_TOOL_SYSTEM_PROMPT }, ...normalized]; return [{ role: "system", content: CHAT_TOOL_SYSTEM_PROMPT }, ...normalized];
} }
async function runWebSearchTool(input: unknown): Promise<ToolRunOutcome> { function normalizeIncomingResponsesInput(messages: ChatMessage[]) {
const args = WebSearchArgsSchema.parse(input); const normalized = messages.map((message) => buildOpenAIResponsesInputMessage(message));
return [{ role: "system", content: CHAT_TOOL_SYSTEM_PROMPT }, ...normalized];
}
async function runExaWebSearchTool(args: WebSearchArgs): Promise<ToolRunOutcome> {
const exa = exaClient(); const exa = exaClient();
const response = await exa.search(args.query, { const response = await exa.search(args.query, {
type: args.type ?? "auto", type: args.type ?? "auto",
@@ -292,6 +415,7 @@ async function runWebSearchTool(input: unknown): Promise<ToolRunOutcome> {
const results = Array.isArray(response?.results) ? response.results : []; const results = Array.isArray(response?.results) ? response.results : [];
return { return {
ok: true, ok: true,
searchEngine: "exa",
query: args.query, query: args.query,
requestId: response?.requestId ?? null, requestId: response?.requestId ?? null,
results: results.map((result: any, index: number) => ({ results: results.map((result: any, index: number) => ({
@@ -309,6 +433,40 @@ async function runWebSearchTool(input: unknown): Promise<ToolRunOutcome> {
}; };
} }
async function runSearxngWebSearchTool(args: WebSearchArgs): Promise<ToolRunOutcome> {
const response = await searchSearxng(args.query, {
numResults: args.numResults ?? DEFAULT_WEB_RESULTS,
includeDomains: args.includeDomains,
excludeDomains: args.excludeDomains,
});
return {
ok: true,
searchEngine: "searxng",
query: args.query,
requestId: response.requestId,
results: response.results.map((result, index) => ({
rank: index + 1,
title: result.title,
url: result.url,
publishedDate: result.publishedDate,
author: null,
summary: result.summary,
text: result.text,
highlights: result.summary ? [clipText(result.summary, 280)] : [],
engines: result.engines,
})),
};
}
async function runWebSearchTool(input: unknown): Promise<ToolRunOutcome> {
const args = WebSearchArgsSchema.parse(input);
if (env.CHAT_WEB_SEARCH_ENGINE === "searxng") {
return runSearxngWebSearchTool(args);
}
return runExaWebSearchTool(args);
}
function assertSafeFetchUrl(urlRaw: string) { function assertSafeFetchUrl(urlRaw: string) {
const parsed = new URL(urlRaw); const parsed = new URL(urlRaw);
if (parsed.protocol !== "http:" && parsed.protocol !== "https:") { if (parsed.protocol !== "http:" && parsed.protocol !== "https:") {
@@ -379,9 +537,228 @@ async function runFetchUrlTool(input: unknown): Promise<ToolRunOutcome> {
}; };
} }
function shellQuote(value: string) {
return `'${value.replace(/'/g, `'\\''`)}'`;
}
function buildDevboxSshTarget() {
const host = env.CHAT_CODEX_REMOTE_HOST;
if (!host) {
throw new Error("CHAT_CODEX_REMOTE_HOST not set");
}
if (!env.CHAT_CODEX_REMOTE_USER || host.includes("@")) {
return host;
}
return `${env.CHAT_CODEX_REMOTE_USER}@${host}`;
}
function buildRemoteCodexCommand(prompt: string) {
const workdir = env.CHAT_CODEX_REMOTE_WORKDIR.trim();
const wrappedPrompt = [
"You are running in a non-interactive batch environment.",
"",
"Rules:",
"- Do not ask questions or wait for user input.",
"- Do not use interactive commands, editors, pagers, or prompts.",
"- If details are ambiguous, make a reasonable assumption and continue.",
"- Complete the task in one run, including any requested file edits, commands, and verification.",
"- End with a concise final report that includes changed files, commands run, and outcomes.",
"",
"Task:",
prompt,
].join("\n");
const codexCommand =
`codex exec --dangerously-bypass-approvals-and-sandbox --skip-git-repo-check ${shellQuote(wrappedPrompt)} < /dev/null`;
return `mkdir -p ${shellQuote(workdir)} && cd ${shellQuote(workdir)} && ${codexCommand}`;
}
function buildRemoteShellCommand(command: string) {
const workdir = env.CHAT_CODEX_REMOTE_WORKDIR.trim();
const quotedCommand = shellQuote(command);
return (
`mkdir -p ${shellQuote(workdir)} && cd ${shellQuote(workdir)} && ` +
`if command -v bash >/dev/null 2>&1; then bash -lc ${quotedCommand}; else sh -lc ${quotedCommand}; fi`
);
}
async function withDevboxSshKeyPath<T>(fn: (keyPath?: string) => Promise<T>) {
if (env.CHAT_CODEX_SSH_KEY_PATH) {
return fn(env.CHAT_CODEX_SSH_KEY_PATH);
}
if (!env.CHAT_CODEX_SSH_PRIVATE_KEY_B64) {
return fn(undefined);
}
const tmpDir = await mkdtemp(path.join(os.tmpdir(), "sybil-codex-ssh-"));
const keyPath = path.join(tmpDir, "id");
try {
await writeFile(keyPath, Buffer.from(env.CHAT_CODEX_SSH_PRIVATE_KEY_B64, "base64"), { mode: 0o600 });
return await fn(keyPath);
} finally {
await rm(tmpDir, { recursive: true, force: true });
}
}
function clipRemoteOutput(value: string, maxCharacters: number) {
if (value.length <= maxCharacters) {
return { text: value, truncated: false };
}
return {
text: `${value.slice(0, maxCharacters)}\n\n[truncated ${value.length - maxCharacters} characters]`,
truncated: true,
};
}
function bufferOrStringToString(value: unknown) {
if (typeof value === "string") return value;
if (Buffer.isBuffer(value)) return value.toString("utf8");
return "";
}
async function runCodexExecTool(input: unknown): Promise<ToolRunOutcome> {
if (!env.CHAT_CODEX_TOOL_ENABLED) {
return { ok: false, error: "codex_exec is disabled." };
}
const args: CodexExecArgs = CodexExecArgsSchema.parse(input);
const maxCharacters = args.maxCharacters ?? DEFAULT_CODEX_MAX_OUTPUT_CHARACTERS;
const sshTarget = buildDevboxSshTarget();
const remoteCommand = buildRemoteCodexCommand(args.prompt);
const run = async (keyPath?: string) => {
const sshArgs = [
"-n",
"-o",
"BatchMode=yes",
"-o",
"StrictHostKeyChecking=accept-new",
"-o",
"UserKnownHostsFile=/tmp/sybil-codex-known-hosts",
"-p",
String(env.CHAT_CODEX_REMOTE_PORT),
];
if (keyPath) {
sshArgs.push("-i", keyPath);
}
sshArgs.push(sshTarget, remoteCommand);
try {
const result = await execFileAsync("ssh", sshArgs, {
timeout: env.CHAT_CODEX_EXEC_TIMEOUT_MS,
maxBuffer: REMOTE_EXEC_MAX_BUFFER_BYTES,
});
const stdout = clipRemoteOutput(bufferOrStringToString(result.stdout), maxCharacters);
const stderr = clipRemoteOutput(bufferOrStringToString(result.stderr), Math.min(maxCharacters, 12_000));
return {
ok: true,
host: env.CHAT_CODEX_REMOTE_HOST,
workdir: env.CHAT_CODEX_REMOTE_WORKDIR,
stdout: stdout.text,
stderr: stderr.text,
stdoutTruncated: stdout.truncated,
stderrTruncated: stderr.truncated,
};
} catch (err: any) {
const stdout = clipRemoteOutput(bufferOrStringToString(err?.stdout), maxCharacters);
const stderr = clipRemoteOutput(bufferOrStringToString(err?.stderr), Math.min(maxCharacters, 12_000));
return {
ok: false,
error: err?.killed
? `Remote Codex command timed out after ${env.CHAT_CODEX_EXEC_TIMEOUT_MS}ms.`
: err?.message ?? String(err),
exitCode: typeof err?.code === "number" ? err.code : null,
signal: typeof err?.signal === "string" ? err.signal : null,
host: env.CHAT_CODEX_REMOTE_HOST,
workdir: env.CHAT_CODEX_REMOTE_WORKDIR,
stdout: stdout.text,
stderr: stderr.text,
stdoutTruncated: stdout.truncated,
stderrTruncated: stderr.truncated,
};
}
};
return withDevboxSshKeyPath(run);
}
async function runShellExecTool(input: unknown): Promise<ToolRunOutcome> {
if (!env.CHAT_SHELL_TOOL_ENABLED) {
return { ok: false, error: "shell_exec is disabled." };
}
const args: ShellExecArgs = ShellExecArgsSchema.parse(input);
const maxCharacters = args.maxCharacters ?? DEFAULT_SHELL_MAX_OUTPUT_CHARACTERS;
const sshTarget = buildDevboxSshTarget();
const remoteCommand = buildRemoteShellCommand(args.command);
const run = async (keyPath?: string) => {
const sshArgs = [
"-n",
"-o",
"BatchMode=yes",
"-o",
"StrictHostKeyChecking=accept-new",
"-o",
"UserKnownHostsFile=/tmp/sybil-codex-known-hosts",
"-p",
String(env.CHAT_CODEX_REMOTE_PORT),
];
if (keyPath) {
sshArgs.push("-i", keyPath);
}
sshArgs.push(sshTarget, remoteCommand);
try {
const result = await execFileAsync("ssh", sshArgs, {
timeout: env.CHAT_SHELL_EXEC_TIMEOUT_MS,
maxBuffer: REMOTE_EXEC_MAX_BUFFER_BYTES,
});
const stdout = clipRemoteOutput(bufferOrStringToString(result.stdout), maxCharacters);
const stderr = clipRemoteOutput(bufferOrStringToString(result.stderr), Math.min(maxCharacters, 12_000));
return {
ok: true,
host: env.CHAT_CODEX_REMOTE_HOST,
workdir: env.CHAT_CODEX_REMOTE_WORKDIR,
command: args.command,
stdout: stdout.text,
stderr: stderr.text,
stdoutTruncated: stdout.truncated,
stderrTruncated: stderr.truncated,
};
} catch (err: any) {
const stdout = clipRemoteOutput(bufferOrStringToString(err?.stdout), maxCharacters);
const stderr = clipRemoteOutput(bufferOrStringToString(err?.stderr), Math.min(maxCharacters, 12_000));
return {
ok: false,
error: err?.killed
? `Remote shell command timed out after ${env.CHAT_SHELL_EXEC_TIMEOUT_MS}ms.`
: err?.message ?? String(err),
exitCode: typeof err?.code === "number" ? err.code : null,
signal: typeof err?.signal === "string" ? err.signal : null,
host: env.CHAT_CODEX_REMOTE_HOST,
workdir: env.CHAT_CODEX_REMOTE_WORKDIR,
command: args.command,
stdout: stdout.text,
stderr: stderr.text,
stdoutTruncated: stdout.truncated,
stderrTruncated: stderr.truncated,
};
}
};
return withDevboxSshKeyPath(run);
}
async function executeTool(name: string, args: unknown): Promise<ToolRunOutcome> { async function executeTool(name: string, args: unknown): Promise<ToolRunOutcome> {
if (name === "web_search") return runWebSearchTool(args); if (name === "web_search") return runWebSearchTool(args);
if (name === "fetch_url") return runFetchUrlTool(args); if (name === "fetch_url") return runFetchUrlTool(args);
if (name === "codex_exec") return runCodexExecTool(args);
if (name === "shell_exec") return runShellExecTool(args);
return { ok: false, error: `Unknown tool: ${name}` }; return { ok: false, error: `Unknown tool: ${name}` };
} }
@@ -396,6 +773,49 @@ function parseToolArgs(raw: unknown) {
} }
} }
function buildEventArgs(name: string, args: Record<string, unknown>) {
if (name === "codex_exec" && typeof args.prompt === "string") {
return {
...args,
prompt: clipText(args.prompt, 1_000),
};
}
if (name === "shell_exec" && typeof args.command === "string") {
return {
...args,
command: clipText(args.command, 1_000),
};
}
return args;
}
function looksLikeDanglingToolIntent(text: string) {
const normalized = text
.toLowerCase()
.replace(/[`*_>#-]/g, " ")
.replace(/\s+/g, " ")
.trim();
if (!normalized) return false;
if (normalized.length > 800) return false;
if (/\blet me know\b/.test(normalized) || /\bif you (want|would like)\b/.test(normalized)) return false;
return (
/\b(calling|running|executing|trying|checking|testing)\b.{0,80}\b(now|it|tool|command|shell_exec|codex_exec)\b/.test(normalized) ||
/\b(let me|i'?ll|i will)\b.{0,120}\b(run|execute|call|try|check|test)\b/.test(normalized) ||
/\b(stand by|hang on|one moment)\b/.test(normalized)
);
}
// Record the assistant's tool-less "I'll run it" message in the transcript,
// then inject a system correction instructing the model to actually call the
// tool (or answer directly) on the next round.
function appendDanglingToolIntentCorrection(conversation: any[], text: string) {
  const correction =
    "Internal correction: the previous assistant message claimed it would run a tool, but no tool call was made. If the task needs an available tool, call it now. Otherwise provide the final answer directly without saying you will run a tool.";
  conversation.push(
    { role: "assistant", content: text },
    { role: "system", content: correction }
  );
}
function mergeUsage(acc: Required<ToolAwareUsage>, usage: any) { function mergeUsage(acc: Required<ToolAwareUsage>, usage: any) {
if (!usage) return false; if (!usage) return false;
acc.inputTokens += usage.prompt_tokens ?? 0; acc.inputTokens += usage.prompt_tokens ?? 0;
@@ -404,6 +824,58 @@ function mergeUsage(acc: Required<ToolAwareUsage>, usage: any) {
return true; return true;
} }
// Accumulate Responses-API usage counters into `acc`; missing fields count
// as zero. Returns true when a usage object was present so callers can
// track whether any usage was ever reported.
function mergeResponsesUsage(acc: Required<ToolAwareUsage>, usage: any) {
  if (!usage) return false;
  acc.inputTokens += usage.input_tokens ?? 0;
  acc.outputTokens += usage.output_tokens ?? 0;
  acc.totalTokens += usage.total_tokens ?? 0;
  return true;
}
// Return a response's `output` item array, or [] when absent or malformed.
function getResponseOutputItems(response: any) {
  const output = response?.output;
  if (Array.isArray(output)) return output;
  return [];
}
// Collect the assistant-visible text from a Responses API response: prefer
// the convenience `output_text` field, otherwise concatenate every
// output_text / refusal content part across message output items.
function extractResponsesText(response: any, fallback = "") {
  if (typeof response?.output_text === "string") return response.output_text;
  const outputItems = Array.isArray(response?.output) ? response.output : [];
  const collected: string[] = [];
  for (const item of outputItems) {
    if (item?.type !== "message" || !Array.isArray(item.content)) continue;
    for (const part of item.content) {
      if (part?.type === "output_text" && typeof part.text === "string") {
        collected.push(part.text);
      } else if (part?.type === "refusal" && typeof part.refusal === "string") {
        collected.push(part.refusal);
      }
    }
  }
  return collected.join("") || fallback;
}
// Given the final text and what was already streamed to the client, return
// the suffix still owed; "" when nothing remains or when the final text
// diverged from the streamed prefix (in which case nothing can safely be
// appended).
function getUnstreamedText(finalText: string, streamedText: string) {
  if (!finalText) return "";
  if (!streamedText) return finalText;
  if (!finalText.startsWith(streamedText)) return "";
  return finalText.slice(streamedText.length);
}
// Build a human-readable failure message for failed/incomplete Responses API
// responses; null when the response terminated normally.
function getResponseFailureMessage(response: any) {
  const status = response?.status;
  if (status !== "failed" && status !== "incomplete") return null;
  if (typeof response?.error?.message === "string") return response.error.message;
  const reason =
    typeof response?.incomplete_details?.reason === "string" ? response.incomplete_details.reason : null;
  if (reason) return `Response incomplete: ${reason}`;
  return `Response ${status}.`;
}
// Convert Responses API `function_call` output items into the internal
// NormalizedToolCall shape, synthesizing a stable per-round id when the
// item carries neither call_id nor id.
function normalizeResponsesToolCalls(outputItems: any[], round: number): NormalizedToolCall[] {
  const calls: NormalizedToolCall[] = [];
  for (const item of outputItems) {
    if (item?.type !== "function_call") continue;
    calls.push({
      id: item.call_id ?? item.id ?? `tool_call_${round}_${calls.length}`,
      name: item.name ?? "unknown_tool",
      arguments: item.arguments ?? "{}",
    });
  }
  return calls;
}
type NormalizedToolCall = { type NormalizedToolCall = {
id: string; id: string;
name: string; name: string;
@@ -445,12 +917,13 @@ async function executeToolCallAndBuildEvent(
: undefined; : undefined;
const completedAtMs = Date.now(); const completedAtMs = Date.now();
const eventArgs = buildEventArgs(call.name, parsedArgs);
const event: ToolExecutionEvent = { const event: ToolExecutionEvent = {
toolCallId: call.id, toolCallId: call.id,
name: call.name, name: call.name,
status, status,
summary: buildToolSummary(call.name, parsedArgs, status, error), summary: buildToolSummary(call.name, eventArgs, status, error),
args: parsedArgs, args: eventArgs,
startedAt, startedAt,
completedAt: new Date(completedAtMs).toISOString(), completedAt: new Date(completedAtMs).toISOString(),
durationMs: completedAtMs - startedAtMs, durationMs: completedAtMs - startedAtMs,
@@ -466,12 +939,82 @@ async function executeToolCallAndBuildEvent(
} }
export async function runToolAwareOpenAIChat(params: ToolAwareCompletionParams): Promise<ToolAwareCompletionResult> { export async function runToolAwareOpenAIChat(params: ToolAwareCompletionParams): Promise<ToolAwareCompletionResult> {
const input: any[] = normalizeIncomingResponsesInput(params.messages);
const rawResponses: unknown[] = [];
const toolEvents: ToolExecutionEvent[] = [];
const usageAcc: Required<ToolAwareUsage> = { inputTokens: 0, outputTokens: 0, totalTokens: 0 };
let sawUsage = false;
let totalToolCalls = 0;
let danglingToolIntentRetries = 0;
for (let round = 0; round < MAX_TOOL_ROUNDS; round += 1) {
const response = await params.client.responses.create({
model: params.model,
input,
temperature: params.temperature,
max_output_tokens: params.maxTokens,
tools: RESPONSES_CHAT_TOOLS,
tool_choice: "auto",
parallel_tool_calls: true,
// Tool loops pass response output items back as input; reasoning items need persistence.
store: true,
} as any);
rawResponses.push(response);
sawUsage = mergeResponsesUsage(usageAcc, response?.usage) || sawUsage;
const failureMessage = getResponseFailureMessage(response);
if (failureMessage) {
throw new Error(failureMessage);
}
const outputItems = getResponseOutputItems(response);
const normalizedToolCalls = normalizeResponsesToolCalls(outputItems, round);
if (!normalizedToolCalls.length) {
const text = extractResponsesText(response);
if (danglingToolIntentRetries < MAX_DANGLING_TOOL_INTENT_RETRIES && looksLikeDanglingToolIntent(text)) {
danglingToolIntentRetries += 1;
appendDanglingToolIntentCorrection(input, text);
continue;
}
return {
text,
usage: sawUsage ? usageAcc : undefined,
raw: { responses: rawResponses, toolCallsUsed: totalToolCalls, api: "responses" },
toolEvents,
};
}
totalToolCalls += normalizedToolCalls.length;
input.push(...outputItems);
for (const call of normalizedToolCalls) {
const { event, toolResult } = await executeToolCallAndBuildEvent(call, params);
toolEvents.push(event);
input.push({
type: "function_call_output",
call_id: call.id,
output: JSON.stringify(toolResult),
});
}
}
return {
text: "I reached the tool-call limit while gathering information. Please narrow the request and try again.",
usage: sawUsage ? usageAcc : undefined,
raw: { responses: rawResponses, toolCallsUsed: totalToolCalls, toolCallLimitReached: true, api: "responses" },
toolEvents,
};
}
export async function runToolAwareChatCompletions(params: ToolAwareCompletionParams): Promise<ToolAwareCompletionResult> {
const conversation: any[] = normalizeIncomingMessages(params.messages); const conversation: any[] = normalizeIncomingMessages(params.messages);
const rawResponses: unknown[] = []; const rawResponses: unknown[] = [];
const toolEvents: ToolExecutionEvent[] = []; const toolEvents: ToolExecutionEvent[] = [];
const usageAcc: Required<ToolAwareUsage> = { inputTokens: 0, outputTokens: 0, totalTokens: 0 }; const usageAcc: Required<ToolAwareUsage> = { inputTokens: 0, outputTokens: 0, totalTokens: 0 };
let sawUsage = false; let sawUsage = false;
let totalToolCalls = 0; let totalToolCalls = 0;
let danglingToolIntentRetries = 0;
for (let round = 0; round < MAX_TOOL_ROUNDS; round += 1) { for (let round = 0; round < MAX_TOOL_ROUNDS; round += 1) {
const completion = await params.client.chat.completions.create({ const completion = await params.client.chat.completions.create({
@@ -497,8 +1040,14 @@ export async function runToolAwareOpenAIChat(params: ToolAwareCompletionParams):
const toolCalls = Array.isArray(message.tool_calls) ? message.tool_calls : []; const toolCalls = Array.isArray(message.tool_calls) ? message.tool_calls : [];
if (!toolCalls.length) { if (!toolCalls.length) {
const text = typeof message.content === "string" ? message.content : "";
if (danglingToolIntentRetries < MAX_DANGLING_TOOL_INTENT_RETRIES && looksLikeDanglingToolIntent(text)) {
danglingToolIntentRetries += 1;
appendDanglingToolIntentCorrection(conversation, text);
continue;
}
return { return {
text: typeof message.content === "string" ? message.content : "", text,
usage: sawUsage ? usageAcc : undefined, usage: sawUsage ? usageAcc : undefined,
raw: { responses: rawResponses, toolCallsUsed: totalToolCalls }, raw: { responses: rawResponses, toolCallsUsed: totalToolCalls },
toolEvents, toolEvents,
@@ -546,6 +1095,132 @@ export async function runToolAwareOpenAIChat(params: ToolAwareCompletionParams):
export async function* runToolAwareOpenAIChatStream( export async function* runToolAwareOpenAIChatStream(
params: ToolAwareCompletionParams params: ToolAwareCompletionParams
): AsyncGenerator<ToolAwareStreamingEvent> {
const input: any[] = normalizeIncomingResponsesInput(params.messages);
const rawResponses: unknown[] = [];
const toolEvents: ToolExecutionEvent[] = [];
const usageAcc: Required<ToolAwareUsage> = { inputTokens: 0, outputTokens: 0, totalTokens: 0 };
let sawUsage = false;
let totalToolCalls = 0;
let danglingToolIntentRetries = 0;
for (let round = 0; round < MAX_TOOL_ROUNDS; round += 1) {
const stream = await params.client.responses.create({
model: params.model,
input,
temperature: params.temperature,
max_output_tokens: params.maxTokens,
tools: RESPONSES_CHAT_TOOLS,
tool_choice: "auto",
parallel_tool_calls: true,
// Tool loops pass response output items back as input; reasoning items need persistence.
store: true,
stream: true,
} as any);
let roundText = "";
let streamedRoundText = "";
let roundHasToolCalls = false;
let canStreamRoundText = false;
let completedResponse: any | null = null;
const completedOutputItems: any[] = [];
for await (const event of stream as any as AsyncIterable<any>) {
rawResponses.push(event);
if (event?.type === "response.output_text.delta" && typeof event.delta === "string") {
roundText += event.delta;
if (canStreamRoundText && !roundHasToolCalls && event.delta.length) {
streamedRoundText += event.delta;
yield { type: "delta", text: event.delta };
}
} else if (event?.type === "response.output_item.added" && event.item) {
if (event.item.type === "function_call") {
roundHasToolCalls = true;
canStreamRoundText = false;
} else if (event.item.type === "message" && !roundHasToolCalls) {
canStreamRoundText = true;
}
} else if (event?.type === "response.output_item.done" && event.item) {
completedOutputItems[event.output_index ?? completedOutputItems.length] = event.item;
if (event.item.type === "function_call") {
roundHasToolCalls = true;
canStreamRoundText = false;
}
} else if (event?.type === "response.completed") {
completedResponse = event.response;
sawUsage = mergeResponsesUsage(usageAcc, event.response?.usage) || sawUsage;
} else if (event?.type === "response.failed" || event?.type === "response.incomplete") {
completedResponse = event.response;
sawUsage = mergeResponsesUsage(usageAcc, event.response?.usage) || sawUsage;
} else if (event?.type === "error") {
throw new Error(event.message ?? "OpenAI Responses stream failed.");
}
}
const failureMessage = getResponseFailureMessage(completedResponse);
if (failureMessage) {
throw new Error(failureMessage);
}
const outputItems = getResponseOutputItems(completedResponse);
const responseOutputItems = outputItems.length ? outputItems : completedOutputItems.filter(Boolean);
const normalizedToolCalls = normalizeResponsesToolCalls(responseOutputItems, round);
if (!normalizedToolCalls.length) {
const text = extractResponsesText(completedResponse, roundText);
if (
!streamedRoundText &&
danglingToolIntentRetries < MAX_DANGLING_TOOL_INTENT_RETRIES &&
looksLikeDanglingToolIntent(text)
) {
danglingToolIntentRetries += 1;
appendDanglingToolIntentCorrection(input, text);
continue;
}
const unstreamedText = getUnstreamedText(text, streamedRoundText);
if (unstreamedText) {
yield { type: "delta", text: unstreamedText };
}
yield {
type: "done",
result: {
text,
usage: sawUsage ? usageAcc : undefined,
raw: { streamed: true, responses: rawResponses, toolCallsUsed: totalToolCalls, api: "responses" },
toolEvents,
},
};
return;
}
totalToolCalls += normalizedToolCalls.length;
input.push(...responseOutputItems);
for (const call of normalizedToolCalls) {
const { event, toolResult } = await executeToolCallAndBuildEvent(call, params);
toolEvents.push(event);
yield { type: "tool_call", event };
input.push({
type: "function_call_output",
call_id: call.id,
output: JSON.stringify(toolResult),
});
}
}
yield {
type: "done",
result: {
text: "I reached the tool-call limit while gathering information. Please narrow the request and try again.",
usage: sawUsage ? usageAcc : undefined,
raw: { streamed: true, responses: rawResponses, toolCallsUsed: totalToolCalls, toolCallLimitReached: true, api: "responses" },
toolEvents,
},
};
}
export async function* runToolAwareChatCompletionsStream(
params: ToolAwareCompletionParams
): AsyncGenerator<ToolAwareStreamingEvent> { ): AsyncGenerator<ToolAwareStreamingEvent> {
const conversation: any[] = normalizeIncomingMessages(params.messages); const conversation: any[] = normalizeIncomingMessages(params.messages);
const rawResponses: unknown[] = []; const rawResponses: unknown[] = [];
@@ -553,6 +1228,7 @@ export async function* runToolAwareOpenAIChatStream(
const usageAcc: Required<ToolAwareUsage> = { inputTokens: 0, outputTokens: 0, totalTokens: 0 }; const usageAcc: Required<ToolAwareUsage> = { inputTokens: 0, outputTokens: 0, totalTokens: 0 };
let sawUsage = false; let sawUsage = false;
let totalToolCalls = 0; let totalToolCalls = 0;
let danglingToolIntentRetries = 0;
for (let round = 0; round < MAX_TOOL_ROUNDS; round += 1) { for (let round = 0; round < MAX_TOOL_ROUNDS; round += 1) {
const stream = await params.client.chat.completions.create({ const stream = await params.client.chat.completions.create({
@@ -567,6 +1243,8 @@ export async function* runToolAwareOpenAIChatStream(
} as any); } as any);
let roundText = ""; let roundText = "";
let streamedRoundText = "";
let roundHasToolCalls = false;
const roundToolCalls = new Map<number, { id?: string; name?: string; arguments: string }>(); const roundToolCalls = new Map<number, { id?: string; name?: string; arguments: string }>();
for await (const chunk of stream as any as AsyncIterable<any>) { for await (const chunk of stream as any as AsyncIterable<any>) {
@@ -577,12 +1255,16 @@ export async function* runToolAwareOpenAIChatStream(
const deltaText = choice?.delta?.content ?? ""; const deltaText = choice?.delta?.content ?? "";
if (typeof deltaText === "string" && deltaText.length) { if (typeof deltaText === "string" && deltaText.length) {
roundText += deltaText; roundText += deltaText;
if (roundToolCalls.size === 0) { if (!roundHasToolCalls) {
streamedRoundText += deltaText;
yield { type: "delta", text: deltaText }; yield { type: "delta", text: deltaText };
} }
} }
const deltaToolCalls = Array.isArray(choice?.delta?.tool_calls) ? choice.delta.tool_calls : []; const deltaToolCalls = Array.isArray(choice?.delta?.tool_calls) ? choice.delta.tool_calls : [];
if (deltaToolCalls.length) {
roundHasToolCalls = true;
}
for (const toolCall of deltaToolCalls) { for (const toolCall of deltaToolCalls) {
const idx = typeof toolCall?.index === "number" ? toolCall.index : 0; const idx = typeof toolCall?.index === "number" ? toolCall.index : 0;
const entry = roundToolCalls.get(idx) ?? { arguments: "" }; const entry = roundToolCalls.get(idx) ?? { arguments: "" };
@@ -608,6 +1290,19 @@ export async function* runToolAwareOpenAIChatStream(
})); }));
if (!normalizedToolCalls.length) { if (!normalizedToolCalls.length) {
if (
!streamedRoundText &&
danglingToolIntentRetries < MAX_DANGLING_TOOL_INTENT_RETRIES &&
looksLikeDanglingToolIntent(roundText)
) {
danglingToolIntentRetries += 1;
appendDanglingToolIntentCorrection(conversation, roundText);
continue;
}
const unstreamedText = getUnstreamedText(roundText, streamedRoundText);
if (unstreamedText) {
yield { type: "delta", text: unstreamedText };
}
yield { yield {
type: "done", type: "done",
result: { result: {
@@ -621,7 +1316,7 @@ export async function* runToolAwareOpenAIChatStream(
} }
totalToolCalls += normalizedToolCalls.length; totalToolCalls += normalizedToolCalls.length;
conversation.push({ const assistantToolCallMessage: any = {
role: "assistant", role: "assistant",
tool_calls: normalizedToolCalls.map((call) => ({ tool_calls: normalizedToolCalls.map((call) => ({
id: call.id, id: call.id,
@@ -631,7 +1326,11 @@ export async function* runToolAwareOpenAIChatStream(
arguments: call.arguments, arguments: call.arguments,
}, },
})), })),
}); };
if (roundText) {
assistantToolCallMessage.content = roundText;
}
conversation.push(assistantToolCallMessage);
for (const call of normalizedToolCalls) { for (const call of normalizedToolCalls) {
const { event, toolResult } = await executeToolCallAndBuildEvent(call, params); const { event, toolResult } = await executeToolCallAndBuildEvent(call, params);

View File

@@ -0,0 +1,268 @@
import type { ChatAttachment, ChatImageAttachment, ChatMessage, ChatTextAttachment } from "./types.js";
// Escape a string for use inside a double-quoted pseudo-XML attribute value.
// `&` must be escaped first so the `&quot;`/`&lt;` entities produced here are
// unambiguous; the original only escaped `"`, leaving `&`/`<` ambiguous.
function escapeAttribute(value: string) {
  return value.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/"/g, "&quot;");
}
// All image attachments on a message (empty array when none are present).
function getImageAttachments(message: ChatMessage) {
  const attachments = message.attachments ?? [];
  return attachments.filter((attachment): attachment is ChatImageAttachment => attachment.kind === "image");
}
// All text attachments on a message (empty array when none are present).
function getTextAttachments(message: ChatMessage) {
  const attachments = message.attachments ?? [];
  return attachments.filter((attachment): attachment is ChatTextAttachment => attachment.kind === "text");
}
// Short textual note listing attached image filenames; null when no images.
function buildImageSummaryText(attachments: ChatImageAttachment[]) {
  if (attachments.length === 0) return null;
  const names = attachments.map((attachment) => attachment.filename).join(", ");
  const label = attachments.length === 1 ? "Attached image" : "Attached images";
  return `${label}: ${names}.`;
}
// Render a text attachment as a prompt section: a plain-language header line
// plus an <attached_file> wrapper carrying filename/MIME metadata and the
// (possibly truncated) file contents.
function buildTextAttachmentPrompt(attachment: ChatTextAttachment) {
  const truncatedAttr = attachment.truncated ? ' truncated="true"' : "";
  const headerSuffix = attachment.truncated ? " (content truncated)" : "";
  const openTag = `<attached_file filename="${escapeAttribute(attachment.filename)}" mime_type="${escapeAttribute(attachment.mimeType)}"${truncatedAttr}>`;
  return [
    `Attached text file: ${attachment.filename}${headerSuffix}`,
    openTag,
    attachment.text,
    "</attached_file>",
  ].join("\n");
}
function toOpenAIContent(message: ChatMessage) {
const imageAttachments = getImageAttachments(message);
const textAttachments = getTextAttachments(message);
if (!imageAttachments.length && !textAttachments.length) {
return message.content;
}
const parts: Array<Record<string, unknown>> = [];
for (const attachment of imageAttachments) {
parts.push({
type: "image_url",
image_url: {
url: attachment.dataUrl,
detail: "auto",
},
});
}
const imageSummary = buildImageSummaryText(imageAttachments);
if (imageSummary) {
parts.push({ type: "text", text: imageSummary });
}
for (const attachment of textAttachments) {
parts.push({ type: "text", text: buildTextAttachmentPrompt(attachment) });
}
if (message.content.trim()) {
parts.push({ type: "text", text: message.content });
}
if (parts.length === 1 && parts[0]?.type === "text" && typeof parts[0].text === "string") {
return parts[0].text;
}
return parts;
}
function toOpenAIResponsesContent(message: ChatMessage) {
const imageAttachments = getImageAttachments(message);
const textAttachments = getTextAttachments(message);
if (!imageAttachments.length && !textAttachments.length) {
return message.content;
}
const parts: Array<Record<string, unknown>> = [];
for (const attachment of imageAttachments) {
parts.push({
type: "input_image",
image_url: attachment.dataUrl,
detail: "auto",
});
}
const imageSummary = buildImageSummaryText(imageAttachments);
if (imageSummary) {
parts.push({ type: "input_text", text: imageSummary });
}
for (const attachment of textAttachments) {
parts.push({ type: "input_text", text: buildTextAttachmentPrompt(attachment) });
}
if (message.content.trim()) {
parts.push({ type: "input_text", text: message.content });
}
if (parts.length === 1 && parts[0]?.type === "input_text" && typeof parts[0].text === "string") {
return parts[0].text;
}
return parts;
}
// Validate and decompose a base64 image data URL (png/jpeg only).
// Throws when the URL is malformed or its media type disagrees with the
// attachment's declared mimeType. Incidental whitespace in the base64
// payload is stripped from the returned data.
function parseImageDataUrl(attachment: ChatImageAttachment) {
  const match = attachment.dataUrl.match(/^data:(image\/(?:png|jpeg));base64,([a-z0-9+/=\s]+)$/i);
  if (match === null) {
    throw new Error(`Invalid image attachment data URL for '${attachment.filename}'.`);
  }
  const mediaType = match[1].toLowerCase();
  if (mediaType !== attachment.mimeType) {
    throw new Error(`Image attachment MIME type mismatch for '${attachment.filename}'.`);
  }
  return { mediaType, data: match[2].replace(/\s+/g, "") };
}
function toAnthropicContent(message: ChatMessage) {
const imageAttachments = getImageAttachments(message);
const textAttachments = getTextAttachments(message);
if (!imageAttachments.length && !textAttachments.length) {
return message.content;
}
const blocks: Array<Record<string, unknown>> = [];
for (const attachment of imageAttachments) {
const source = parseImageDataUrl(attachment);
blocks.push({
type: "image",
source: {
type: "base64",
media_type: source.mediaType,
data: source.data,
},
});
}
const imageSummary = buildImageSummaryText(imageAttachments);
if (imageSummary) {
blocks.push({ type: "text", text: imageSummary });
}
for (const attachment of textAttachments) {
blocks.push({ type: "text", text: buildTextAttachmentPrompt(attachment) });
}
if (message.content.trim()) {
blocks.push({ type: "text", text: message.content });
}
if (blocks.length === 1 && blocks[0]?.type === "text" && typeof blocks[0].text === "string") {
return blocks[0].text;
}
return blocks;
}
// Map a ChatMessage onto a Chat Completions message. Tool output is folded
// into a user message because replayed history has no native tool linkage;
// `name` is preserved only for roles that accept it.
export function buildOpenAIConversationMessage(message: ChatMessage) {
  if (message.role === "tool") {
    const toolName = message.name?.trim() || "tool";
    return { role: "user", content: `Tool output (${toolName}):\n${message.content}` };
  }
  const result: Record<string, unknown> = {
    role: message.role,
    content: toOpenAIContent(message),
  };
  const roleAcceptsName = message.role === "assistant" || message.role === "user";
  if (message.name && roleAcceptsName) {
    result.name = message.name;
  }
  return result;
}
// Map a ChatMessage onto a Responses API input message; tool output is
// folded into a user message (replayed history has no native tool linkage).
export function buildOpenAIResponsesInputMessage(message: ChatMessage) {
  if (message.role === "tool") {
    const toolName = message.name?.trim() || "tool";
    return { role: "user", content: `Tool output (${toolName}):\n${message.content}` };
  }
  return { role: message.role, content: toOpenAIResponsesContent(message) };
}
// Anthropic requests in this app have no server-side tool loop; tell the
// model so it does not claim to run tools it cannot.
const ANTHROPIC_NO_SERVER_TOOLS_PROMPT =
  "This Anthropic backend path does not have server-managed tool calls. Do not claim to run shell commands, Codex tasks, web searches, or fetch URLs. If the user asks for tool execution, explain that they should switch to OpenAI or xAI in this app for tool-enabled chat.";
// Combine the no-tools notice with the conversation's first system message
// (when present and non-empty) into Anthropic's dedicated `system` string.
export function getAnthropicSystemPrompt(messages: ChatMessage[]) {
  const userSystem = messages.find((message) => message.role === "system")?.content;
  const sections = [ANTHROPIC_NO_SERVER_TOOLS_PROMPT];
  if (userSystem) {
    sections.push(userSystem);
  }
  return sections.join("\n\n");
}
// Map a ChatMessage to an Anthropic message. System messages are rejected
// (they are handled via getAnthropicSystemPrompt); tool output becomes a
// user message; every non-assistant role collapses to "user".
export function buildAnthropicConversationMessage(message: ChatMessage) {
  switch (message.role) {
    case "system":
      throw new Error("System messages must be handled separately for Anthropic.");
    case "tool": {
      const toolName = message.name?.trim() || "tool";
      return { role: "user", content: `Tool output (${toolName}):\n${message.content}` };
    }
    case "assistant":
      return { role: "assistant", content: toAnthropicContent(message) };
    default:
      return { role: "user", content: toAnthropicContent(message) };
  }
}
// Re-validate untrusted attachment JSON into well-typed ChatAttachment
// records, silently dropping anything malformed. Image MIME types other
// than image/png are normalized to image/jpeg; text attachments keep their
// declared MIME type and get an explicit `truncated` boolean.
export function buildComparableAttachments(input: unknown): ChatAttachment[] {
  if (!Array.isArray(input)) return [];
  const result: ChatAttachment[] = [];
  for (const candidate of input) {
    if (!candidate || typeof candidate !== "object" || Array.isArray(candidate)) continue;
    const record = candidate as Record<string, unknown>;
    const base = {
      id: typeof record.id === "string" ? record.id : "",
      filename: typeof record.filename === "string" ? record.filename : "",
      sizeBytes: typeof record.sizeBytes === "number" ? record.sizeBytes : 0,
    };
    const mimeType = typeof record.mimeType === "string" ? record.mimeType : "";
    if (record.kind === "image" && typeof record.dataUrl === "string") {
      result.push({
        kind: "image",
        ...base,
        mimeType: mimeType === "image/png" ? "image/png" : "image/jpeg",
        dataUrl: record.dataUrl,
      });
    } else if (record.kind === "text" && typeof record.text === "string") {
      result.push({
        kind: "text",
        ...base,
        mimeType,
        text: record.text,
        truncated: record.truncated === true,
      });
    }
  }
  return result;
}

View File

@@ -23,6 +23,15 @@ function uniqSorted(models: string[]) {
return [...new Set(models.map((value) => value.trim()).filter(Boolean))].sort((a, b) => a.localeCompare(b)); return [...new Set(models.map((value) => value.trim()).filter(Boolean))].sort((a, b) => a.localeCompare(b));
} }
// Heuristic filter for model ids that can serve text chat via the Responses
// API: exclude specialty models (embedding, moderation, audio, image, search,
// computer-use, ...) and require a chat-family id prefix.
function isLikelyOpenAIResponsesModel(model: string) {
  const id = model.toLowerCase();
  const excludedFragments = [
    "embedding",
    "moderation",
    "audio",
    "realtime",
    "transcribe",
    "tts",
    "image",
    "dall-e",
    "sora",
    "search",
    "computer-use",
  ];
  if (excludedFragments.some((fragment) => id.includes(fragment))) return false;
  return /^(gpt-|o\d|chatgpt-)/.test(id);
}
async function withTimeout<T>(promise: Promise<T>, timeoutMs: number, label: string) { async function withTimeout<T>(promise: Promise<T>, timeoutMs: number, label: string) {
let timeoutId: NodeJS.Timeout | null = null; let timeoutId: NodeJS.Timeout | null = null;
try { try {
@@ -42,7 +51,7 @@ async function withTimeout<T>(promise: Promise<T>, timeoutMs: number, label: str
async function fetchProviderModels(provider: Provider) { async function fetchProviderModels(provider: Provider) {
if (provider === "openai") { if (provider === "openai") {
const page = await openaiClient().models.list(); const page = await openaiClient().models.list();
return uniqSorted(page.data.map((model) => model.id)); return uniqSorted(page.data.map((model) => model.id).filter(isLikelyOpenAIResponsesModel));
} }
if (provider === "anthropic") { if (provider === "anthropic") {

View File

@@ -1,7 +1,8 @@
import { performance } from "node:perf_hooks"; import { performance } from "node:perf_hooks";
import { prisma } from "../db.js"; import { prisma } from "../db.js";
import { anthropicClient, openaiClient, xaiClient } from "./providers.js"; import { anthropicClient, openaiClient, xaiClient } from "./providers.js";
import { buildToolLogMessageData, runToolAwareOpenAIChat } from "./chat-tools.js"; import { buildToolLogMessageData, runToolAwareChatCompletions, runToolAwareOpenAIChat } from "./chat-tools.js";
import { buildAnthropicConversationMessage, getAnthropicSystemPrompt } from "./message-content.js";
import type { MultiplexRequest, MultiplexResponse, Provider } from "./types.js"; import type { MultiplexRequest, MultiplexResponse, Provider } from "./types.js";
function asProviderEnum(p: Provider) { function asProviderEnum(p: Provider) {
@@ -47,8 +48,8 @@ export async function runMultiplex(req: MultiplexRequest): Promise<MultiplexResp
let raw: unknown; let raw: unknown;
let toolMessages: ReturnType<typeof buildToolLogMessageData>[] = []; let toolMessages: ReturnType<typeof buildToolLogMessageData>[] = [];
if (req.provider === "openai" || req.provider === "xai") { if (req.provider === "openai") {
const client = req.provider === "openai" ? openaiClient() : xaiClient(); const client = openaiClient();
const r = await runToolAwareOpenAIChat({ const r = await runToolAwareOpenAIChat({
client, client,
model: req.model, model: req.model,
@@ -65,14 +66,29 @@ export async function runMultiplex(req: MultiplexRequest): Promise<MultiplexResp
outText = r.text; outText = r.text;
usage = r.usage; usage = r.usage;
toolMessages = r.toolEvents.map((event) => buildToolLogMessageData(call.chatId, event)); toolMessages = r.toolEvents.map((event) => buildToolLogMessageData(call.chatId, event));
} else if (req.provider === "xai") {
const client = xaiClient();
const r = await runToolAwareChatCompletions({
client,
model: req.model,
messages: req.messages,
temperature: req.temperature,
maxTokens: req.maxTokens,
logContext: {
provider: req.provider,
model: req.model,
chatId,
},
});
raw = r.raw;
outText = r.text;
usage = r.usage;
toolMessages = r.toolEvents.map((event) => buildToolLogMessageData(call.chatId, event));
} else if (req.provider === "anthropic") { } else if (req.provider === "anthropic") {
const client = anthropicClient(); const client = anthropicClient();
// Anthropic splits system prompt. We'll convert first system message into system string. const system = getAnthropicSystemPrompt(req.messages);
const system = req.messages.find((m) => m.role === "system")?.content; const msgs = req.messages.filter((message) => message.role !== "system").map((message) => buildAnthropicConversationMessage(message));
const msgs = req.messages
.filter((m) => m.role !== "system")
.map((m) => ({ role: m.role === "assistant" ? "assistant" : "user", content: m.content }));
const r = await client.messages.create({ const r = await client.messages.create({
model: req.model, model: req.model,

View File

@@ -1,7 +1,13 @@
import { performance } from "node:perf_hooks"; import { performance } from "node:perf_hooks";
import { prisma } from "../db.js"; import { prisma } from "../db.js";
import { anthropicClient, openaiClient, xaiClient } from "./providers.js"; import { anthropicClient, openaiClient, xaiClient } from "./providers.js";
import { buildToolLogMessageData, runToolAwareOpenAIChatStream, type ToolExecutionEvent } from "./chat-tools.js"; import {
buildToolLogMessageData,
runToolAwareChatCompletionsStream,
runToolAwareOpenAIChatStream,
type ToolExecutionEvent,
} from "./chat-tools.js";
import { buildAnthropicConversationMessage, getAnthropicSystemPrompt } from "./message-content.js";
import type { MultiplexRequest, Provider } from "./types.js"; import type { MultiplexRequest, Provider } from "./types.js";
export type StreamEvent = export type StreamEvent =
@@ -52,23 +58,37 @@ export async function* runMultiplexStream(req: MultiplexRequest): AsyncGenerator
let text = ""; let text = "";
let usage: StreamEvent extends any ? any : never; let usage: StreamEvent extends any ? any : never;
let raw: unknown = { streamed: true }; let raw: unknown = { streamed: true };
let toolMessages: ReturnType<typeof buildToolLogMessageData>[] = [];
try { try {
if (req.provider === "openai" || req.provider === "xai") { if (req.provider === "openai" || req.provider === "xai") {
const client = req.provider === "openai" ? openaiClient() : xaiClient(); const client = req.provider === "openai" ? openaiClient() : xaiClient();
for await (const ev of runToolAwareOpenAIChatStream({ const streamEvents =
client, req.provider === "openai"
model: req.model, ? runToolAwareOpenAIChatStream({
messages: req.messages, client,
temperature: req.temperature, model: req.model,
maxTokens: req.maxTokens, messages: req.messages,
logContext: { temperature: req.temperature,
provider: req.provider, maxTokens: req.maxTokens,
model: req.model, logContext: {
chatId, provider: req.provider,
}, model: req.model,
})) { chatId,
},
})
: runToolAwareChatCompletionsStream({
client,
model: req.model,
messages: req.messages,
temperature: req.temperature,
maxTokens: req.maxTokens,
logContext: {
provider: req.provider,
model: req.model,
chatId,
},
});
for await (const ev of streamEvents) {
if (ev.type === "delta") { if (ev.type === "delta") {
text += ev.text; text += ev.text;
yield { type: "delta", text: ev.text }; yield { type: "delta", text: ev.text };
@@ -76,7 +96,16 @@ export async function* runMultiplexStream(req: MultiplexRequest): AsyncGenerator
} }
if (ev.type === "tool_call") { if (ev.type === "tool_call") {
toolMessages.push(buildToolLogMessageData(chatId, ev.event)); const toolMessage = buildToolLogMessageData(chatId, ev.event);
await prisma.message.create({
data: {
chatId: toolMessage.chatId,
role: toolMessage.role as any,
content: toolMessage.content,
name: toolMessage.name,
metadata: toolMessage.metadata as any,
},
});
yield { type: "tool_call", event: ev.event }; yield { type: "tool_call", event: ev.event };
continue; continue;
} }
@@ -88,10 +117,8 @@ export async function* runMultiplexStream(req: MultiplexRequest): AsyncGenerator
} else if (req.provider === "anthropic") { } else if (req.provider === "anthropic") {
const client = anthropicClient(); const client = anthropicClient();
const system = req.messages.find((m) => m.role === "system")?.content; const system = getAnthropicSystemPrompt(req.messages);
const msgs = req.messages const msgs = req.messages.filter((message) => message.role !== "system").map((message) => buildAnthropicConversationMessage(message));
.filter((m) => m.role !== "system")
.map((m) => ({ role: m.role === "assistant" ? "assistant" : "user", content: m.content }));
const stream = await client.messages.create({ const stream = await client.messages.create({
model: req.model, model: req.model,
@@ -130,17 +157,6 @@ export async function* runMultiplexStream(req: MultiplexRequest): AsyncGenerator
const latencyMs = Math.round(performance.now() - t0); const latencyMs = Math.round(performance.now() - t0);
await prisma.$transaction(async (tx) => { await prisma.$transaction(async (tx) => {
if (toolMessages.length) {
await tx.message.createMany({
data: toolMessages.map((message) => ({
chatId: message.chatId,
role: message.role as any,
content: message.content,
name: message.name,
metadata: message.metadata as any,
})),
});
}
await tx.message.create({ await tx.message.create({
data: { chatId, role: "assistant" as any, content: text }, data: { chatId, role: "assistant" as any, content: text },
}); });

View File

@@ -1,9 +1,31 @@
export type Provider = "openai" | "anthropic" | "xai"; export type Provider = "openai" | "anthropic" | "xai";
export type ChatImageAttachment = {
kind: "image";
id: string;
filename: string;
mimeType: "image/png" | "image/jpeg";
sizeBytes: number;
dataUrl: string;
};
export type ChatTextAttachment = {
kind: "text";
id: string;
filename: string;
mimeType: string;
sizeBytes: number;
text: string;
truncated?: boolean;
};
export type ChatAttachment = ChatImageAttachment | ChatTextAttachment;
export type ChatMessage = { export type ChatMessage = {
role: "system" | "user" | "assistant" | "tool"; role: "system" | "user" | "assistant" | "tool";
content: string; content: string;
name?: string; name?: string;
attachments?: ChatAttachment[];
}; };
export type MultiplexRequest = { export type MultiplexRequest = {

View File

@@ -4,23 +4,33 @@ import type { FastifyInstance } from "fastify";
import { prisma } from "./db.js"; import { prisma } from "./db.js";
import { requireAdmin } from "./auth.js"; import { requireAdmin } from "./auth.js";
import { env } from "./env.js"; import { env } from "./env.js";
import { buildComparableAttachments } from "./llm/message-content.js";
import { runMultiplex } from "./llm/multiplexer.js"; import { runMultiplex } from "./llm/multiplexer.js";
import { runMultiplexStream } from "./llm/streaming.js"; import { runMultiplexStream } from "./llm/streaming.js";
import { getModelCatalogSnapshot } from "./llm/model-catalog.js"; import { getModelCatalogSnapshot } from "./llm/model-catalog.js";
import { openaiClient } from "./llm/providers.js"; import { openaiClient } from "./llm/providers.js";
import { exaClient } from "./search/exa.js"; import { exaClient } from "./search/exa.js";
import type { ChatAttachment } from "./llm/types.js";
type IncomingChatMessage = { type IncomingChatMessage = {
role: "system" | "user" | "assistant" | "tool"; role: "system" | "user" | "assistant" | "tool";
content: string; content: string;
name?: string; name?: string;
attachments?: ChatAttachment[];
}; };
function sameMessage( function sameMessage(
a: { role: string; content: string; name?: string | null }, a: { role: string; content: string; name?: string | null; metadata?: unknown },
b: { role: string; content: string; name?: string | null } b: { role: string; content: string; name?: string | null; attachments?: ChatAttachment[] }
) { ) {
return a.role === b.role && a.content === b.content && (a.name ?? null) === (b.name ?? null); const existingAttachments = JSON.stringify(buildComparableAttachments((a.metadata as Record<string, unknown> | null)?.attachments ?? null));
const incomingAttachments = JSON.stringify(b.attachments ?? []);
return (
a.role === b.role &&
a.content === b.content &&
(a.name ?? null) === (b.name ?? null) &&
existingAttachments === incomingAttachments
);
} }
function isToolCallLogMetadata(value: unknown) { function isToolCallLogMetadata(value: unknown) {
@@ -60,10 +70,67 @@ async function storeNonAssistantMessages(chatId: string, messages: IncomingChatM
role: m.role as any, role: m.role as any,
content: m.content, content: m.content,
name: m.name, name: m.name,
metadata: m.attachments?.length ? ({ attachments: m.attachments } as any) : undefined,
})), })),
}); });
} }
// Upload limits shared by the attachment schema below; the web client mirrors these.
const MAX_CHAT_ATTACHMENTS = 8;
const MAX_IMAGE_ATTACHMENT_BYTES = 6 * 1024 * 1024;
const MAX_TEXT_ATTACHMENT_CHARS = 200_000;
// Cap on the base64 data URL string itself (~6 MB of binary after base64 overhead).
const MAX_IMAGE_DATA_URL_CHARS = 8_500_000;
// Validates client-supplied attachments. Images are limited to PNG/JPEG carried
// as base64 data URLs; text files carry their (possibly truncated) content inline.
const ChatAttachmentSchema = z.discriminatedUnion("kind", [
  z.object({
    kind: z.literal("image"),
    id: z.string().trim().min(1).max(128),
    filename: z.string().trim().min(1).max(255),
    mimeType: z.enum(["image/png", "image/jpeg"]),
    sizeBytes: z.number().int().positive().max(MAX_IMAGE_ATTACHMENT_BYTES),
    // Regex pins the scheme/mime/base64 shape; case-insensitive, whitespace tolerated in payload.
    dataUrl: z
      .string()
      .max(MAX_IMAGE_DATA_URL_CHARS)
      .regex(/^data:image\/(?:png|jpeg);base64,[a-z0-9+/=\s]+$/i, "Invalid image data URL"),
  }),
  z.object({
    kind: z.literal("text"),
    id: z.string().trim().min(1).max(128),
    filename: z.string().trim().min(1).max(255),
    mimeType: z.string().trim().min(1).max(127),
    sizeBytes: z.number().int().positive().max(8 * 1024 * 1024),
    text: z.string().max(MAX_TEXT_ATTACHMENT_CHARS),
    truncated: z.boolean().optional(),
  }),
]);
// Chat-completion message shape accepted by the completion endpoints.
// Tool messages may never carry attachments (enforced via superRefine).
const CompletionMessageSchema = z
  .object({
    role: z.enum(["system", "user", "assistant", "tool"]),
    content: z.string(),
    name: z.string().optional(),
    attachments: z.array(ChatAttachmentSchema).max(MAX_CHAT_ATTACHMENTS).optional(),
  })
  .superRefine((value, ctx) => {
    if (value.attachments?.length && value.role === "tool") {
      ctx.addIssue({
        code: z.ZodIssueCode.custom,
        message: "Tool messages cannot include attachments.",
        path: ["attachments"],
      });
    }
  });
// Fold validated attachments into a message's metadata. When metadata is not a
// plain object (null, scalar, array) it is replaced by a fresh { attachments }
// bag; otherwise the attachments key is merged onto a shallow copy.
function mergeAttachmentsIntoMetadata(metadata: unknown, attachments?: ChatAttachment[]) {
  if (!attachments || attachments.length === 0) return metadata as any;
  const isPlainObject = typeof metadata === "object" && metadata !== null && !Array.isArray(metadata);
  if (!isPlainObject) return { attachments };
  return { ...(metadata as Record<string, unknown>), attachments };
}
const SearchRunBody = z.object({ const SearchRunBody = z.object({
query: z.string().trim().min(1).optional(), query: z.string().trim().min(1).optional(),
title: z.string().trim().min(1).optional(), title: z.string().trim().min(1).optional(),
@@ -108,6 +175,13 @@ function mapSearchResultPreview(result: any, index: number) {
}; };
} }
// Trim `value` and cap it at `maxLength` characters, appending "..." when the
// text had to be cut. Returns null for null/undefined/blank input.
//
// Fix: the previous version sliced to maxLength - 1 and then appended three
// dots, so truncated output could exceed maxLength by two characters. The
// ellipsis is now budgeted inside the limit.
function truncateContextPart(value: string | null | undefined, maxLength: number) {
  const trimmed = value?.trim();
  if (!trimmed) return null;
  if (trimmed.length <= maxLength) return trimmed;
  const cut = Math.max(0, maxLength - 3);
  return `${trimmed.slice(0, cut).trimEnd()}...`;
}
function parseAnswerText(answerResponse: any) { function parseAnswerText(answerResponse: any) {
if (typeof answerResponse?.answer === "string") return answerResponse.answer; if (typeof answerResponse?.answer === "string") return answerResponse.answer;
if (answerResponse?.answer) return JSON.stringify(answerResponse.answer, null, 2); if (answerResponse?.answer) return JSON.stringify(answerResponse.answer, null, 2);
@@ -129,16 +203,15 @@ async function generateChatTitle(content: string) {
const systemPrompt = const systemPrompt =
"You create short chat titles. Return exactly one line, maximum 4 words, no quotes, no trailing punctuation."; "You create short chat titles. Return exactly one line, maximum 4 words, no quotes, no trailing punctuation.";
const userPrompt = `User request:\n${content}\n\nTitle:`; const userPrompt = `User request:\n${content}\n\nTitle:`;
const response = await openaiClient().chat.completions.create({ const response = await openaiClient().responses.create({
model: "gpt-4.1-mini", model: "gpt-4.1-mini",
temperature: 0, temperature: 0,
max_completion_tokens: 20, max_output_tokens: 20,
messages: [ instructions: systemPrompt,
{ role: "system", content: systemPrompt }, input: userPrompt,
{ role: "user", content: userPrompt }, store: false,
],
}); });
return response.choices?.[0]?.message?.content ?? ""; return response.output_text ?? "";
} }
function normalizeUrlForMatch(input: string | null | undefined) { function normalizeUrlForMatch(input: string | null | undefined) {
@@ -153,6 +226,57 @@ function normalizeUrlForMatch(input: string | null | undefined) {
} }
} }
// Build the system-prompt context injected into a chat spawned from a saved
// search: the query, the generated answer, up to 8 answer citations, and up to
// 10 result rows. Every free-text field is clipped via truncateContextPart to
// bound the prompt size. `search` is a prisma Search row with results included.
function buildSearchChatContext(search: any) {
  // Fall back from query -> title -> placeholder for the headline line.
  const query = truncateContextPart(search.query, 500) ?? truncateContextPart(search.title, 500) ?? "Untitled search";
  const lines: string[] = [
    "You are Sybil. The user started this chat from a saved web search. Use the search answer and result context below when answering follow-up questions. If the context is insufficient, say so and use available tools when appropriate.",
    "",
    `Search query: ${query}`,
  ];
  const answer = truncateContextPart(search.answerText, 6000);
  if (answer) {
    lines.push("", "Search answer:", answer);
  }
  if (Array.isArray(search.answerCitations) && search.answerCitations.length) {
    lines.push("", "Answer citations:");
    for (const [index, citation] of search.answerCitations.slice(0, 8).entries()) {
      const title = truncateContextPart(citation?.title, 160);
      const url = truncateContextPart(citation?.url ?? citation?.id, 400);
      if (title || url) {
        lines.push(`${index + 1}. ${[title, url].filter(Boolean).join(" - ")}`);
      }
    }
  }
  if (Array.isArray(search.results) && search.results.length) {
    lines.push("", "Search results:");
    for (const result of search.results.slice(0, 10)) {
      const title = truncateContextPart(result.title, 180) ?? result.url;
      const url = truncateContextPart(result.url, 500);
      const published = truncateContextPart(result.publishedDate, 80);
      const author = truncateContextPart(result.author, 120);
      const text = truncateContextPart(result.text, 1000);
      // Non-string highlight entries are dropped; each kept one is clipped.
      const highlights = Array.isArray(result.highlights)
        ? result.highlights
            .map((highlight: unknown) => truncateContextPart(typeof highlight === "string" ? highlight : null, 360))
            .filter(Boolean)
        : [];
      // rank is the stored 0-based ordering, so display numbering is 1-based.
      lines.push(`${result.rank + 1}. ${title}`);
      if (url) lines.push(` URL: ${url}`);
      if (published || author) lines.push(` Source detail: ${[published, author].filter(Boolean).join(" - ")}`);
      if (text) lines.push(` Text: ${text}`);
      for (const highlight of highlights.slice(0, 2)) {
        lines.push(` Highlight: ${highlight}`);
      }
    }
  }
  return lines.join("\n");
}
function buildSseHeaders(originHeader: string | undefined) { function buildSseHeaders(originHeader: string | undefined) {
const origin = originHeader && originHeader !== "null" ? originHeader : "*"; const origin = originHeader && originHeader !== "null" ? originHeader : "*";
const headers: Record<string, string> = { const headers: Record<string, string> = {
@@ -370,6 +494,54 @@ export async function registerRoutes(app: FastifyInstance) {
return { search }; return { search };
}); });
// POST /v1/searches/:searchId/chat — create a new chat seeded with the saved
// search's context as a hidden system message, so follow-up questions can
// reference the search answer and results.
app.post("/v1/searches/:searchId/chat", async (req) => {
  requireAdmin(req);
  const Params = z.object({ searchId: z.string() });
  const Body = z.object({ title: z.string().optional() });
  const { searchId } = Params.parse(req.params);
  const body = Body.parse(req.body ?? {});
  const search = await prisma.search.findUnique({
    where: { id: searchId },
    include: { results: { orderBy: { rank: "asc" } } },
  });
  if (!search) return app.httpErrors.notFound("search not found");
  // Prefer a caller-supplied title; otherwise derive one from the query/title.
  const fallbackTitle = search.query?.trim() || search.title?.trim() || "Search results";
  const title = body.title?.trim() || `Search: ${fallbackTitle.slice(0, 72)}`;
  const context = buildSearchChatContext(search);
  const chat = await prisma.chat.create({
    data: {
      title,
      messages: {
        create: {
          role: "system" as any,
          content: context,
          // Metadata links the chat back to its source search for the client.
          metadata: {
            kind: "search_context",
            searchId: search.id,
            query: search.query,
            resultCount: search.results.length,
          },
        },
      },
    },
    select: {
      id: true,
      title: true,
      createdAt: true,
      updatedAt: true,
      initiatedProvider: true,
      initiatedModel: true,
      lastUsedProvider: true,
      lastUsedModel: true,
    },
  });
  return { chat };
});
app.post("/v1/searches/:searchId/run", async (req) => { app.post("/v1/searches/:searchId/run", async (req) => {
requireAdmin(req); requireAdmin(req);
const Params = z.object({ searchId: z.string() }); const Params = z.object({ searchId: z.string() });
@@ -662,6 +834,7 @@ export async function registerRoutes(app: FastifyInstance) {
content: z.string(), content: z.string(),
name: z.string().optional(), name: z.string().optional(),
metadata: z.unknown().optional(), metadata: z.unknown().optional(),
attachments: z.array(ChatAttachmentSchema).max(MAX_CHAT_ATTACHMENTS).optional(),
}); });
const { chatId } = Params.parse(req.params); const { chatId } = Params.parse(req.params);
@@ -673,7 +846,7 @@ export async function registerRoutes(app: FastifyInstance) {
role: body.role as any, role: body.role as any,
content: body.content, content: body.content,
name: body.name, name: body.name,
metadata: body.metadata as any, metadata: mergeAttachmentsIntoMetadata(body.metadata, body.attachments) as any,
}, },
}); });
@@ -688,13 +861,7 @@ export async function registerRoutes(app: FastifyInstance) {
chatId: z.string().optional(), chatId: z.string().optional(),
provider: z.enum(["openai", "anthropic", "xai"]), provider: z.enum(["openai", "anthropic", "xai"]),
model: z.string().min(1), model: z.string().min(1),
messages: z.array( messages: z.array(CompletionMessageSchema),
z.object({
role: z.enum(["system", "user", "assistant", "tool"]),
content: z.string(),
name: z.string().optional(),
})
),
temperature: z.number().min(0).max(2).optional(), temperature: z.number().min(0).max(2).optional(),
maxTokens: z.number().int().positive().optional(), maxTokens: z.number().int().positive().optional(),
}); });
@@ -728,13 +895,7 @@ export async function registerRoutes(app: FastifyInstance) {
chatId: z.string().optional(), chatId: z.string().optional(),
provider: z.enum(["openai", "anthropic", "xai"]), provider: z.enum(["openai", "anthropic", "xai"]),
model: z.string().min(1), model: z.string().min(1),
messages: z.array( messages: z.array(CompletionMessageSchema),
z.object({
role: z.enum(["system", "user", "assistant", "tool"]),
content: z.string(),
name: z.string().optional(),
})
),
temperature: z.number().min(0).max(2).optional(), temperature: z.number().min(0).max(2).optional(),
maxTokens: z.number().int().positive().optional(), maxTokens: z.number().int().positive().optional(),
}); });
@@ -753,6 +914,7 @@ export async function registerRoutes(app: FastifyInstance) {
} }
reply.raw.writeHead(200, buildSseHeaders(typeof req.headers.origin === "string" ? req.headers.origin : undefined)); reply.raw.writeHead(200, buildSseHeaders(typeof req.headers.origin === "string" ? req.headers.origin : undefined));
reply.raw.flushHeaders();
const send = (event: string, data: any) => { const send = (event: string, data: any) => {
reply.raw.write(`event: ${event}\n`); reply.raw.write(`event: ${event}\n`);

View File

@@ -0,0 +1,160 @@
import { env } from "../env.js";
// Hard timeout for any HTTP request to the SearXNG instance.
const SEARXNG_TIMEOUT_MS = 12_000;
// SearXNG `categories` query parameter sent with every search.
const DEFAULT_SEARXNG_CATEGORIES = "general";
// Caller-supplied options; domain lists are normalized to bare hostnames before use.
export type SearxngSearchOptions = {
  numResults: number;
  includeDomains?: string[];
  excludeDomains?: string[];
};
// One normalized result row; all text fields are whitespace-compacted, `text`
// is a shorter clip of `summary`.
export type SearxngSearchResult = {
  title: string | null;
  url: string | null;
  publishedDate: string | null;
  summary: string | null;
  text: string | null;
  engines: string[];
};
// Response envelope; requestId is always null for SearXNG (presumably kept to
// match another provider's response shape — TODO confirm against callers).
export type SearxngSearchResponse = {
  query: string;
  requestId: null;
  results: SearxngSearchResult[];
};
// Truncate `input` to at most `maxCharacters`, appending an ellipsis when clipped.
function clipText(input: string, maxCharacters: number) {
  if (input.length <= maxCharacters) return input;
  return `${input.slice(0, maxCharacters)}...`;
}
// Collapse every whitespace run into a single space and trim the ends.
// Simplification: the original chained CR removal, trailing-tab cleanup, and
// blank-line squeezing before the final `\s+` pass — but `\s` already matches
// CR/LF/tabs, so the final replace subsumed all three. Behavior is unchanged.
function compactWhitespace(input: string) {
  return input.replace(/\s+/g, " ").trim();
}
// Resolve the configured SearXNG base URL, guaranteeing a trailing slash so it
// can serve as the base argument to `new URL(...)`. Throws when unset.
function requireSearxngBaseUrl() {
  const base = env.SEARXNG_BASE_URL;
  if (!base) {
    throw new Error("SEARXNG_BASE_URL not set");
  }
  return base.endsWith("/") ? base : `${base}/`;
}
// Normalize a user-supplied domain (or full URL) to a bare lowercase hostname
// without a leading "www."; returns null for blank input.
function normalizeDomain(input: string) {
  const candidate = input.trim().toLowerCase();
  if (!candidate) return null;
  try {
    const withScheme = candidate.includes("://") ? candidate : `https://${candidate}`;
    return new URL(withScheme).hostname.replace(/^www\./, "");
  } catch {
    // Unparseable as a URL: take everything before the first path/query/hash.
    const host = candidate.split(/[/?#]/, 1)[0];
    return host?.replace(/^www\./, "") || null;
  }
}
// Normalize and dedupe a domain list, dropping blanks; preserves first-seen order.
function normalizeDomains(input: string[] | undefined) {
  const seen = new Set<string>();
  for (const raw of input ?? []) {
    const domain = normalizeDomain(raw);
    if (domain) seen.add(domain);
  }
  return [...seen];
}
// True when `urlRaw`'s hostname equals `domain` or is a subdomain of it.
// Unparseable/absent URLs never match.
function hostnameMatchesDomain(urlRaw: string | null, domain: string) {
  if (!urlRaw) return false;
  let hostname: string;
  try {
    hostname = new URL(urlRaw).hostname.toLowerCase().replace(/^www\./, "");
  } catch {
    return false;
  }
  return hostname === domain || hostname.endsWith(`.${domain}`);
}
// Apply include/exclude domain filters to results. An empty include list
// allows everything; the exclude list always wins.
function filterResultsByDomains(results: SearxngSearchResult[], options: SearxngSearchOptions) {
  const allow = normalizeDomains(options.includeDomains);
  const deny = normalizeDomains(options.excludeDomains);
  return results.filter((result) => {
    const allowed = allow.length === 0 || allow.some((domain) => hostnameMatchesDomain(result.url, domain));
    const denied = deny.some((domain) => hostnameMatchesDomain(result.url, domain));
    return allowed && !denied;
  });
}
// Rewrite the query with site:/-site: operators for domain filtering, since
// SearXNG has no dedicated include/exclude parameters.
function buildSearxngQuery(query: string, options: SearxngSearchOptions) {
  const include = normalizeDomains(options.includeDomains);
  const exclude = normalizeDomains(options.excludeDomains);
  let includeClause = "";
  if (include.length === 1) {
    includeClause = `site:${include[0]}`;
  } else if (include.length > 1) {
    // Multiple includes are OR'd inside parentheses.
    includeClause = `(${include.map((domain) => `site:${domain}`).join(" OR ")})`;
  }
  const excludeClause = exclude.map((domain) => `-site:${domain}`).join(" ");
  return [query, includeClause, excludeClause].filter(Boolean).join(" ");
}
// Assemble the SearXNG /search URL with the rewritten query and fixed
// parameters (JSON output, auto language, moderate safesearch).
function buildSearchUrl(query: string, options: SearxngSearchOptions) {
  const url = new URL("search", requireSearxngBaseUrl());
  const params: Record<string, string> = {
    q: buildSearxngQuery(query, options),
    categories: DEFAULT_SEARXNG_CATEGORIES,
    language: "auto",
    safesearch: "1",
    format: "json",
  };
  for (const [key, value] of Object.entries(params)) {
    url.searchParams.set(key, value);
  }
  return url;
}
// Fetch from the SearXNG instance with a hard timeout (via AbortController)
// and a stable User-Agent; the timer is always cleared, even on failure.
async function fetchSearxng(url: URL, accept: string) {
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), SEARXNG_TIMEOUT_MS);
  try {
    const response = await fetch(url, {
      redirect: "follow",
      signal: controller.signal,
      headers: {
        "User-Agent": "SybilBot/1.0 (+https://sybil.local)",
        Accept: accept,
      },
    });
    return response;
  } finally {
    clearTimeout(timer);
  }
}
// Coerce unknown JSON values to a compacted non-empty string, else null.
function stringOrNull(value: unknown) {
  if (typeof value !== "string") return null;
  const compacted = compactWhitespace(value);
  return compacted.length > 0 ? compacted : null;
}
// Keep only non-empty strings from an unknown JSON value, whitespace-compacted.
function stringArray(value: unknown) {
  if (!Array.isArray(value)) return [];
  const out: string[] = [];
  for (const item of value) {
    if (typeof item !== "string") continue;
    const compacted = compactWhitespace(item);
    if (compacted) out.push(compacted);
  }
  return out;
}
// Map one raw SearXNG JSON result into the normalized shape. `text` is a
// shorter clip of the same summary; engine names come from either the plural
// `engines` array or the singular `engine` field.
function mapJsonResult(result: any): SearxngSearchResult {
  const summary = stringOrNull(result?.content) ?? stringOrNull(result?.snippet);
  const enginesRaw = result?.engines ?? (typeof result?.engine === "string" ? [result.engine] : []);
  return {
    title: stringOrNull(result?.title),
    url: stringOrNull(result?.url),
    publishedDate: stringOrNull(result?.publishedDate) ?? stringOrNull(result?.published_date),
    summary: summary === null ? null : clipText(summary, 1_400),
    text: summary === null ? null : clipText(summary, 700),
    engines: stringArray(enginesRaw),
  };
}
// Run a SearXNG JSON search, validate the response, and return normalized,
// domain-filtered results capped at options.numResults. Error bodies are
// drained (arrayBuffer) before throwing so the connection can be reused.
export async function searchSearxng(query: string, options: SearxngSearchOptions): Promise<SearxngSearchResponse> {
  const response = await fetchSearxng(buildSearchUrl(query, options), "application/json");
  if (!response.ok) {
    await response.arrayBuffer();
    throw new Error(`SearXNG JSON search failed with status ${response.status}. Verify search.formats includes json.`);
  }
  const contentType = response.headers.get("content-type")?.toLowerCase() ?? "";
  if (!contentType.includes("application/json")) {
    await response.arrayBuffer();
    throw new Error(`SearXNG JSON search returned ${contentType || "unknown content type"}.`);
  }
  const data: any = await response.json();
  const mapped = Array.isArray(data?.results) ? data.results.map(mapJsonResult) : [];
  const filtered = filterResultsByDomains(mapped, options).slice(0, options.numResults);
  return { query, requestId: null, results: filtered };
}

View File

@@ -0,0 +1,107 @@
import assert from "node:assert/strict";
import test from "node:test";
import {
runToolAwareChatCompletionsStream,
runToolAwareOpenAIChatStream,
type ToolAwareStreamingEvent,
} from "../src/llm/chat-tools.js";
// Replay a fixed event list as an async iterable, yielding to the microtask
// queue before each item to mimic real streaming timing.
async function* streamFrom(events: any[]) {
  for (let index = 0; index < events.length; index += 1) {
    await Promise.resolve();
    yield events[index];
  }
}
// Drain an async stream of tool-aware events into an array for assertions.
async function collectEvents(iterable: AsyncIterable<ToolAwareStreamingEvent>) {
  const collected: ToolAwareStreamingEvent[] = [];
  for await (const item of iterable) {
    collected.push(item);
  }
  return collected;
}
// Streaming contract: the Responses-API adapter must surface each
// response.output_text.delta immediately (two "delta" events here), then end
// with a single "done" carrying the fully accumulated text and usage.
test("OpenAI Responses stream emits text deltas as they arrive", async () => {
  const outputMessage = {
    id: "msg_1",
    type: "message",
    role: "assistant",
    status: "completed",
    content: [{ type: "output_text", text: "Hello" }],
  };
  // Fake OpenAI client: responses.create returns a canned event stream.
  const client = {
    responses: {
      create: async () =>
        streamFrom([
          { type: "response.output_item.added", item: { ...outputMessage, content: [] }, output_index: 0 },
          { type: "response.output_text.delta", delta: "Hel", output_index: 0, content_index: 0 },
          { type: "response.output_text.delta", delta: "lo", output_index: 0, content_index: 0 },
          { type: "response.output_item.done", item: outputMessage, output_index: 0 },
          {
            type: "response.completed",
            response: {
              status: "completed",
              output_text: "Hello",
              output: [outputMessage],
              usage: { input_tokens: 2, output_tokens: 1, total_tokens: 3 },
            },
          },
        ]),
    },
  };
  const events = await collectEvents(
    runToolAwareOpenAIChatStream({
      client: client as any,
      model: "gpt-test",
      messages: [{ role: "user", content: "Say hello" }],
    })
  );
  assert.deepEqual(
    events.map((event) => event.type),
    ["delta", "delta", "done"]
  );
  assert.deepEqual(
    events.filter((event) => event.type === "delta").map((event) => event.text),
    ["Hel", "lo"]
  );
  // The done event's result text must equal the concatenated deltas.
  assert.equal(events.at(-1)?.type === "done" ? events.at(-1)?.result.text : null, "Hello");
});
// Same streaming contract as above, but for the Chat Completions adapter used
// by OpenAI-compatible providers (e.g. xAI): content deltas stream through as
// "delta" events and the final chunk (finish_reason + usage) yields "done".
test("OpenAI-compatible Chat Completions stream emits text deltas as they arrive", async () => {
  // Fake client: chat.completions.create returns canned streaming chunks.
  const client = {
    chat: {
      completions: {
        create: async () =>
          streamFrom([
            { choices: [{ delta: { role: "assistant" } }] },
            { choices: [{ delta: { content: "Hel" } }] },
            { choices: [{ delta: { content: "lo" } }] },
            {
              choices: [{ delta: {}, finish_reason: "stop" }],
              usage: { prompt_tokens: 2, completion_tokens: 1, total_tokens: 3 },
            },
          ]),
      },
    },
  };
  const events = await collectEvents(
    runToolAwareChatCompletionsStream({
      client: client as any,
      model: "grok-test",
      messages: [{ role: "user", content: "Say hello" }],
    })
  );
  assert.deepEqual(
    events.map((event) => event.type),
    ["delta", "delta", "done"]
  );
  assert.deepEqual(
    events.filter((event) => event.type === "delta").map((event) => event.text),
    ["Hel", "lo"]
  );
  // The done event's result text must equal the concatenated deltas.
  assert.equal(events.at(-1)?.type === "done" ? events.at(-1)?.result.text : null, "Hello");
});

View File

@@ -40,6 +40,10 @@ Default dev URL: `http://localhost:5173`
- Composer adapts to the active item: - Composer adapts to the active item:
- Chat sends `POST /v1/chat-completions/stream` (SSE). - Chat sends `POST /v1/chat-completions/stream` (SSE).
- Search sends `POST /v1/searches/:searchId/run/stream` (SSE). - Search sends `POST /v1/searches/:searchId/run/stream` (SSE).
- Keyboard shortcuts:
- `Cmd/Ctrl+J`: start a new chat.
- `Shift+Cmd/Ctrl+J`: start a new search.
- `Cmd/Ctrl+Up/Down`: move through the sidebar list.
Client API contract docs: Client API contract docs:
- `../docs/api/rest.md` - `../docs/api/rest.md`

View File

@@ -1,13 +1,15 @@
import { useEffect, useMemo, useRef, useState } from "preact/hooks"; import { useEffect, useMemo, useRef, useState } from "preact/hooks";
import { Check, ChevronDown, Globe2, Menu, MessageSquare, Plus, Search, SendHorizontal, Trash2 } from "lucide-preact"; import { Check, ChevronDown, Globe2, Menu, MessageSquare, Paperclip, Plus, Search, SendHorizontal, Trash2 } from "lucide-preact";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import { Textarea } from "@/components/ui/textarea"; import { Textarea } from "@/components/ui/textarea";
import { Separator } from "@/components/ui/separator"; import { Separator } from "@/components/ui/separator";
import { AuthScreen } from "@/components/auth/auth-screen"; import { AuthScreen } from "@/components/auth/auth-screen";
import { ChatAttachmentList } from "@/components/chat/chat-attachment-list";
import { ChatMessagesPanel } from "@/components/chat/chat-messages-panel"; import { ChatMessagesPanel } from "@/components/chat/chat-messages-panel";
import { SearchResultsPanel } from "@/components/search/search-results-panel"; import { SearchResultsPanel } from "@/components/search/search-results-panel";
import { import {
createChat, createChat,
createChatFromSearch,
createSearch, createSearch,
deleteChat, deleteChat,
deleteSearch, deleteSearch,
@@ -19,6 +21,8 @@ import {
runCompletionStream, runCompletionStream,
runSearchStream, runSearchStream,
suggestChatTitle, suggestChatTitle,
getMessageAttachments,
type ChatAttachment,
type ModelCatalogResponse, type ModelCatalogResponse,
type Provider, type Provider,
type ChatDetail, type ChatDetail,
@@ -97,12 +101,181 @@ const EMPTY_MODEL_PREFERENCES: ProviderModelPreferences = {
xai: null, xai: null,
}; };
// Spacing (px) kept below the transcript content.
const TRANSCRIPT_BOTTOM_GAP = 20;
// Bounds (px) for the spacer inserted under a fresh reply; actual height is a
// fraction of the viewport, clamped to this range.
const REPLY_SCROLL_BUFFER_MIN = 288;
const REPLY_SCROLL_BUFFER_MAX = 576;
const REPLY_SCROLL_BUFFER_VIEWPORT_RATIO = 0.52;
// Client-side upload limits; the server enforces matching limits in its schema.
const MAX_CHAT_ATTACHMENTS = 8;
const MAX_IMAGE_ATTACHMENT_BYTES = 6 * 1024 * 1024;
const MAX_TEXT_ATTACHMENT_BYTES = 8 * 1024 * 1024;
const MAX_TEXT_ATTACHMENT_CHARS = 200_000;
// Value for the file input's `accept` attribute: PNG/JPEG plus text-like files.
const CHAT_FILE_ACCEPT =
  ".png,.jpg,.jpeg,.txt,.md,.markdown,.csv,.tsv,.json,.jsonl,.xml,.yaml,.yml,.html,.htm,.css,.js,.jsx,.ts,.tsx,.py,.rb,.java,.c,.cc,.cpp,.h,.hpp,.go,.rs,.sh,.sql,.log,.toml,.ini,.cfg,.conf,.swift,.kt,.m,.mm";
// Extensions treated as text attachments when the MIME type is missing/unknown.
const TEXT_ATTACHMENT_EXTENSIONS = new Set([
  ".txt",
  ".md",
  ".markdown",
  ".csv",
  ".tsv",
  ".json",
  ".jsonl",
  ".xml",
  ".yaml",
  ".yml",
  ".html",
  ".htm",
  ".css",
  ".js",
  ".jsx",
  ".ts",
  ".tsx",
  ".py",
  ".rb",
  ".java",
  ".c",
  ".cc",
  ".cpp",
  ".h",
  ".hpp",
  ".go",
  ".rs",
  ".sh",
  ".sql",
  ".log",
  ".toml",
  ".ini",
  ".cfg",
  ".conf",
  ".swift",
  ".kt",
  ".m",
  ".mm",
]);
// Non-"text/*" MIME types that are still treated as text attachments.
const TEXT_ATTACHMENT_MIME_TYPES = new Set([
  "application/json",
  "application/ld+json",
  "application/sql",
  "application/toml",
  "application/x-httpd-php",
  "application/x-javascript",
  "application/x-sh",
  "application/xml",
  "application/yaml",
  "application/x-yaml",
  "image/svg+xml",
]);
function getModelOptions(catalog: ModelCatalogResponse["providers"], provider: Provider) { function getModelOptions(catalog: ModelCatalogResponse["providers"], provider: Provider) {
const providerModels = catalog[provider]?.models ?? []; const providerModels = catalog[provider]?.models ?? [];
if (providerModels.length) return providerModels; if (providerModels.length) return providerModels;
return PROVIDER_FALLBACK_MODELS[provider]; return PROVIDER_FALLBACK_MODELS[provider];
} }
// Height (px) of the spacer under a fresh reply so the new exchange can scroll
// to the top: proportional to the viewport height, clamped to [MIN, MAX].
// Falls back to MIN during SSR where `window` is unavailable.
function getReplyScrollBufferHeight() {
  if (typeof window === "undefined") return REPLY_SCROLL_BUFFER_MIN;
  const proportional = Math.round(window.innerHeight * REPLY_SCROLL_BUFFER_VIEWPORT_RATIO);
  return Math.min(REPLY_SCROLL_BUFFER_MAX, Math.max(REPLY_SCROLL_BUFFER_MIN, proportional));
}
// Lowercased extension of `filename` including the dot, or "" when none.
function getFileExtension(filename: string) {
  const dot = filename.lastIndexOf(".");
  if (dot < 0) return "";
  return filename.slice(dot).toLowerCase();
}
// Unique id for a pending attachment: crypto.randomUUID when available,
// otherwise a timestamp + random base36 suffix for older runtimes.
function createAttachmentId() {
  const hasRandomUuid = typeof crypto !== "undefined" && typeof crypto.randomUUID === "function";
  if (hasRandomUuid) return crypto.randomUUID();
  const suffix = Math.random().toString(36).slice(2, 10);
  return `att-${Date.now()}-${suffix}`;
}
// Resolve a File to "image/png" | "image/jpeg" via its declared MIME type or,
// failing that, its extension. Returns null for unsupported images.
function inferImageMimeType(file: File) {
  const declared = file.type;
  if (declared === "image/png" || declared === "image/jpeg") return declared;
  switch (getFileExtension(file.name)) {
    case ".png":
      return "image/png";
    case ".jpg":
    case ".jpeg":
      return "image/jpeg";
    default:
      return null;
  }
}
// A file is "text-like" when its MIME type is text/*, in the known text MIME
// set, or its extension is in the known text-extension set.
function isTextLikeFile(file: File) {
  const mimeType = file.type.toLowerCase();
  return (
    mimeType.startsWith("text/") ||
    TEXT_ATTACHMENT_MIME_TYPES.has(mimeType) ||
    TEXT_ATTACHMENT_EXTENSIONS.has(getFileExtension(file.name))
  );
}
// Base64-encode an ArrayBuffer, converting 32 KiB chunks at a time so the
// String.fromCharCode spread never exceeds the engine's argument-count limit.
function arrayBufferToBase64(buffer: ArrayBuffer) {
  const bytes = new Uint8Array(buffer);
  const step = 0x8000;
  const pieces: string[] = [];
  for (let offset = 0; offset < bytes.length; offset += step) {
    pieces.push(String.fromCharCode(...bytes.subarray(offset, offset + step)));
  }
  return btoa(pieces.join(""));
}
// Convert a browser File into a ChatAttachment payload.
// PNG/JPEG files become base64 data URLs; text-like files are read as text
// with CRLF -> LF and NUL-stripping normalization, then clipped to
// MAX_TEXT_ATTACHMENT_CHARS (with `truncated` set). Throws a user-facing
// Error for oversized or unsupported files.
async function buildChatAttachment(file: File): Promise<ChatAttachment> {
  const imageMimeType = inferImageMimeType(file);
  if (imageMimeType) {
    if (file.size > MAX_IMAGE_ATTACHMENT_BYTES) {
      throw new Error(`Image '${file.name}' exceeds the 6 MB upload limit.`);
    }
    const base64 = arrayBufferToBase64(await file.arrayBuffer());
    return {
      kind: "image",
      id: createAttachmentId(),
      filename: file.name,
      mimeType: imageMimeType,
      sizeBytes: file.size,
      dataUrl: `data:${imageMimeType};base64,${base64}`,
    };
  }
  if (!isTextLikeFile(file)) {
    throw new Error(`Unsupported file type for '${file.name}'. Use PNG/JPEG images or text-based files.`);
  }
  if (file.size > MAX_TEXT_ATTACHMENT_BYTES) {
    throw new Error(`Text file '${file.name}' exceeds the 8 MB upload limit.`);
  }
  // Normalize line endings and strip NUL bytes before measuring length.
  const normalizedText = (await file.text()).replace(/\r\n/g, "\n").replace(/\u0000/g, "");
  const truncated = normalizedText.length > MAX_TEXT_ATTACHMENT_CHARS;
  return {
    kind: "text",
    id: createAttachmentId(),
    filename: file.name,
    // Browsers may leave `type` empty for unknown extensions; default it.
    mimeType: file.type || "text/plain",
    sizeBytes: file.size,
    text: truncated ? normalizedText.slice(0, MAX_TEXT_ATTACHMENT_CHARS) : normalizedText,
    truncated,
  };
}
// One-line summary of attachment filenames. A single file is shown as just its
// name; multiple files get an "Attached:" prefix. Empty input yields "".
function buildAttachmentSummary(attachments: ChatAttachment[]) {
  if (attachments.length === 0) return "";
  const filenames = attachments.map((attachment) => attachment.filename).join(", ");
  if (attachments.length === 1) return filenames;
  return `Attached: ${filenames}`;
}
// Extract Files from a drag/paste DataTransfer. Prefers the items API (more
// reliable during dragover in some browsers) and falls back to `files`.
function getFilesFromDataTransfer(dataTransfer: DataTransfer | null) {
  if (!dataTransfer) return [];
  const viaItems: File[] = [];
  for (const item of Array.from(dataTransfer.items ?? [])) {
    if (item.kind !== "file") continue;
    const file = item.getAsFile();
    if (file instanceof File) viaItems.push(file);
  }
  return viaItems.length ? viaItems : Array.from(dataTransfer.files ?? []);
}
// True when the DataTransfer carries files — either advertised via the
// "Files" type (available during dragover) or actually extractable.
function hasFileTransfer(dataTransfer: DataTransfer | null) {
  if (!dataTransfer) return false;
  const advertised = Array.from(dataTransfer.types ?? []).includes("Files");
  return advertised || getFilesFromDataTransfer(dataTransfer).length > 0;
}
function loadStoredModelPreferences() { function loadStoredModelPreferences() {
if (typeof window === "undefined") return EMPTY_MODEL_PREFERENCES; if (typeof window === "undefined") return EMPTY_MODEL_PREFERENCES;
try { try {
@@ -164,6 +337,10 @@ function isToolCallLogMessage(message: Message) {
return asToolLogMetadata(message.metadata) !== null; return asToolLogMetadata(message.metadata) !== null;
} }
// System messages (e.g. injected search context) are hidden from the transcript.
function isDisplayableMessage(entry: Message) {
  return entry.role !== "system";
}
function buildOptimisticToolMessage(event: ToolCallEvent): Message { function buildOptimisticToolMessage(event: ToolCallEvent): Message {
return { return {
id: `temp-tool-${event.toolCallId}`, id: `temp-tool-${event.toolCallId}`,
@@ -329,8 +506,12 @@ function ModelCombobox({ options, value, onChange, disabled = false }: ModelComb
function getChatTitle(chat: Pick<ChatSummary, "title">, messages?: ChatDetail["messages"]) { function getChatTitle(chat: Pick<ChatSummary, "title">, messages?: ChatDetail["messages"]) {
if (chat.title?.trim()) return chat.title.trim(); if (chat.title?.trim()) return chat.title.trim();
const firstUserMessage = messages?.find((m) => m.role === "user")?.content.trim(); const firstUserMessage = messages?.find((message) => message.role === "user");
if (firstUserMessage) return firstUserMessage.slice(0, 48); const firstUserText = firstUserMessage?.content.trim();
if (firstUserText) return firstUserText.slice(0, 48);
const firstUserAttachments = firstUserMessage ? getMessageAttachments(firstUserMessage.metadata) : [];
const attachmentSummary = buildAttachmentSummary(firstUserAttachments);
if (attachmentSummary) return attachmentSummary.slice(0, 48);
return "New chat"; return "New chat";
} }
@@ -427,8 +608,11 @@ export default function App() {
const [isLoadingCollections, setIsLoadingCollections] = useState(false); const [isLoadingCollections, setIsLoadingCollections] = useState(false);
const [isLoadingSelection, setIsLoadingSelection] = useState(false); const [isLoadingSelection, setIsLoadingSelection] = useState(false);
const [isSending, setIsSending] = useState(false); const [isSending, setIsSending] = useState(false);
const [isStartingSearchChat, setIsStartingSearchChat] = useState(false);
const [pendingChatState, setPendingChatState] = useState<{ chatId: string | null; messages: Message[] } | null>(null); const [pendingChatState, setPendingChatState] = useState<{ chatId: string | null; messages: Message[] } | null>(null);
const [composer, setComposer] = useState(""); const [composer, setComposer] = useState("");
const [pendingAttachments, setPendingAttachments] = useState<ChatAttachment[]>([]);
const [isComposerDropActive, setIsComposerDropActive] = useState(false);
const [provider, setProvider] = useState<Provider>("openai"); const [provider, setProvider] = useState<Provider>("openai");
const [modelCatalog, setModelCatalog] = useState<ModelCatalogResponse["providers"]>(EMPTY_MODEL_CATALOG); const [modelCatalog, setModelCatalog] = useState<ModelCatalogResponse["providers"]>(EMPTY_MODEL_CATALOG);
const [providerModelPreferences, setProviderModelPreferences] = useState<ProviderModelPreferences>(() => loadStoredModelPreferences()); const [providerModelPreferences, setProviderModelPreferences] = useState<ProviderModelPreferences>(() => loadStoredModelPreferences());
@@ -437,21 +621,55 @@ export default function App() {
return stored.openai ?? PROVIDER_FALLBACK_MODELS.openai[0]; return stored.openai ?? PROVIDER_FALLBACK_MODELS.openai[0];
}); });
const [error, setError] = useState<string | null>(null); const [error, setError] = useState<string | null>(null);
const [transcriptTailSpacerHeight, setTranscriptTailSpacerHeight] = useState(TRANSCRIPT_BOTTOM_GAP);
const transcriptContainerRef = useRef<HTMLDivElement>(null); const transcriptContainerRef = useRef<HTMLDivElement>(null);
const transcriptEndRef = useRef<HTMLDivElement>(null); const transcriptEndRef = useRef<HTMLDivElement>(null);
const contextMenuRef = useRef<HTMLDivElement>(null); const contextMenuRef = useRef<HTMLDivElement>(null);
const fileInputRef = useRef<HTMLInputElement>(null);
const dragDepthRef = useRef(0);
const pendingAttachmentsRef = useRef<ChatAttachment[]>([]);
const selectedItemRef = useRef<SidebarSelection | null>(null); const selectedItemRef = useRef<SidebarSelection | null>(null);
const pendingTitleGenerationRef = useRef<Set<string>>(new Set()); const pendingTitleGenerationRef = useRef<Set<string>>(new Set());
const searchRunAbortRef = useRef<AbortController | null>(null); const searchRunAbortRef = useRef<AbortController | null>(null);
const searchRunCounterRef = useRef(0); const searchRunCounterRef = useRef(0);
const shouldAutoScrollRef = useRef(true); const shouldAutoScrollRef = useRef(true);
const wasSendingRef = useRef(false); const wasSendingRef = useRef(false);
const pendingReplyScrollRef = useRef(false);
const transcriptTailSpacerHeightRef = useRef(TRANSCRIPT_BOTTOM_GAP);
const [contextMenu, setContextMenu] = useState<ContextMenuState | null>(null); const [contextMenu, setContextMenu] = useState<ContextMenuState | null>(null);
const [isMobileSidebarOpen, setIsMobileSidebarOpen] = useState(false); const [isMobileSidebarOpen, setIsMobileSidebarOpen] = useState(false);
const [sidebarQuery, setSidebarQuery] = useState(""); const [sidebarQuery, setSidebarQuery] = useState("");
const initialRouteSelectionRef = useRef<SidebarSelection | null>(readSidebarSelectionFromUrl()); const initialRouteSelectionRef = useRef<SidebarSelection | null>(readSidebarSelectionFromUrl());
const hasSyncedSelectionHistoryRef = useRef(false); const hasSyncedSelectionHistoryRef = useRef(false);
const setTranscriptTailSpacer = (height: number) => {
const nextHeight = Math.max(TRANSCRIPT_BOTTOM_GAP, Math.ceil(height));
transcriptTailSpacerHeightRef.current = nextHeight;
setTranscriptTailSpacerHeight(nextHeight);
};
const expandTranscriptTailSpacer = (height: number) => {
const targetHeight = Math.max(TRANSCRIPT_BOTTOM_GAP, Math.ceil(height));
setTranscriptTailSpacerHeight((currentHeight) => {
const nextHeight = Math.max(currentHeight, targetHeight);
transcriptTailSpacerHeightRef.current = nextHeight;
return nextHeight;
});
};
const settleTranscriptTailSpacer = () => {
const container = transcriptContainerRef.current;
const currentSpacerHeight = transcriptTailSpacerHeightRef.current;
if (!container) {
setTranscriptTailSpacer(TRANSCRIPT_BOTTOM_GAP);
return;
}
const scrollHeightWithoutSpacer = container.scrollHeight - currentSpacerHeight;
const requiredSpacerHeight = container.scrollTop + container.clientHeight - scrollHeightWithoutSpacer;
setTranscriptTailSpacer(requiredSpacerHeight);
};
const focusComposer = () => { const focusComposer = () => {
if (typeof window === "undefined") return; if (typeof window === "undefined") return;
window.requestAnimationFrame(() => { window.requestAnimationFrame(() => {
@@ -468,6 +686,10 @@ export default function App() {
textarea.style.height = `${textarea.scrollHeight}px`; textarea.style.height = `${textarea.scrollHeight}px`;
}, [composer]); }, [composer]);
useEffect(() => {
pendingAttachmentsRef.current = pendingAttachments;
}, [pendingAttachments]);
const sidebarItems = useMemo(() => buildSidebarItems(chats, searches), [chats, searches]); const sidebarItems = useMemo(() => buildSidebarItems(chats, searches), [chats, searches]);
const filteredSidebarItems = useMemo(() => { const filteredSidebarItems = useMemo(() => {
const query = sidebarQuery.trim().toLowerCase(); const query = sidebarQuery.trim().toLowerCase();
@@ -490,6 +712,7 @@ export default function App() {
setDraftKind(null); setDraftKind(null);
setPendingChatState(null); setPendingChatState(null);
setComposer(""); setComposer("");
setPendingAttachments([]);
setError(null); setError(null);
}; };
@@ -637,9 +860,18 @@ export default function App() {
}, [providerModelPreferences]); }, [providerModelPreferences]);
const selectedKey = selectedItem ? `${selectedItem.kind}:${selectedItem.id}` : null; const selectedKey = selectedItem ? `${selectedItem.kind}:${selectedItem.id}` : null;
const isChatReplyStreamingInView =
isSending &&
draftKind !== "search" &&
selectedItem?.kind !== "search" &&
!!pendingChatState &&
(!pendingChatState.chatId || (selectedItem?.kind === "chat" && selectedItem.id === pendingChatState.chatId));
useEffect(() => { useEffect(() => {
shouldAutoScrollRef.current = true; shouldAutoScrollRef.current = true;
if (!isSending || !isChatReplyStreamingInView) {
setTranscriptTailSpacer(TRANSCRIPT_BOTTOM_GAP);
}
}, [draftKind, selectedItem?.kind, selectedKey]); }, [draftKind, selectedItem?.kind, selectedKey]);
useEffect(() => { useEffect(() => {
@@ -669,11 +901,27 @@ export default function App() {
if (draftKind === "search" || selectedItem?.kind === "search") return; if (draftKind === "search" || selectedItem?.kind === "search") return;
const wasSending = wasSendingRef.current; const wasSending = wasSendingRef.current;
wasSendingRef.current = isSending; wasSendingRef.current = isSending;
if (wasSending && !isSending) return; if (isSending) return;
if (wasSending) {
shouldAutoScrollRef.current = false;
return;
}
if (!shouldAutoScrollRef.current) return; if (!shouldAutoScrollRef.current) return;
transcriptEndRef.current?.scrollIntoView({ behavior: isSending ? "smooth" : "auto", block: "end" }); transcriptEndRef.current?.scrollIntoView({ behavior: "auto", block: "end" });
}, [draftKind, selectedChat?.messages.length, isSending, selectedItem?.kind, selectedKey]); }, [draftKind, selectedChat?.messages.length, isSending, selectedItem?.kind, selectedKey]);
useEffect(() => {
if (!isChatReplyStreamingInView || !pendingReplyScrollRef.current) return;
pendingReplyScrollRef.current = false;
shouldAutoScrollRef.current = true;
window.requestAnimationFrame(() => {
const container = transcriptContainerRef.current;
if (!container) return;
container.scrollTo({ top: container.scrollHeight, behavior: "smooth" });
});
}, [isChatReplyStreamingInView, pendingChatState?.chatId]);
useEffect(() => { useEffect(() => {
if (isSending) return; if (isSending) return;
const hasWorkspaceSelection = Boolean(selectedItem) || draftKind !== null; const hasWorkspaceSelection = Boolean(selectedItem) || draftKind !== null;
@@ -691,22 +939,26 @@ export default function App() {
const messages = selectedChat?.messages ?? []; const messages = selectedChat?.messages ?? [];
const isSearchMode = draftKind ? draftKind === "search" : selectedItem?.kind === "search"; const isSearchMode = draftKind ? draftKind === "search" : selectedItem?.kind === "search";
const isSearchRunning = isSending && isSearchMode; const isSearchRunning = isSending && isSearchMode;
const isSendingActiveChat = const isSendingActiveChat = isChatReplyStreamingInView;
isSending &&
!isSearchMode && useEffect(() => {
!!pendingChatState && if (isSearchMode && pendingAttachments.length) {
!!pendingChatState.chatId && setPendingAttachments([]);
selectedItem?.kind === "chat" && }
selectedItem.id === pendingChatState.chatId; if (isSearchMode) {
dragDepthRef.current = 0;
setIsComposerDropActive(false);
}
}, [isSearchMode, pendingAttachments.length]);
const displayMessages = useMemo(() => { const displayMessages = useMemo(() => {
if (!pendingChatState) return messages; if (!pendingChatState) return messages.filter(isDisplayableMessage);
if (pendingChatState.chatId) { if (pendingChatState.chatId) {
if (selectedItem?.kind === "chat" && selectedItem.id === pendingChatState.chatId) { if (selectedItem?.kind === "chat" && selectedItem.id === pendingChatState.chatId) {
return pendingChatState.messages; return pendingChatState.messages.filter(isDisplayableMessage);
} }
return messages; return messages.filter(isDisplayableMessage);
} }
return isSearchMode ? messages : pendingChatState.messages; return (isSearchMode ? messages : pendingChatState.messages).filter(isDisplayableMessage);
}, [isSearchMode, messages, pendingChatState, selectedItem]); }, [isSearchMode, messages, pendingChatState, selectedItem]);
const selectedChatSummary = useMemo(() => { const selectedChatSummary = useMemo(() => {
@@ -756,6 +1008,10 @@ export default function App() {
if (selectedSearchSummary) return `${getSearchTitle(selectedSearchSummary)} — Sybil`; if (selectedSearchSummary) return `${getSearchTitle(selectedSearchSummary)} — Sybil`;
return "Sybil"; return "Sybil";
}, [draftKind, selectedChat, selectedChatSummary, selectedItem, selectedSearch, selectedSearchSummary]); }, [draftKind, selectedChat, selectedChatSummary, selectedItem, selectedSearch, selectedSearchSummary]);
const primaryShortcutModifier = useMemo(() => {
if (typeof navigator === "undefined") return "Ctrl";
return /Mac|iPhone|iPad|iPod/i.test(navigator.platform) ? "Cmd" : "Ctrl";
}, []);
useEffect(() => { useEffect(() => {
document.title = pageTitle; document.title = pageTitle;
@@ -768,6 +1024,7 @@ export default function App() {
setSelectedItem(null); setSelectedItem(null);
setSelectedChat(null); setSelectedChat(null);
setSelectedSearch(null); setSelectedSearch(null);
setPendingAttachments([]);
setIsMobileSidebarOpen(false); setIsMobileSidebarOpen(false);
}; };
@@ -778,9 +1035,60 @@ export default function App() {
setSelectedItem(null); setSelectedItem(null);
setSelectedChat(null); setSelectedChat(null);
setSelectedSearch(null); setSelectedSearch(null);
setPendingAttachments([]);
setIsMobileSidebarOpen(false); setIsMobileSidebarOpen(false);
}; };
const selectAdjacentSidebarItem = (direction: -1 | 1) => {
if (!filteredSidebarItems.length) return;
setError(null);
setContextMenu(null);
setDraftKind(null);
setIsMobileSidebarOpen(false);
setSelectedItem((current) => {
const currentIndex = current
? filteredSidebarItems.findIndex((item) => item.kind === current.kind && item.id === current.id)
: -1;
const fallbackIndex = direction > 0 ? 0 : filteredSidebarItems.length - 1;
const nextIndex =
currentIndex < 0
? fallbackIndex
: Math.min(filteredSidebarItems.length - 1, Math.max(0, currentIndex + direction));
const nextItem = filteredSidebarItems[nextIndex];
return { kind: nextItem.kind, id: nextItem.id };
});
};
useEffect(() => {
if (!isAuthenticated) return;
const handleKeyDown = (event: KeyboardEvent) => {
const hasPrimaryModifier = event.metaKey || event.ctrlKey;
if (!hasPrimaryModifier || event.altKey) return;
const key = event.key.toLowerCase();
if (key === "j") {
event.preventDefault();
if (event.shiftKey) {
handleCreateSearch();
} else {
handleCreateChat();
}
focusComposer();
return;
}
if (event.key === "ArrowUp" || event.key === "ArrowDown") {
event.preventDefault();
selectAdjacentSidebarItem(event.key === "ArrowUp" ? -1 : 1);
}
};
window.addEventListener("keydown", handleKeyDown);
return () => window.removeEventListener("keydown", handleKeyDown);
}, [filteredSidebarItems, isAuthenticated]);
const openContextMenu = (event: MouseEvent, item: SidebarSelection) => { const openContextMenu = (event: MouseEvent, item: SidebarSelection) => {
event.preventDefault(); event.preventDefault();
const menuWidth = 160; const menuWidth = 160;
@@ -830,14 +1138,98 @@ export default function App() {
}; };
}, [contextMenu]); }, [contextMenu]);
const handleSendChat = async (content: string) => { const handleOpenAttachmentPicker = () => {
fileInputRef.current?.click();
};
const handleRemovePendingAttachment = (attachmentId: string) => {
setPendingAttachments((current) => current.filter((attachment) => attachment.id !== attachmentId));
};
const appendPendingAttachments = async (files: File[]) => {
if (!files.length) return;
if (isSearchMode) {
setError("Attachments are only available in chat mode.");
return;
}
setError(null);
try {
const attachments = await Promise.all(files.map((file) => buildChatAttachment(file)));
if (pendingAttachmentsRef.current.length + attachments.length > MAX_CHAT_ATTACHMENTS) {
throw new Error(`You can attach up to ${MAX_CHAT_ATTACHMENTS} files per message.`);
}
setPendingAttachments((current) => current.concat(attachments));
focusComposer();
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
setError(message);
}
};
const handleFileSelection = async (event: Event) => {
const input = event.currentTarget as HTMLInputElement;
const files = Array.from(input.files ?? []);
input.value = "";
await appendPendingAttachments(files);
};
const handleComposerPaste = async (event: ClipboardEvent) => {
const files = getFilesFromDataTransfer(event.clipboardData);
if (!files.length) return;
event.preventDefault();
await appendPendingAttachments(files);
};
const handleComposerDragEnter = (event: DragEvent) => {
if (!hasFileTransfer(event.dataTransfer)) return;
event.preventDefault();
if (isSearchMode) return;
dragDepthRef.current += 1;
setIsComposerDropActive(true);
};
const handleComposerDragOver = (event: DragEvent) => {
if (!hasFileTransfer(event.dataTransfer)) return;
event.preventDefault();
if (event.dataTransfer) {
event.dataTransfer.dropEffect = isSearchMode ? "none" : "copy";
}
if (!isSearchMode) {
setIsComposerDropActive(true);
}
};
const handleComposerDragLeave = (event: DragEvent) => {
if (!hasFileTransfer(event.dataTransfer)) return;
event.preventDefault();
if (isSearchMode) return;
dragDepthRef.current = Math.max(0, dragDepthRef.current - 1);
if (dragDepthRef.current === 0) {
setIsComposerDropActive(false);
}
};
const handleComposerDrop = async (event: DragEvent) => {
if (!hasFileTransfer(event.dataTransfer)) return;
event.preventDefault();
dragDepthRef.current = 0;
setIsComposerDropActive(false);
await appendPendingAttachments(getFilesFromDataTransfer(event.dataTransfer));
};
const handleSendChat = async (content: string, attachments: ChatAttachment[]) => {
pendingReplyScrollRef.current = true;
expandTranscriptTailSpacer(getReplyScrollBufferHeight());
const optimisticUserMessage: Message = { const optimisticUserMessage: Message = {
id: `temp-user-${Date.now()}`, id: `temp-user-${Date.now()}`,
createdAt: new Date().toISOString(), createdAt: new Date().toISOString(),
role: "user", role: "user",
content, content,
name: null, name: null,
metadata: null, metadata: attachments.length ? { attachments } : null,
}; };
const optimisticAssistantMessage: Message = { const optimisticAssistantMessage: Message = {
@@ -893,13 +1285,15 @@ export default function App() {
...baseChat.messages ...baseChat.messages
.filter((message) => !isToolCallLogMessage(message)) .filter((message) => !isToolCallLogMessage(message))
.map((message) => ({ .map((message) => ({
role: message.role, role: message.role,
content: message.content, content: message.content,
...(message.name ? { name: message.name } : {}), ...(message.name ? { name: message.name } : {}),
...(getMessageAttachments(message.metadata).length ? { attachments: getMessageAttachments(message.metadata) } : {}),
})), })),
{ {
role: "user", role: "user",
content, content,
...(attachments.length ? { attachments } : {}),
}, },
]; ];
@@ -912,7 +1306,8 @@ export default function App() {
const hasExistingTitle = Boolean(selectedChat?.id === chatId ? selectedChat.title?.trim() : chatSummary?.title?.trim()); const hasExistingTitle = Boolean(selectedChat?.id === chatId ? selectedChat.title?.trim() : chatSummary?.title?.trim());
if (!hasExistingTitle && !pendingTitleGenerationRef.current.has(chatId)) { if (!hasExistingTitle && !pendingTitleGenerationRef.current.has(chatId)) {
pendingTitleGenerationRef.current.add(chatId); pendingTitleGenerationRef.current.add(chatId);
void suggestChatTitle({ chatId, content }) const titleSeed = content || buildAttachmentSummary(attachments) || "Uploaded files";
void suggestChatTitle({ chatId, content: titleSeed })
.then((updatedChat) => { .then((updatedChat) => {
setChats((current) => setChats((current) =>
current.map((chat) => { current.map((chat) => {
@@ -1018,6 +1413,7 @@ export default function App() {
if (currentSelection?.kind === "chat" && currentSelection.id === chatId) { if (currentSelection?.kind === "chat" && currentSelection.id === chatId) {
await refreshChat(chatId); await refreshChat(chatId);
} }
settleTranscriptTailSpacer();
setPendingChatState(null); setPendingChatState(null);
}; };
@@ -1149,11 +1545,59 @@ export default function App() {
await refreshCollections({ kind: "search", id: searchId }); await refreshCollections({ kind: "search", id: searchId });
}; };
const handleStartChatFromSearch = async () => {
if (!selectedSearch || isStartingSearchChat || isSending) return;
setError(null);
setIsStartingSearchChat(true);
try {
const chat = await createChatFromSearch(selectedSearch.id);
setDraftKind(null);
setPendingChatState(null);
setComposer("");
setPendingAttachments([]);
setChats((current) => {
const withoutExisting = current.filter((existing) => existing.id !== chat.id);
return [chat, ...withoutExisting];
});
setSelectedItem({ kind: "chat", id: chat.id });
setSelectedChat({
id: chat.id,
title: chat.title,
createdAt: chat.createdAt,
updatedAt: chat.updatedAt,
initiatedProvider: chat.initiatedProvider,
initiatedModel: chat.initiatedModel,
lastUsedProvider: chat.lastUsedProvider,
lastUsedModel: chat.lastUsedModel,
messages: [],
});
setSelectedSearch(null);
await refreshCollections({ kind: "chat", id: chat.id });
await refreshChat(chat.id);
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
if (message.includes("bearer token")) {
handleAuthFailure(message);
} else {
setError(message);
}
} finally {
setIsStartingSearchChat(false);
}
};
const handleSend = async () => { const handleSend = async () => {
const content = composer.trim(); const content = composer.trim();
if (!content || isSending) return; const attachments = pendingAttachments;
if ((!content && !attachments.length) || isSending) return;
if (isSearchMode && attachments.length) {
setError("Attachments are only available in chat mode.");
return;
}
setComposer(""); setComposer("");
setPendingAttachments([]);
setError(null); setError(null);
setIsSending(true); setIsSending(true);
@@ -1161,7 +1605,7 @@ export default function App() {
if (isSearchMode) { if (isSearchMode) {
await handleSendSearch(content); await handleSendSearch(content);
} else { } else {
await handleSendChat(content); await handleSendChat(content, attachments);
} }
} catch (err) { } catch (err) {
const message = err instanceof Error ? err.message : String(err); const message = err instanceof Error ? err.message : String(err);
@@ -1172,6 +1616,8 @@ export default function App() {
} }
if (!isSearchMode) { if (!isSearchMode) {
setComposer(content);
setPendingAttachments(attachments);
setPendingChatState(null); setPendingChatState(null);
} }
@@ -1240,10 +1686,16 @@ export default function App() {
<Button className="h-11 w-full justify-start gap-3 text-[15px]" onClick={handleCreateChat}> <Button className="h-11 w-full justify-start gap-3 text-[15px]" onClick={handleCreateChat}>
<Plus className="h-4 w-4" /> <Plus className="h-4 w-4" />
New chat New chat
<span className="ml-auto rounded-md border border-violet-100/12 bg-white/5 px-1.5 py-0.5 text-[10px] font-semibold uppercase tracking-wide text-violet-100/52">
{primaryShortcutModifier} J
</span>
</Button> </Button>
<Button className="h-10 w-full justify-start gap-3" variant="secondary" onClick={handleCreateSearch}> <Button className="h-10 w-full justify-start gap-3" variant="secondary" onClick={handleCreateSearch}>
<Search className="h-4 w-4" /> <Search className="h-4 w-4" />
New search New search
<span className="ml-auto rounded-md border border-violet-100/10 bg-white/[0.035] px-1.5 py-0.5 text-[10px] font-semibold uppercase tracking-wide text-violet-100/44">
Shift {primaryShortcutModifier} J
</span>
</Button> </Button>
<div className="relative"> <div className="relative">
<Search className="pointer-events-none absolute left-3 top-1/2 h-4 w-4 -translate-y-1/2 text-violet-200/58" /> <Search className="pointer-events-none absolute left-3 top-1/2 h-4 w-4 -translate-y-1/2 text-violet-200/58" />
@@ -1377,7 +1829,7 @@ export default function App() {
<div <div
ref={transcriptContainerRef} ref={transcriptContainerRef}
className="flex-1 overflow-y-auto px-4 pt-8 md:px-10 lg:px-14 pb-36 md:pb-44" className="flex-1 overflow-y-auto px-4 pt-8 md:px-10 lg:px-14 pb-36 md:pb-44 [overflow-anchor:none]"
onScroll={() => { onScroll={() => {
const container = transcriptContainerRef.current; const container = transcriptContainerRef.current;
if (!container) return; if (!container) return;
@@ -1388,13 +1840,59 @@ export default function App() {
{!isSearchMode ? ( {!isSearchMode ? (
<ChatMessagesPanel messages={displayMessages} isLoading={isLoadingSelection} isSending={isSendingActiveChat} /> <ChatMessagesPanel messages={displayMessages} isLoading={isLoadingSelection} isSending={isSendingActiveChat} />
) : ( ) : (
<SearchResultsPanel search={selectedSearch} isLoading={isLoadingSelection} isRunning={isSearchRunning} /> <SearchResultsPanel
search={selectedSearch}
isLoading={isLoadingSelection}
isRunning={isSearchRunning}
isStartingChat={isStartingSearchChat}
onStartChat={selectedSearch ? handleStartChatFromSearch : undefined}
/>
)} )}
<div
className="mx-auto max-w-4xl"
style={{ height: `${transcriptTailSpacerHeight}px` }}
aria-hidden="true"
/>
<div ref={transcriptEndRef} /> <div ref={transcriptEndRef} />
</div> </div>
<footer className="pointer-events-none absolute inset-x-0 bottom-0 z-10 bg-[linear-gradient(to_top,hsl(235_50%_4%)_0%,hsl(235_50%_4%_/_0.92)_58%,transparent)] p-3 pt-14 md:p-6 md:pt-20"> <footer className="pointer-events-none absolute inset-x-0 bottom-0 z-10 bg-[linear-gradient(to_top,hsl(235_50%_4%)_0%,hsl(235_50%_4%_/_0.92)_58%,transparent)] p-3 pt-14 md:p-6 md:pt-20">
<div className="pointer-events-auto mx-auto max-w-4xl rounded-2xl border border-violet-300/30 bg-[linear-gradient(135deg,hsl(235_48%_7%_/_0.96),hsl(258_48%_11%_/_0.94))] p-2 shadow-lg shadow-black/20"> <div
className={cn(
"pointer-events-auto mx-auto max-w-4xl rounded-2xl border bg-[linear-gradient(135deg,hsl(235_48%_7%_/_0.96),hsl(258_48%_11%_/_0.94))] p-2 shadow-lg shadow-black/20 transition",
isComposerDropActive
? "border-cyan-300/70 shadow-cyan-500/20"
: "border-violet-300/30"
)}
onDragEnter={handleComposerDragEnter}
onDragOver={handleComposerDragOver}
onDragLeave={handleComposerDragLeave}
onDrop={(event) => {
void handleComposerDrop(event);
}}
>
<input
ref={fileInputRef}
type="file"
multiple
accept={CHAT_FILE_ACCEPT}
className="hidden"
onChange={(event) => {
void handleFileSelection(event);
}}
/>
{!isSearchMode && pendingAttachments.length ? (
<div className="px-2 pb-2 pt-1">
<ChatAttachmentList attachments={pendingAttachments} onRemove={handleRemovePendingAttachment} />
</div>
) : null}
{!isSearchMode && isComposerDropActive ? (
<div className="px-3 pb-2">
<div className="rounded-xl border border-dashed border-cyan-300/55 bg-cyan-300/8 px-4 py-3 text-sm text-cyan-100">
Drop files to attach them
</div>
</div>
) : null}
<Textarea <Textarea
id="composer-input" id="composer-input"
rows={1} rows={1}
@@ -1405,6 +1903,9 @@ export default function App() {
textarea.style.height = `${textarea.scrollHeight}px`; textarea.style.height = `${textarea.scrollHeight}px`;
setComposer(textarea.value); setComposer(textarea.value);
}} }}
onPaste={(event) => {
void handleComposerPaste(event);
}}
onKeyDown={(event) => { onKeyDown={(event) => {
if (event.key === "Enter" && !event.shiftKey) { if (event.key === "Enter" && !event.shiftKey) {
event.preventDefault(); event.preventDefault();
@@ -1417,7 +1918,24 @@ export default function App() {
/> />
<div className={cn("flex items-center gap-3 px-2 pb-1", error ? "justify-between" : "justify-end")}> <div className={cn("flex items-center gap-3 px-2 pb-1", error ? "justify-between" : "justify-end")}>
{error ? <p className="min-w-0 truncate text-xs text-rose-300">{error}</p> : null} {error ? <p className="min-w-0 truncate text-xs text-rose-300">{error}</p> : null}
<Button className="h-10 w-10 rounded-lg" onClick={() => void handleSend()} size="icon" disabled={isSending || !composer.trim()}> {!isSearchMode ? (
<Button
className="h-10 w-10 rounded-lg"
onClick={handleOpenAttachmentPicker}
size="icon"
variant="secondary"
disabled={isSending || pendingAttachments.length >= MAX_CHAT_ATTACHMENTS}
aria-label="Attach files"
>
<Paperclip className="h-4 w-4" />
</Button>
) : null}
<Button
className="h-10 w-10 rounded-lg"
onClick={() => void handleSend()}
size="icon"
disabled={isSending || (!composer.trim() && !pendingAttachments.length)}
>
{isSearchMode ? <Search className="h-4 w-4" /> : <SendHorizontal className="h-4 w-4" />} {isSearchMode ? <Search className="h-4 w-4" /> : <SendHorizontal className="h-4 w-4" />}
</Button> </Button>
</div> </div>

View File

@@ -0,0 +1,103 @@
import { FileText, Image as ImageIcon, X } from "lucide-preact";
import type { ChatAttachment } from "@/lib/api";
import { cn } from "@/lib/utils";
type Props = {
attachments: ChatAttachment[];
tone?: "composer" | "user" | "assistant";
onRemove?: (id: string) => void;
};
// Builds a short, display-safe excerpt of an attached text file.
// Carriage returns are stripped, surrounding whitespace trimmed, and the
// preview is capped at 280 characters with a trailing "..." when cut.
function getTextPreview(value: string) {
  const normalized = value.split("\r").join("").trim();
  if (normalized.length === 0) return "(empty file)";
  if (normalized.length <= 280) return normalized;
  return `${normalized.slice(0, 280).trimEnd()}...`;
}
// Maps the rendering context ("tone") to the attachment card's surface
// classes: the "composer" tone is the default styling for the input area.
function getSurfaceClasses(tone: Props["tone"]) {
  switch (tone) {
    case "user":
      return "border-white/12 bg-black/16 text-fuchsia-50";
    case "assistant":
      return "border-violet-300/16 bg-violet-400/8 text-violet-50";
    default:
      return "border-violet-300/18 bg-background/40 text-violet-50";
  }
}
export function ChatAttachmentList({ attachments, tone = "composer", onRemove }: Props) {
if (!attachments.length) return null;
const surfaceClasses = getSurfaceClasses(tone);
return (
<div className="space-y-2">
{attachments.map((attachment) => {
const isImage = attachment.kind === "image";
return (
<div key={attachment.id} className={cn("overflow-hidden rounded-xl border", surfaceClasses)}>
{isImage ? (
<div className="grid gap-0 md:grid-cols-[minmax(0,220px)_minmax(0,1fr)]">
<div className="border-b border-white/10 bg-black/10 md:border-b-0 md:border-r">
<img src={attachment.dataUrl} alt={attachment.filename} className="block max-h-56 w-full object-cover" />
</div>
<div className="flex min-w-0 flex-col gap-2 p-3">
<div className="flex items-start gap-2">
<span className="mt-0.5 rounded-md border border-white/12 bg-white/5 p-1.5">
<ImageIcon className="h-3.5 w-3.5" />
</span>
<div className="min-w-0 flex-1">
<p className="truncate text-sm font-medium">{attachment.filename}</p>
<p className="text-xs text-muted-foreground">{attachment.mimeType}</p>
</div>
{onRemove ? (
<button
type="button"
className="rounded-md border border-white/10 p-1 text-muted-foreground transition hover:bg-white/8 hover:text-foreground"
onClick={() => onRemove(attachment.id)}
aria-label={`Remove ${attachment.filename}`}
>
<X className="h-3.5 w-3.5" />
</button>
) : null}
</div>
</div>
</div>
) : (
<div className="p-3">
<div className="flex items-start gap-2">
<span className="mt-0.5 rounded-md border border-white/12 bg-white/5 p-1.5">
<FileText className="h-3.5 w-3.5" />
</span>
<div className="min-w-0 flex-1">
<div className="flex items-start gap-2">
<div className="min-w-0 flex-1">
<p className="truncate text-sm font-medium">{attachment.filename}</p>
<p className="text-xs text-muted-foreground">
{attachment.mimeType}
{attachment.truncated ? " · truncated" : ""}
</p>
</div>
{onRemove ? (
<button
type="button"
className="rounded-md border border-white/10 p-1 text-muted-foreground transition hover:bg-white/8 hover:text-foreground"
onClick={() => onRemove(attachment.id)}
aria-label={`Remove ${attachment.filename}`}
>
<X className="h-3.5 w-3.5" />
</button>
) : null}
</div>
<pre className="mt-2 overflow-x-auto rounded-lg border border-white/8 bg-black/16 p-3 text-xs leading-5 text-inherit whitespace-pre-wrap">
{getTextPreview(attachment.text)}
</pre>
</div>
</div>
</div>
)}
</div>
);
})}
</div>
);
}

View File

@@ -1,5 +1,6 @@
import { cn } from "@/lib/utils"; import { cn } from "@/lib/utils";
import type { Message } from "@/lib/api"; import { ChatAttachmentList } from "@/components/chat/chat-attachment-list";
import { getMessageAttachments, type Message } from "@/lib/api";
import { MarkdownContent } from "@/components/markdown/markdown-content"; import { MarkdownContent } from "@/components/markdown/markdown-content";
import { Globe2, Link2, Wrench } from "lucide-preact"; import { Globe2, Link2, Wrench } from "lucide-preact";
@@ -68,28 +69,30 @@ export function ChatMessagesPanel({ messages, isLoading, isSending }: Props) {
const isUser = message.role === "user"; const isUser = message.role === "user";
const isPendingAssistant = message.id.startsWith("temp-assistant-") && isSending && message.content.trim().length === 0; const isPendingAssistant = message.id.startsWith("temp-assistant-") && isSending && message.content.trim().length === 0;
const attachments = getMessageAttachments(message.metadata);
return ( return (
<div key={message.id} className={cn("flex", isUser ? "justify-end" : "justify-start")}> <div key={message.id} className={cn("flex", isUser ? "justify-end" : "justify-start")}>
<div <div
className={cn( className={cn(
"max-w-[85%]", "max-w-[85%] space-y-3",
isUser isUser
? "rounded-xl border border-violet-300/24 bg-[linear-gradient(135deg,hsl(258_86%_48%_/_0.86),hsl(278_72%_29%_/_0.86))] px-4 py-3 text-sm leading-6 text-fuchsia-50 shadow-sm" ? "rounded-xl border border-violet-300/24 bg-[linear-gradient(135deg,hsl(258_86%_48%_/_0.86),hsl(278_72%_29%_/_0.86))] px-4 py-3 text-sm leading-6 text-fuchsia-50 shadow-sm"
: "text-base leading-7 text-violet-50" : "text-base leading-7 text-violet-50"
)} )}
> >
{attachments.length ? <ChatAttachmentList attachments={attachments} tone={isUser ? "user" : "assistant"} /> : null}
{isPendingAssistant ? ( {isPendingAssistant ? (
<span className="inline-flex items-center gap-1" aria-label="Assistant is typing" role="status"> <span className="inline-flex items-center gap-1" aria-label="Assistant is typing" role="status">
<span className="inline-block h-1.5 w-1.5 animate-bounce rounded-full bg-muted-foreground [animation-delay:0ms]" /> <span className="inline-block h-1.5 w-1.5 animate-bounce rounded-full bg-muted-foreground [animation-delay:0ms]" />
<span className="inline-block h-1.5 w-1.5 animate-bounce rounded-full bg-muted-foreground [animation-delay:140ms]" /> <span className="inline-block h-1.5 w-1.5 animate-bounce rounded-full bg-muted-foreground [animation-delay:140ms]" />
<span className="inline-block h-1.5 w-1.5 animate-bounce rounded-full bg-muted-foreground [animation-delay:280ms]" /> <span className="inline-block h-1.5 w-1.5 animate-bounce rounded-full bg-muted-foreground [animation-delay:280ms]" />
</span> </span>
) : ( ) : message.content.trim() ? (
<MarkdownContent <MarkdownContent
markdown={message.content} markdown={message.content}
className={cn("[&_a]:text-inherit [&_a]:underline", isUser ? "leading-[1.78] text-fuchsia-50" : "leading-[1.82] text-violet-50")} className={cn("[&_a]:text-inherit [&_a]:underline", isUser ? "leading-[1.78] text-fuchsia-50" : "leading-[1.82] text-violet-50")}
/> />
)} ) : null}
</div> </div>
</div> </div>
); );

View File

@@ -2,6 +2,7 @@ import { useEffect, useRef, useState } from "preact/hooks";
import type { SearchDetail } from "@/lib/api"; import type { SearchDetail } from "@/lib/api";
import { MarkdownContent } from "@/components/markdown/markdown-content"; import { MarkdownContent } from "@/components/markdown/markdown-content";
import { cn } from "@/lib/utils"; import { cn } from "@/lib/utils";
import { MessageSquare } from "lucide-preact";
function formatHost(url: string) { function formatHost(url: string) {
try { try {
@@ -29,6 +30,8 @@ type Props = {
className?: string; className?: string;
enableKeyboardNavigation?: boolean; enableKeyboardNavigation?: boolean;
openLinksInNewTab?: boolean; openLinksInNewTab?: boolean;
isStartingChat?: boolean;
onStartChat?: () => void;
}; };
export function SearchResultsPanel({ export function SearchResultsPanel({
@@ -38,6 +41,8 @@ export function SearchResultsPanel({
className, className,
enableKeyboardNavigation = false, enableKeyboardNavigation = false,
openLinksInNewTab = true, openLinksInNewTab = true,
isStartingChat = false,
onStartChat,
}: Props) { }: Props) {
const ANSWER_COLLAPSED_HEIGHT_CLASS = "h-[3rem]"; const ANSWER_COLLAPSED_HEIGHT_CLASS = "h-[3rem]";
const [isAnswerExpanded, setIsAnswerExpanded] = useState(false); const [isAnswerExpanded, setIsAnswerExpanded] = useState(false);
@@ -133,17 +138,31 @@ export function SearchResultsPanel({
const isAnswerLoading = isRunning && !hasAnswerText; const isAnswerLoading = isRunning && !hasAnswerText;
const hasCitations = citationEntries.length > 0; const hasCitations = citationEntries.length > 0;
const isExpandable = hasAnswerText && (canExpandAnswer || hasCitations); const isExpandable = hasAnswerText && (canExpandAnswer || hasCitations);
const canStartChat = !!search && !isLoading && !isRunning && !isStartingChat && (!!search.answerText || search.results.length > 0);
return ( return (
<div className={className ?? "mx-auto w-full max-w-4xl"}> <div className={className ?? "mx-auto w-full max-w-4xl"}>
{search?.query ? ( {search?.query ? (
<div className="mb-5"> <div className="mb-5 flex flex-col gap-3 md:flex-row md:items-start md:justify-between">
<p className="text-sm text-muted-foreground">Results for</p> <div className="min-w-0">
<h2 className="mt-1 break-words text-xl font-semibold text-violet-50">{search.query}</h2> <p className="text-sm text-muted-foreground">Results for</p>
<p className="mt-1 text-xs text-muted-foreground"> <h2 className="mt-1 break-words text-xl font-semibold text-violet-50">{search.query}</h2>
{search.results.length} result{search.results.length === 1 ? "" : "s"} <p className="mt-1 text-xs text-muted-foreground">
{search.latencyMs ? `${search.latencyMs} ms` : ""} {search.results.length} result{search.results.length === 1 ? "" : "s"}
</p> {search.latencyMs ? `${search.latencyMs} ms` : ""}
</p>
</div>
{onStartChat ? (
<button
type="button"
className="inline-flex h-10 shrink-0 items-center justify-center gap-2 rounded-lg border border-violet-300/24 bg-violet-300/10 px-3 text-sm font-medium text-violet-50 transition hover:bg-violet-300/16 disabled:cursor-not-allowed disabled:opacity-50"
onClick={onStartChat}
disabled={!canStartChat}
>
<MessageSquare className="h-4 w-4" />
{isStartingChat ? "Starting chat..." : "Chat with results"}
</button>
) : null}
</div> </div>
) : null} ) : null}

View File

@@ -90,6 +90,27 @@ export type SearchDetail = {
results: SearchResultItem[]; results: SearchResultItem[];
}; };
/**
 * An image attached to a chat message, carried inline as a data URL.
 * Only PNG/JPEG are representable; getMessageAttachments enforces this
 * when parsing persisted metadata.
 */
export type ChatImageAttachment = {
  kind: "image";
  // Identifier the UI uses to address the attachment (e.g. the remove button).
  id: string;
  filename: string;
  mimeType: "image/png" | "image/jpeg";
  sizeBytes: number;
  // Full image payload encoded as a data: URL.
  dataUrl: string;
};
/**
 * A text file attached to a chat message, carried as raw text content.
 */
export type ChatTextAttachment = {
  kind: "text";
  id: string;
  filename: string;
  // Free-form MIME type; unlike images, any string is accepted.
  mimeType: string;
  sizeBytes: number;
  text: string;
  // Set when the text was cut short; the UI renders a " · truncated" hint.
  truncated?: boolean;
};
/** Discriminated union over the "kind" tag; narrow before accessing payload fields. */
export type ChatAttachment = ChatImageAttachment | ChatTextAttachment;
export type SearchRunRequest = { export type SearchRunRequest = {
query?: string; query?: string;
title?: string; title?: string;
@@ -103,6 +124,7 @@ export type CompletionRequestMessage = {
role: "system" | "user" | "assistant" | "tool"; role: "system" | "user" | "assistant" | "tool";
content: string; content: string;
name?: string; name?: string;
attachments?: ChatAttachment[];
}; };
export type Provider = "openai" | "anthropic" | "xai"; export type Provider = "openai" | "anthropic" | "xai";
@@ -239,10 +261,61 @@ export async function getSearch(searchId: string) {
return data.search; return data.search;
} }
/**
 * Spawns a chat conversation seeded from an existing search.
 *
 * POSTs to the search's chat endpoint; when no options are supplied an
 * empty JSON object body is sent. Resolves with the new chat's summary.
 */
export async function createChatFromSearch(searchId: string, body?: { title?: string }) {
  const payload = body ?? {};
  const response = await api<{ chat: ChatSummary }>(`/v1/searches/${searchId}/chat`, {
    method: "POST",
    body: JSON.stringify(payload),
  });
  return response.chat;
}
export async function deleteSearch(searchId: string) { export async function deleteSearch(searchId: string) {
await api<{ deleted: true }>(`/v1/searches/${searchId}`, { method: "DELETE" }); await api<{ deleted: true }>(`/v1/searches/${searchId}`, { method: "DELETE" });
} }
/**
 * Extracts the validated attachment list from a message's metadata blob.
 *
 * Metadata is persisted untyped JSON, so every field is re-checked at
 * runtime. Entries that are not recognizable image or text attachments are
 * silently dropped; missing string fields fall back to "" and a missing
 * sizeBytes to 0. Images are only accepted with a string dataUrl and a
 * PNG/JPEG mime type; text entries require a string `text` payload.
 */
export function getMessageAttachments(metadata: unknown): ChatAttachment[] {
  if (!metadata || typeof metadata !== "object" || Array.isArray(metadata)) {
    return [];
  }
  const candidates = (metadata as Record<string, unknown>).attachments;
  if (!Array.isArray(candidates)) {
    return [];
  }

  const out: ChatAttachment[] = [];
  for (const candidate of candidates) {
    if (!candidate || typeof candidate !== "object" || Array.isArray(candidate)) {
      continue;
    }
    const raw = candidate as Record<string, unknown>;
    // Shared envelope fields, defaulted leniently like the rest of the parser.
    const base = {
      id: typeof raw.id === "string" ? raw.id : "",
      filename: typeof raw.filename === "string" ? raw.filename : "",
      mimeType: typeof raw.mimeType === "string" ? raw.mimeType : "",
      sizeBytes: typeof raw.sizeBytes === "number" ? raw.sizeBytes : 0,
    };

    if (
      raw.kind === "image" &&
      typeof raw.dataUrl === "string" &&
      (base.mimeType === "image/png" || base.mimeType === "image/jpeg")
    ) {
      out.push({ ...base, kind: "image", mimeType: base.mimeType, dataUrl: raw.dataUrl });
    } else if (raw.kind === "text" && typeof raw.text === "string") {
      out.push({ ...base, kind: "text", text: raw.text, truncated: raw.truncated === true });
    }
  }
  return out;
}
type RunSearchStreamHandlers = { type RunSearchStreamHandlers = {
onSearchResults?: (payload: { requestId: string | null; results: SearchResultItem[] }) => void; onSearchResults?: (payload: { requestId: string | null; results: SearchResultItem[] }) => void;
onSearchError?: (payload: { error: string }) => void; onSearchError?: (payload: { error: string }) => void;

View File

@@ -1 +1 @@
{"root":["./src/App.tsx","./src/main.tsx","./src/root-router.tsx","./src/vite-env.d.ts","./src/components/auth/auth-screen.tsx","./src/components/chat/chat-messages-panel.tsx","./src/components/markdown/markdown-content.tsx","./src/components/search/search-results-panel.tsx","./src/components/ui/button.tsx","./src/components/ui/input.tsx","./src/components/ui/scroll-area.tsx","./src/components/ui/separator.tsx","./src/components/ui/textarea.tsx","./src/hooks/use-session-auth.ts","./src/lib/api.ts","./src/lib/utils.ts","./src/pages/search-route-page.tsx"],"version":"5.9.3"} {"root":["./src/app.tsx","./src/main.tsx","./src/root-router.tsx","./src/vite-env.d.ts","./src/components/auth/auth-screen.tsx","./src/components/chat/chat-attachment-list.tsx","./src/components/chat/chat-messages-panel.tsx","./src/components/markdown/markdown-content.tsx","./src/components/search/search-results-panel.tsx","./src/components/ui/button.tsx","./src/components/ui/input.tsx","./src/components/ui/scroll-area.tsx","./src/components/ui/separator.tsx","./src/components/ui/textarea.tsx","./src/hooks/use-session-auth.ts","./src/lib/api.ts","./src/lib/utils.ts","./src/pages/search-route-page.tsx"],"version":"5.9.3"}