adds attachment support

This commit is contained in:
2026-05-02 19:21:06 -07:00
parent 11e6875de9
commit 38da3cea72
15 changed files with 949 additions and 67 deletions

1
dist/default.conf vendored
View File

@@ -1,6 +1,7 @@
server { server {
listen 80; listen 80;
server_name _; server_name _;
client_max_body_size 32m;
root /usr/share/nginx/html; root /usr/share/nginx/html;
index index.html; index index.html;

View File

@@ -10,6 +10,12 @@ Content type:
- Requests with bodies use `application/json`. - Requests with bodies use `application/json`.
- Responses are JSON unless noted otherwise. - Responses are JSON unless noted otherwise.
Chat upload limits:
- Chat completion and direct message payloads support inline attachments up to a 32 MB request body.
- Up to 8 attachments per message.
- Image attachments: PNG or JPEG only, max 6 MB each.
- Text attachments: up to 8 MB source size each; server accepts at most 200,000 characters of inlined text content per attachment.
## Health + Auth ## Health + Auth
### `GET /health` ### `GET /health`
@@ -74,11 +80,34 @@ Behavior notes:
"role": "system|user|assistant|tool", "role": "system|user|assistant|tool",
"content": "string", "content": "string",
"name": "optional", "name": "optional",
"metadata": {} "metadata": {},
"attachments": [
{
"kind": "image",
"id": "attachment-id",
"filename": "photo.jpg",
"mimeType": "image/jpeg",
"sizeBytes": 12345,
"dataUrl": "data:image/jpeg;base64,..."
},
{
"kind": "text",
"id": "attachment-id",
"filename": "notes.md",
"mimeType": "text/markdown",
"sizeBytes": 4567,
"text": "# Notes\\n...",
"truncated": false
}
]
} }
``` ```
- Response: `{ "message": Message }` - Response: `{ "message": Message }`
Notes:
- `attachments` is optional and is merged into stored `message.metadata.attachments`.
- Tool messages should not include attachments.
## Chat Completions (non-streaming) ## Chat Completions (non-streaming)
### `POST /v1/chat-completions` ### `POST /v1/chat-completions`
@@ -89,7 +118,30 @@ Behavior notes:
"provider": "openai|anthropic|xai", "provider": "openai|anthropic|xai",
"model": "string", "model": "string",
"messages": [ "messages": [
{ "role": "system|user|assistant|tool", "content": "string", "name": "optional" } {
"role": "system|user|assistant|tool",
"content": "string",
"name": "optional",
"attachments": [
{
"kind": "image",
"id": "attachment-id",
"filename": "photo.jpg",
"mimeType": "image/jpeg",
"sizeBytes": 12345,
"dataUrl": "data:image/jpeg;base64,..."
},
{
"kind": "text",
"id": "attachment-id",
"filename": "notes.md",
"mimeType": "text/markdown",
"sizeBytes": 4567,
"text": "# Notes\\n...",
"truncated": false
}
]
}
], ],
"temperature": 0.2, "temperature": 0.2,
"maxTokens": 256 "maxTokens": 256
@@ -112,7 +164,12 @@ Behavior notes:
- For `chatId` calls, server stores only *new* non-assistant messages from provided history to avoid duplicates. - For `chatId` calls, server stores only *new* non-assistant messages from provided history to avoid duplicates.
- Server persists final assistant output and call metadata (`LlmCall`) in DB. - Server persists final assistant output and call metadata (`LlmCall`) in DB.
- Server updates chat-level model metadata on each call: `lastUsedProvider`/`lastUsedModel`; first successful/failed call also initializes `initiatedProvider`/`initiatedModel` if unset. - Server updates chat-level model metadata on each call: `lastUsedProvider`/`lastUsedModel`; first successful/failed call also initializes `initiatedProvider`/`initiatedModel` if unset.
- Attachments are optional and currently apply to `user` messages. Persisted chat history stores them under `message.metadata.attachments`.
- Images are forwarded inline to providers as multimodal image parts. Use PNG or JPEG for cross-provider compatibility.
- Text files are forwarded as explicit text blocks rather than provider-managed file references. Large text attachments should already be truncated client-side before submission.
- For `openai` and `xai`, backend enables tool use during chat completion with an internal system instruction. - For `openai` and `xai`, backend enables tool use during chat completion with an internal system instruction.
- For `openai` and `xai`, image attachments are sent as chat-completions content parts alongside text.
- For `anthropic`, image attachments are sent as Messages API `image` blocks using base64 source data; text attachments are added as `text` blocks.
- Available tool calls for chat: `web_search` and `fetch_url`. - Available tool calls for chat: `web_search` and `fetch_url`.
- `web_search` returns ranked results with per-result summaries/snippets. Its backend engine is selected by `CHAT_WEB_SEARCH_ENGINE` (`exa` default, or `searxng` with `SEARXNG_BASE_URL` set). SearXNG mode requires the instance to allow `format=json`. - `web_search` returns ranked results with per-result summaries/snippets. Its backend engine is selected by `CHAT_WEB_SEARCH_ENGINE` (`exa` default, or `searxng` with `SEARXNG_BASE_URL` set). SearXNG mode requires the instance to allow `format=json`.
- `fetch_url` fetches a URL and returns plaintext page content (HTML converted to text server-side). - `fetch_url` fetches a URL and returns plaintext page content (HTML converted to text server-side).
@@ -189,10 +246,32 @@ Search run notes:
"role": "system|user|assistant|tool", "role": "system|user|assistant|tool",
"content": "...", "content": "...",
"name": null, "name": null,
"metadata": null "metadata": {
"attachments": [
{
"kind": "image",
"id": "attachment-id",
"filename": "photo.jpg",
"mimeType": "image/jpeg",
"sizeBytes": 12345,
"dataUrl": "data:image/jpeg;base64,..."
},
{
"kind": "text",
"id": "attachment-id",
"filename": "notes.md",
"mimeType": "text/markdown",
"sizeBytes": 4567,
"text": "# Notes\\n...",
"truncated": false
}
]
}
} }
``` ```
`metadata` remains nullable. Tool-call log messages still use `metadata.kind = "tool_call"`; regular user messages with attachments use `metadata.attachments`.
`ChatDetail` `ChatDetail`
```json ```json
{ {

View File

@@ -9,6 +9,7 @@ Transport:
- HTTP response uses `Content-Type: text/event-stream; charset=utf-8` - HTTP response uses `Content-Type: text/event-stream; charset=utf-8`
- Events are emitted in SSE format (`event: ...`, `data: ...`) - Events are emitted in SSE format (`event: ...`, `data: ...`)
- Request body is JSON - Request body is JSON
- Request body supports the same inline attachment schema and limits documented in `docs/api/rest.md`.
Authentication: Authentication:
- Same as REST endpoints (`Authorization: Bearer <token>` when token mode is enabled) - Same as REST endpoints (`Authorization: Bearer <token>` when token mode is enabled)
@@ -21,7 +22,30 @@ Authentication:
"provider": "openai|anthropic|xai", "provider": "openai|anthropic|xai",
"model": "string", "model": "string",
"messages": [ "messages": [
{ "role": "system|user|assistant|tool", "content": "string", "name": "optional" } {
"role": "system|user|assistant|tool",
"content": "string",
"name": "optional",
"attachments": [
{
"kind": "image",
"id": "attachment-id",
"filename": "photo.jpg",
"mimeType": "image/jpeg",
"sizeBytes": 12345,
"dataUrl": "data:image/jpeg;base64,..."
},
{
"kind": "text",
"id": "attachment-id",
"filename": "notes.md",
"mimeType": "text/markdown",
"sizeBytes": 4567,
"text": "# Notes\\n...",
"truncated": false
}
]
}
], ],
"temperature": 0.2, "temperature": 0.2,
"maxTokens": 256 "maxTokens": 256
@@ -32,6 +56,7 @@ Notes:
- If `chatId` is omitted, backend creates a new chat. - If `chatId` is omitted, backend creates a new chat.
- If `chatId` is provided, backend validates it exists. - If `chatId` is provided, backend validates it exists.
- Backend stores only new non-assistant input history rows to avoid duplicates. - Backend stores only new non-assistant input history rows to avoid duplicates.
- Attachments are optional and are persisted under `message.metadata.attachments` on stored user messages.
## Event Stream Contract ## Event Stream Contract
@@ -103,8 +128,9 @@ Event order:
## Provider Streaming Behavior ## Provider Streaming Behavior
- `openai`: backend may execute internal tool calls (`web_search`, `fetch_url`) before producing final text. - `openai`: backend may execute internal tool calls (`web_search`, `fetch_url`) before producing final text.
- `openai`: image attachments are sent as chat-completions content parts; text attachments are inlined as text parts.
- `xai`: same tool-enabled behavior as OpenAI. - `xai`: same tool-enabled behavior as OpenAI.
- `anthropic`: streamed via event stream; emits `delta` from `content_block_delta` with `text_delta`. - `anthropic`: streamed via event stream; emits `delta` from `content_block_delta` with `text_delta`. Image attachments are sent as base64 `image` blocks and text attachments are appended as `text` blocks.
- `web_search` uses `CHAT_WEB_SEARCH_ENGINE` (`exa` default, or `searxng` with `SEARXNG_BASE_URL` set). SearXNG mode requires the instance to allow `format=json`. This only affects chat-mode tool calls, not search-mode endpoints. - `web_search` uses `CHAT_WEB_SEARCH_ENGINE` (`exa` default, or `searxng` with `SEARXNG_BASE_URL` set). SearXNG mode requires the instance to allow `format=json`. This only affects chat-mode tool calls, not search-mode endpoints.
Tool-enabled streaming notes (`openai`/`xai`): Tool-enabled streaming notes (`openai`/`xai`):

View File

@@ -9,6 +9,7 @@ import { warmModelCatalog } from "./llm/model-catalog.js";
import { registerRoutes } from "./routes.js"; import { registerRoutes } from "./routes.js";
const app = Fastify({ const app = Fastify({
bodyLimit: 32 * 1024 * 1024,
disableRequestLogging: true, disableRequestLogging: true,
logger: { logger: {
transport: { transport: {

View File

@@ -4,6 +4,7 @@ import { z } from "zod";
import { env } from "../env.js"; import { env } from "../env.js";
import { exaClient } from "../search/exa.js"; import { exaClient } from "../search/exa.js";
import { searchSearxng } from "../search/searxng.js"; import { searchSearxng } from "../search/searxng.js";
import { buildOpenAIConversationMessage } from "./message-content.js";
import type { ChatMessage } from "./types.js"; import type { ChatMessage } from "./types.js";
const MAX_TOOL_ROUNDS = 4; const MAX_TOOL_ROUNDS = 4;
@@ -250,23 +251,7 @@ function extractHtmlTitle(html: string) {
} }
function normalizeIncomingMessages(messages: ChatMessage[]) { function normalizeIncomingMessages(messages: ChatMessage[]) {
const normalized = messages.map((m) => { const normalized = messages.map((message) => buildOpenAIConversationMessage(message));
if (m.role === "tool") {
const name = m.name?.trim() || "tool";
return {
role: "user",
content: `Tool output (${name}):\n${m.content}`,
};
}
if (m.role === "assistant" || m.role === "system" || m.role === "user") {
const out: any = { role: m.role, content: m.content };
if (m.name && (m.role === "assistant" || m.role === "user")) {
out.name = m.name;
}
return out;
}
return { role: "user", content: m.content };
});
return [{ role: "system", content: CHAT_TOOL_SYSTEM_PROMPT }, ...normalized]; return [{ role: "system", content: CHAT_TOOL_SYSTEM_PROMPT }, ...normalized];
} }

View File

@@ -0,0 +1,211 @@
import type { ChatAttachment, ChatImageAttachment, ChatMessage, ChatTextAttachment } from "./types.js";
// Replace double quotes so a value can sit safely inside a quoted
// pseudo-XML attribute in the prompt text.
function escapeAttribute(value: string) {
  return value.split('"').join("&quot;");
}
// Collect only the image-kind attachments from a message (empty list when none).
function getImageAttachments(message: ChatMessage) {
  const images: ChatImageAttachment[] = [];
  for (const attachment of message.attachments ?? []) {
    if (attachment.kind === "image") images.push(attachment);
  }
  return images;
}
// Collect only the text-kind attachments from a message (empty list when none).
function getTextAttachments(message: ChatMessage) {
  const texts: ChatTextAttachment[] = [];
  for (const attachment of message.attachments ?? []) {
    if (attachment.kind === "text") texts.push(attachment);
  }
  return texts;
}
// One-line textual summary naming the attached images; null when there are none.
function buildImageSummaryText(attachments: ChatImageAttachment[]) {
  if (attachments.length === 0) return null;
  const names = attachments.map((attachment) => attachment.filename).join(", ");
  const label = attachments.length > 1 ? "Attached images" : "Attached image";
  return `${label}: ${names}.`;
}
// Wrap a text attachment in a pseudo-XML <attached_file> envelope for the prompt,
// noting truncation both in the human-readable header and as a tag attribute.
function buildTextAttachmentPrompt(attachment: ChatTextAttachment) {
  const quote = (value: string) => value.replace(/"/g, "&quot;");
  const header = attachment.truncated
    ? `Attached text file: ${attachment.filename} (content truncated)`
    : `Attached text file: ${attachment.filename}`;
  const openTag =
    `<attached_file filename="${quote(attachment.filename)}" mime_type="${quote(attachment.mimeType)}"` +
    (attachment.truncated ? ' truncated="true">' : ">");
  return [header, openTag, attachment.text, "</attached_file>"].join("\n");
}
function toOpenAIContent(message: ChatMessage) {
const imageAttachments = getImageAttachments(message);
const textAttachments = getTextAttachments(message);
if (!imageAttachments.length && !textAttachments.length) {
return message.content;
}
const parts: Array<Record<string, unknown>> = [];
for (const attachment of imageAttachments) {
parts.push({
type: "image_url",
image_url: {
url: attachment.dataUrl,
detail: "auto",
},
});
}
const imageSummary = buildImageSummaryText(imageAttachments);
if (imageSummary) {
parts.push({ type: "text", text: imageSummary });
}
for (const attachment of textAttachments) {
parts.push({ type: "text", text: buildTextAttachmentPrompt(attachment) });
}
if (message.content.trim()) {
parts.push({ type: "text", text: message.content });
}
if (parts.length === 1 && parts[0]?.type === "text" && typeof parts[0].text === "string") {
return parts[0].text;
}
return parts;
}
// Decompose an image data URL into its media type and base64 payload.
// Throws when the URL is malformed or its media type disagrees with the
// attachment's declared mimeType; strips whitespace from the payload.
function parseImageDataUrl(attachment: ChatImageAttachment) {
  const pattern = /^data:(image\/(?:png|jpeg));base64,([a-z0-9+/=\s]+)$/i;
  const match = pattern.exec(attachment.dataUrl);
  if (!match) {
    throw new Error(`Invalid image attachment data URL for '${attachment.filename}'.`);
  }
  const mediaType = match[1].toLowerCase();
  if (mediaType !== attachment.mimeType) {
    throw new Error(`Image attachment MIME type mismatch for '${attachment.filename}'.`);
  }
  return { mediaType, data: match[2].replace(/\s+/g, "") };
}
function toAnthropicContent(message: ChatMessage) {
const imageAttachments = getImageAttachments(message);
const textAttachments = getTextAttachments(message);
if (!imageAttachments.length && !textAttachments.length) {
return message.content;
}
const blocks: Array<Record<string, unknown>> = [];
for (const attachment of imageAttachments) {
const source = parseImageDataUrl(attachment);
blocks.push({
type: "image",
source: {
type: "base64",
media_type: source.mediaType,
data: source.data,
},
});
}
const imageSummary = buildImageSummaryText(imageAttachments);
if (imageSummary) {
blocks.push({ type: "text", text: imageSummary });
}
for (const attachment of textAttachments) {
blocks.push({ type: "text", text: buildTextAttachmentPrompt(attachment) });
}
if (message.content.trim()) {
blocks.push({ type: "text", text: message.content });
}
if (blocks.length === 1 && blocks[0]?.type === "text" && typeof blocks[0].text === "string") {
return blocks[0].text;
}
return blocks;
}
// Map an internal ChatMessage onto the OpenAI chat-completions wire shape.
// Tool messages are rewritten as user messages carrying a "Tool output" preamble;
// `name` is forwarded only for assistant/user roles.
export function buildOpenAIConversationMessage(message: ChatMessage) {
  if (message.role === "tool") {
    const toolName = message.name?.trim() || "tool";
    return {
      role: "user",
      content: `Tool output (${toolName}):\n${message.content}`,
    };
  }
  const result: Record<string, unknown> = {
    role: message.role,
    content: toOpenAIContent(message),
  };
  const forwardName = message.role === "assistant" || message.role === "user";
  if (forwardName && message.name) result.name = message.name;
  return result;
}
// Anthropic takes the system prompt as a separate field; return the content of
// the first system message in the history, or undefined when there is none.
export function getAnthropicSystemPrompt(messages: ChatMessage[]) {
  for (const message of messages) {
    if (message.role === "system") return message.content;
  }
  return undefined;
}
// Map an internal ChatMessage onto the Anthropic Messages API wire shape.
// System messages are rejected (Anthropic wants them in a separate field);
// tool messages become user messages with a "Tool output" preamble.
export function buildAnthropicConversationMessage(message: ChatMessage) {
  switch (message.role) {
    case "system":
      throw new Error("System messages must be handled separately for Anthropic.");
    case "tool": {
      const toolName = message.name?.trim() || "tool";
      return {
        role: "user",
        content: `Tool output (${toolName}):\n${message.content}`,
      };
    }
    case "assistant":
      return { role: "assistant", content: toAnthropicContent(message) };
    default:
      return { role: "user", content: toAnthropicContent(message) };
  }
}
// Normalize an untyped metadata value into a canonical ChatAttachment[] so two
// attachment lists can be compared structurally (e.g. via JSON.stringify).
// Non-arrays yield []; malformed entries are skipped; missing string fields
// default to "" and sizeBytes to 0; image MIME types collapse to png/jpeg.
export function buildComparableAttachments(input: unknown): ChatAttachment[] {
  if (!Array.isArray(input)) return [];
  const result: ChatAttachment[] = [];
  for (const candidate of input) {
    if (typeof candidate !== "object" || candidate === null || Array.isArray(candidate)) continue;
    const record = candidate as Record<string, unknown>;
    const id = typeof record.id === "string" ? record.id : "";
    const filename = typeof record.filename === "string" ? record.filename : "";
    const mimeType = typeof record.mimeType === "string" ? record.mimeType : "";
    const sizeBytes = typeof record.sizeBytes === "number" ? record.sizeBytes : 0;
    if (record.kind === "image" && typeof record.dataUrl === "string") {
      result.push({
        kind: "image",
        id,
        filename,
        mimeType: mimeType === "image/png" ? "image/png" : "image/jpeg",
        sizeBytes,
        dataUrl: record.dataUrl,
      });
    } else if (record.kind === "text" && typeof record.text === "string") {
      result.push({
        kind: "text",
        id,
        filename,
        mimeType,
        sizeBytes,
        text: record.text,
        truncated: record.truncated === true,
      });
    }
  }
  return result;
}

View File

@@ -2,6 +2,7 @@ import { performance } from "node:perf_hooks";
import { prisma } from "../db.js"; import { prisma } from "../db.js";
import { anthropicClient, openaiClient, xaiClient } from "./providers.js"; import { anthropicClient, openaiClient, xaiClient } from "./providers.js";
import { buildToolLogMessageData, runToolAwareOpenAIChat } from "./chat-tools.js"; import { buildToolLogMessageData, runToolAwareOpenAIChat } from "./chat-tools.js";
import { buildAnthropicConversationMessage, getAnthropicSystemPrompt } from "./message-content.js";
import type { MultiplexRequest, MultiplexResponse, Provider } from "./types.js"; import type { MultiplexRequest, MultiplexResponse, Provider } from "./types.js";
function asProviderEnum(p: Provider) { function asProviderEnum(p: Provider) {
@@ -68,11 +69,8 @@ export async function runMultiplex(req: MultiplexRequest): Promise<MultiplexResp
} else if (req.provider === "anthropic") { } else if (req.provider === "anthropic") {
const client = anthropicClient(); const client = anthropicClient();
// Anthropic splits system prompt. We'll convert first system message into system string. const system = getAnthropicSystemPrompt(req.messages);
const system = req.messages.find((m) => m.role === "system")?.content; const msgs = req.messages.filter((message) => message.role !== "system").map((message) => buildAnthropicConversationMessage(message));
const msgs = req.messages
.filter((m) => m.role !== "system")
.map((m) => ({ role: m.role === "assistant" ? "assistant" : "user", content: m.content }));
const r = await client.messages.create({ const r = await client.messages.create({
model: req.model, model: req.model,

View File

@@ -2,6 +2,7 @@ import { performance } from "node:perf_hooks";
import { prisma } from "../db.js"; import { prisma } from "../db.js";
import { anthropicClient, openaiClient, xaiClient } from "./providers.js"; import { anthropicClient, openaiClient, xaiClient } from "./providers.js";
import { buildToolLogMessageData, runToolAwareOpenAIChatStream, type ToolExecutionEvent } from "./chat-tools.js"; import { buildToolLogMessageData, runToolAwareOpenAIChatStream, type ToolExecutionEvent } from "./chat-tools.js";
import { buildAnthropicConversationMessage, getAnthropicSystemPrompt } from "./message-content.js";
import type { MultiplexRequest, Provider } from "./types.js"; import type { MultiplexRequest, Provider } from "./types.js";
export type StreamEvent = export type StreamEvent =
@@ -88,10 +89,8 @@ export async function* runMultiplexStream(req: MultiplexRequest): AsyncGenerator
} else if (req.provider === "anthropic") { } else if (req.provider === "anthropic") {
const client = anthropicClient(); const client = anthropicClient();
const system = req.messages.find((m) => m.role === "system")?.content; const system = getAnthropicSystemPrompt(req.messages);
const msgs = req.messages const msgs = req.messages.filter((message) => message.role !== "system").map((message) => buildAnthropicConversationMessage(message));
.filter((m) => m.role !== "system")
.map((m) => ({ role: m.role === "assistant" ? "assistant" : "user", content: m.content }));
const stream = await client.messages.create({ const stream = await client.messages.create({
model: req.model, model: req.model,

View File

@@ -1,9 +1,31 @@
export type Provider = "openai" | "anthropic" | "xai"; export type Provider = "openai" | "anthropic" | "xai";
/** Inline image attachment carried as a base64 data URL (PNG/JPEG only). */
export type ChatImageAttachment = {
  kind: "image";
  // Client-generated identifier — presumably used for de-duplication; confirm against routes.
  id: string;
  filename: string;
  mimeType: "image/png" | "image/jpeg";
  // Size of the original file in bytes (not the base64-encoded length).
  sizeBytes: number;
  // Full `data:image/...;base64,...` URL containing the encoded image.
  dataUrl: string;
};
/** Inline text-file attachment; `text` holds the (possibly truncated) file content. */
export type ChatTextAttachment = {
  kind: "text";
  id: string;
  filename: string;
  mimeType: string;
  sizeBytes: number;
  text: string;
  // True when the client cut the content at its character limit.
  truncated?: boolean;
};
/** Discriminated union over `kind` covering every supported attachment shape. */
export type ChatAttachment = ChatImageAttachment | ChatTextAttachment;
export type ChatMessage = { export type ChatMessage = {
role: "system" | "user" | "assistant" | "tool"; role: "system" | "user" | "assistant" | "tool";
content: string; content: string;
name?: string; name?: string;
attachments?: ChatAttachment[];
}; };
export type MultiplexRequest = { export type MultiplexRequest = {

View File

@@ -4,23 +4,33 @@ import type { FastifyInstance } from "fastify";
import { prisma } from "./db.js"; import { prisma } from "./db.js";
import { requireAdmin } from "./auth.js"; import { requireAdmin } from "./auth.js";
import { env } from "./env.js"; import { env } from "./env.js";
import { buildComparableAttachments } from "./llm/message-content.js";
import { runMultiplex } from "./llm/multiplexer.js"; import { runMultiplex } from "./llm/multiplexer.js";
import { runMultiplexStream } from "./llm/streaming.js"; import { runMultiplexStream } from "./llm/streaming.js";
import { getModelCatalogSnapshot } from "./llm/model-catalog.js"; import { getModelCatalogSnapshot } from "./llm/model-catalog.js";
import { openaiClient } from "./llm/providers.js"; import { openaiClient } from "./llm/providers.js";
import { exaClient } from "./search/exa.js"; import { exaClient } from "./search/exa.js";
import type { ChatAttachment } from "./llm/types.js";
type IncomingChatMessage = { type IncomingChatMessage = {
role: "system" | "user" | "assistant" | "tool"; role: "system" | "user" | "assistant" | "tool";
content: string; content: string;
name?: string; name?: string;
attachments?: ChatAttachment[];
}; };
function sameMessage( function sameMessage(
a: { role: string; content: string; name?: string | null }, a: { role: string; content: string; name?: string | null; metadata?: unknown },
b: { role: string; content: string; name?: string | null } b: { role: string; content: string; name?: string | null; attachments?: ChatAttachment[] }
) { ) {
return a.role === b.role && a.content === b.content && (a.name ?? null) === (b.name ?? null); const existingAttachments = JSON.stringify(buildComparableAttachments((a.metadata as Record<string, unknown> | null)?.attachments ?? null));
const incomingAttachments = JSON.stringify(b.attachments ?? []);
return (
a.role === b.role &&
a.content === b.content &&
(a.name ?? null) === (b.name ?? null) &&
existingAttachments === incomingAttachments
);
} }
function isToolCallLogMetadata(value: unknown) { function isToolCallLogMetadata(value: unknown) {
@@ -60,10 +70,67 @@ async function storeNonAssistantMessages(chatId: string, messages: IncomingChatM
role: m.role as any, role: m.role as any,
content: m.content, content: m.content,
name: m.name, name: m.name,
metadata: m.attachments?.length ? ({ attachments: m.attachments } as any) : undefined,
})), })),
}); });
} }
// Server-side attachment limits (mirrored by the web client).
const MAX_CHAT_ATTACHMENTS = 8;
const MAX_IMAGE_ATTACHMENT_BYTES = 6 * 1024 * 1024;
const MAX_TEXT_ATTACHMENT_CHARS = 200_000;
// Base64 inflates ~4/3 over raw bytes, so the data-URL cap exceeds the byte cap.
const MAX_IMAGE_DATA_URL_CHARS = 8_500_000;

// Fields shared by every attachment variant.
const attachmentCommonFields = {
  id: z.string().trim().min(1).max(128),
  filename: z.string().trim().min(1).max(255),
};

const ImageAttachmentSchema = z.object({
  kind: z.literal("image"),
  ...attachmentCommonFields,
  mimeType: z.enum(["image/png", "image/jpeg"]),
  sizeBytes: z.number().int().positive().max(MAX_IMAGE_ATTACHMENT_BYTES),
  dataUrl: z
    .string()
    .max(MAX_IMAGE_DATA_URL_CHARS)
    .regex(/^data:image\/(?:png|jpeg);base64,[a-z0-9+/=\s]+$/i, "Invalid image data URL"),
});

const TextAttachmentSchema = z.object({
  kind: z.literal("text"),
  ...attachmentCommonFields,
  mimeType: z.string().trim().min(1).max(127),
  sizeBytes: z.number().int().positive().max(8 * 1024 * 1024),
  text: z.string().max(MAX_TEXT_ATTACHMENT_CHARS),
  truncated: z.boolean().optional(),
});

// Incoming attachments are discriminated on `kind`.
const ChatAttachmentSchema = z.discriminatedUnion("kind", [ImageAttachmentSchema, TextAttachmentSchema]);
// Schema for one chat-completion input message, including optional inline
// attachments (capped at MAX_CHAT_ATTACHMENTS per message).
const CompletionMessageSchema = z
  .object({
    role: z.enum(["system", "user", "assistant", "tool"]),
    content: z.string(),
    name: z.string().optional(),
    attachments: z.array(ChatAttachmentSchema).max(MAX_CHAT_ATTACHMENTS).optional(),
  })
  .superRefine((value, ctx) => {
    // Tool-output messages must not carry attachments; reject with a field-scoped issue.
    if (value.attachments?.length && value.role === "tool") {
      ctx.addIssue({
        code: z.ZodIssueCode.custom,
        message: "Tool messages cannot include attachments.",
        path: ["attachments"],
      });
    }
  });
/**
 * Merge inline attachments into a message's metadata payload.
 *
 * - No attachments (undefined or empty): the metadata is returned untouched.
 * - Attachments with non-object metadata (null, primitive, array): a fresh
 *   `{ attachments }` object replaces it.
 * - Attachments with object metadata: a shallow copy with `attachments`
 *   set/overwritten, leaving the input object unmutated.
 *
 * Returns `unknown` (instead of leaking `as any`) so callers must cast
 * deliberately at the persistence boundary.
 */
function mergeAttachmentsIntoMetadata(metadata: unknown, attachments?: ChatAttachment[]): unknown {
  if (!attachments?.length) return metadata;
  if (!metadata || typeof metadata !== "object" || Array.isArray(metadata)) {
    return { attachments };
  }
  return { ...(metadata as Record<string, unknown>), attachments };
}
const SearchRunBody = z.object({ const SearchRunBody = z.object({
query: z.string().trim().min(1).optional(), query: z.string().trim().min(1).optional(),
title: z.string().trim().min(1).optional(), title: z.string().trim().min(1).optional(),
@@ -768,6 +835,7 @@ export async function registerRoutes(app: FastifyInstance) {
content: z.string(), content: z.string(),
name: z.string().optional(), name: z.string().optional(),
metadata: z.unknown().optional(), metadata: z.unknown().optional(),
attachments: z.array(ChatAttachmentSchema).max(MAX_CHAT_ATTACHMENTS).optional(),
}); });
const { chatId } = Params.parse(req.params); const { chatId } = Params.parse(req.params);
@@ -779,7 +847,7 @@ export async function registerRoutes(app: FastifyInstance) {
role: body.role as any, role: body.role as any,
content: body.content, content: body.content,
name: body.name, name: body.name,
metadata: body.metadata as any, metadata: mergeAttachmentsIntoMetadata(body.metadata, body.attachments) as any,
}, },
}); });
@@ -794,13 +862,7 @@ export async function registerRoutes(app: FastifyInstance) {
chatId: z.string().optional(), chatId: z.string().optional(),
provider: z.enum(["openai", "anthropic", "xai"]), provider: z.enum(["openai", "anthropic", "xai"]),
model: z.string().min(1), model: z.string().min(1),
messages: z.array( messages: z.array(CompletionMessageSchema),
z.object({
role: z.enum(["system", "user", "assistant", "tool"]),
content: z.string(),
name: z.string().optional(),
})
),
temperature: z.number().min(0).max(2).optional(), temperature: z.number().min(0).max(2).optional(),
maxTokens: z.number().int().positive().optional(), maxTokens: z.number().int().positive().optional(),
}); });
@@ -834,13 +896,7 @@ export async function registerRoutes(app: FastifyInstance) {
chatId: z.string().optional(), chatId: z.string().optional(),
provider: z.enum(["openai", "anthropic", "xai"]), provider: z.enum(["openai", "anthropic", "xai"]),
model: z.string().min(1), model: z.string().min(1),
messages: z.array( messages: z.array(CompletionMessageSchema),
z.object({
role: z.enum(["system", "user", "assistant", "tool"]),
content: z.string(),
name: z.string().optional(),
})
),
temperature: z.number().min(0).max(2).optional(), temperature: z.number().min(0).max(2).optional(),
maxTokens: z.number().int().positive().optional(), maxTokens: z.number().int().positive().optional(),
}); });

View File

@@ -1,9 +1,10 @@
import { useEffect, useMemo, useRef, useState } from "preact/hooks"; import { useEffect, useMemo, useRef, useState } from "preact/hooks";
import { Check, ChevronDown, Globe2, Menu, MessageSquare, Plus, Search, SendHorizontal, Trash2 } from "lucide-preact"; import { Check, ChevronDown, Globe2, Menu, MessageSquare, Paperclip, Plus, Search, SendHorizontal, Trash2 } from "lucide-preact";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import { Textarea } from "@/components/ui/textarea"; import { Textarea } from "@/components/ui/textarea";
import { Separator } from "@/components/ui/separator"; import { Separator } from "@/components/ui/separator";
import { AuthScreen } from "@/components/auth/auth-screen"; import { AuthScreen } from "@/components/auth/auth-screen";
import { ChatAttachmentList } from "@/components/chat/chat-attachment-list";
import { ChatMessagesPanel } from "@/components/chat/chat-messages-panel"; import { ChatMessagesPanel } from "@/components/chat/chat-messages-panel";
import { SearchResultsPanel } from "@/components/search/search-results-panel"; import { SearchResultsPanel } from "@/components/search/search-results-panel";
import { import {
@@ -20,6 +21,8 @@ import {
runCompletionStream, runCompletionStream,
runSearchStream, runSearchStream,
suggestChatTitle, suggestChatTitle,
getMessageAttachments,
type ChatAttachment,
type ModelCatalogResponse, type ModelCatalogResponse,
type Provider, type Provider,
type ChatDetail, type ChatDetail,
@@ -102,6 +105,65 @@ const TRANSCRIPT_BOTTOM_GAP = 20;
const REPLY_SCROLL_BUFFER_MIN = 288; const REPLY_SCROLL_BUFFER_MIN = 288;
const REPLY_SCROLL_BUFFER_MAX = 576; const REPLY_SCROLL_BUFFER_MAX = 576;
const REPLY_SCROLL_BUFFER_VIEWPORT_RATIO = 0.52; const REPLY_SCROLL_BUFFER_VIEWPORT_RATIO = 0.52;
// Client-side attachment limits; keep in sync with the server-side schema.
const MAX_CHAT_ATTACHMENTS = 8;
const MAX_IMAGE_ATTACHMENT_BYTES = 6 * 1024 * 1024;
const MAX_TEXT_ATTACHMENT_BYTES = 8 * 1024 * 1024;
const MAX_TEXT_ATTACHMENT_CHARS = 200_000;
// `accept` attribute for the file picker: PNG/JPEG images plus text-based formats.
const CHAT_FILE_ACCEPT =
  ".png,.jpg,.jpeg,.txt,.md,.markdown,.csv,.tsv,.json,.jsonl,.xml,.yaml,.yml,.html,.htm,.css,.js,.jsx,.ts,.tsx,.py,.rb,.java,.c,.cc,.cpp,.h,.hpp,.go,.rs,.sh,.sql,.log,.toml,.ini,.cfg,.conf,.swift,.kt,.m,.mm";
// Extensions treated as text attachments when the browser supplies no usable MIME type.
const TEXT_ATTACHMENT_EXTENSIONS = new Set([
  ".txt",
  ".md",
  ".markdown",
  ".csv",
  ".tsv",
  ".json",
  ".jsonl",
  ".xml",
  ".yaml",
  ".yml",
  ".html",
  ".htm",
  ".css",
  ".js",
  ".jsx",
  ".ts",
  ".tsx",
  ".py",
  ".rb",
  ".java",
  ".c",
  ".cc",
  ".cpp",
  ".h",
  ".hpp",
  ".go",
  ".rs",
  ".sh",
  ".sql",
  ".log",
  ".toml",
  ".ini",
  ".cfg",
  ".conf",
  ".swift",
  ".kt",
  ".m",
  ".mm",
]);
// Non-`text/*` MIME types that are nonetheless treated as text attachments.
const TEXT_ATTACHMENT_MIME_TYPES = new Set([
  "application/json",
  "application/ld+json",
  "application/sql",
  "application/toml",
  "application/x-httpd-php",
  "application/x-javascript",
  "application/x-sh",
  "application/xml",
  "application/yaml",
  "application/x-yaml",
  "image/svg+xml",
]);
function getModelOptions(catalog: ModelCatalogResponse["providers"], provider: Provider) { function getModelOptions(catalog: ModelCatalogResponse["providers"], provider: Provider) {
const providerModels = catalog[provider]?.models ?? []; const providerModels = catalog[provider]?.models ?? [];
@@ -117,6 +179,103 @@ function getReplyScrollBufferHeight() {
); );
} }
// Lowercased extension including the dot ("photo.PNG" -> ".png"), or "" when none.
function getFileExtension(filename: string) {
  const dot = filename.lastIndexOf(".");
  if (dot < 0) return "";
  return filename.slice(dot).toLowerCase();
}
// Unique attachment id: prefer the Web Crypto UUID, fall back to a
// timestamp + random suffix when crypto.randomUUID is unavailable.
function createAttachmentId() {
  const canUseUuid = typeof crypto !== "undefined" && typeof crypto.randomUUID === "function";
  if (canUseUuid) return crypto.randomUUID();
  const randomSuffix = Math.random().toString(36).slice(2, 10);
  return `att-${Date.now()}-${randomSuffix}`;
}
// Resolve a file to "image/png" or "image/jpeg" via its declared MIME type,
// falling back to the filename extension; null for anything else.
function inferImageMimeType(file: File) {
  const declared = file.type;
  if (declared === "image/png" || declared === "image/jpeg") return declared;
  switch (getFileExtension(file.name)) {
    case ".png":
      return "image/png";
    case ".jpg":
    case ".jpeg":
      return "image/jpeg";
    default:
      return null;
  }
}
// A file counts as text-like if its MIME type is text/*, is in the explicit
// allow-list of text-ish application types, or its extension is allow-listed.
function isTextLikeFile(file: File) {
  const mimeType = file.type.toLowerCase();
  return (
    mimeType.startsWith("text/") ||
    TEXT_ATTACHMENT_MIME_TYPES.has(mimeType) ||
    TEXT_ATTACHMENT_EXTENSIONS.has(getFileExtension(file.name))
  );
}
// Base64-encode an ArrayBuffer, processing 32 KiB chunks so the spread into
// String.fromCharCode never overflows the argument/stack limit on large files.
function arrayBufferToBase64(buffer: ArrayBuffer) {
  const bytes = new Uint8Array(buffer);
  const CHUNK = 0x8000;
  const pieces: string[] = [];
  for (let offset = 0; offset < bytes.length; offset += CHUNK) {
    pieces.push(String.fromCharCode(...bytes.subarray(offset, offset + CHUNK)));
  }
  return btoa(pieces.join(""));
}
// Converts a user-selected File into a ChatAttachment payload.
// - PNG/JPEG images become inline base64 data URLs.
// - Text-like files are inlined as text: CRLF normalized to LF, NUL bytes
//   stripped, and content truncated at MAX_TEXT_ATTACHMENT_CHARS.
// Throws an Error with a user-facing message for unsupported or oversized
// files; callers surface err.message directly in the composer.
async function buildChatAttachment(file: File): Promise<ChatAttachment> {
  const imageMimeType = inferImageMimeType(file);
  if (imageMimeType) {
    // Reject oversized images before reading their bytes into memory.
    if (file.size > MAX_IMAGE_ATTACHMENT_BYTES) {
      throw new Error(`Image '${file.name}' exceeds the 6 MB upload limit.`);
    }
    const base64 = arrayBufferToBase64(await file.arrayBuffer());
    return {
      kind: "image",
      id: createAttachmentId(),
      filename: file.name,
      mimeType: imageMimeType,
      sizeBytes: file.size,
      dataUrl: `data:${imageMimeType};base64,${base64}`,
    };
  }
  if (!isTextLikeFile(file)) {
    throw new Error(`Unsupported file type for '${file.name}'. Use PNG/JPEG images or text-based files.`);
  }
  // Size cap is checked on the raw byte size, before the file is read.
  if (file.size > MAX_TEXT_ATTACHMENT_BYTES) {
    throw new Error(`Text file '${file.name}' exceeds the 8 MB upload limit.`);
  }
  // Normalize line endings and drop NUL bytes so storage/preview stay clean.
  const normalizedText = (await file.text()).replace(/\r\n/g, "\n").replace(/\u0000/g, "");
  const truncated = normalizedText.length > MAX_TEXT_ATTACHMENT_CHARS;
  return {
    kind: "text",
    id: createAttachmentId(),
    filename: file.name,
    // Browsers may report an empty type for plain files; default it.
    mimeType: file.type || "text/plain",
    sizeBytes: file.size,
    text: truncated ? normalizedText.slice(0, MAX_TEXT_ATTACHMENT_CHARS) : normalizedText,
    truncated,
  };
}
// Human-readable summary of a message's attachments: "" for none, the bare
// filename for a single attachment, and "Attached: a, b, ..." for several.
function buildAttachmentSummary(attachments: ChatAttachment[]) {
  if (attachments.length === 0) return "";
  const filenames = attachments.map(({ filename }) => filename).join(", ");
  return attachments.length > 1 ? `Attached: ${filenames}` : filenames;
}
// Extracts File objects from a drag/paste DataTransfer. Prefers the items
// API (which paste events populate), and falls back to the files list when
// items yields nothing. Returns [] for a null transfer.
function getFilesFromDataTransfer(dataTransfer: DataTransfer | null) {
  if (!dataTransfer) return [];
  const itemFiles: File[] = [];
  for (const item of Array.from(dataTransfer.items ?? [])) {
    if (item.kind !== "file") continue;
    const file = item.getAsFile();
    if (file instanceof File) itemFiles.push(file);
  }
  return itemFiles.length ? itemFiles : Array.from(dataTransfer.files ?? []);
}
// True when a drag/paste payload carries files. Drags advertise the "Files"
// type; pasted content may only be discoverable via the items/files lists.
function hasFileTransfer(dataTransfer: DataTransfer | null) {
  if (!dataTransfer) return false;
  const advertisedTypes = Array.from(dataTransfer.types ?? []);
  if (advertisedTypes.includes("Files")) return true;
  return getFilesFromDataTransfer(dataTransfer).length > 0;
}
function loadStoredModelPreferences() { function loadStoredModelPreferences() {
if (typeof window === "undefined") return EMPTY_MODEL_PREFERENCES; if (typeof window === "undefined") return EMPTY_MODEL_PREFERENCES;
try { try {
@@ -347,8 +506,12 @@ function ModelCombobox({ options, value, onChange, disabled = false }: ModelComb
function getChatTitle(chat: Pick<ChatSummary, "title">, messages?: ChatDetail["messages"]) { function getChatTitle(chat: Pick<ChatSummary, "title">, messages?: ChatDetail["messages"]) {
if (chat.title?.trim()) return chat.title.trim(); if (chat.title?.trim()) return chat.title.trim();
const firstUserMessage = messages?.find((m) => m.role === "user")?.content.trim(); const firstUserMessage = messages?.find((message) => message.role === "user");
if (firstUserMessage) return firstUserMessage.slice(0, 48); const firstUserText = firstUserMessage?.content.trim();
if (firstUserText) return firstUserText.slice(0, 48);
const firstUserAttachments = firstUserMessage ? getMessageAttachments(firstUserMessage.metadata) : [];
const attachmentSummary = buildAttachmentSummary(firstUserAttachments);
if (attachmentSummary) return attachmentSummary.slice(0, 48);
return "New chat"; return "New chat";
} }
@@ -448,6 +611,8 @@ export default function App() {
const [isStartingSearchChat, setIsStartingSearchChat] = useState(false); const [isStartingSearchChat, setIsStartingSearchChat] = useState(false);
const [pendingChatState, setPendingChatState] = useState<{ chatId: string | null; messages: Message[] } | null>(null); const [pendingChatState, setPendingChatState] = useState<{ chatId: string | null; messages: Message[] } | null>(null);
const [composer, setComposer] = useState(""); const [composer, setComposer] = useState("");
const [pendingAttachments, setPendingAttachments] = useState<ChatAttachment[]>([]);
const [isComposerDropActive, setIsComposerDropActive] = useState(false);
const [provider, setProvider] = useState<Provider>("openai"); const [provider, setProvider] = useState<Provider>("openai");
const [modelCatalog, setModelCatalog] = useState<ModelCatalogResponse["providers"]>(EMPTY_MODEL_CATALOG); const [modelCatalog, setModelCatalog] = useState<ModelCatalogResponse["providers"]>(EMPTY_MODEL_CATALOG);
const [providerModelPreferences, setProviderModelPreferences] = useState<ProviderModelPreferences>(() => loadStoredModelPreferences()); const [providerModelPreferences, setProviderModelPreferences] = useState<ProviderModelPreferences>(() => loadStoredModelPreferences());
@@ -460,6 +625,9 @@ export default function App() {
const transcriptContainerRef = useRef<HTMLDivElement>(null); const transcriptContainerRef = useRef<HTMLDivElement>(null);
const transcriptEndRef = useRef<HTMLDivElement>(null); const transcriptEndRef = useRef<HTMLDivElement>(null);
const contextMenuRef = useRef<HTMLDivElement>(null); const contextMenuRef = useRef<HTMLDivElement>(null);
const fileInputRef = useRef<HTMLInputElement>(null);
const dragDepthRef = useRef(0);
const pendingAttachmentsRef = useRef<ChatAttachment[]>([]);
const selectedItemRef = useRef<SidebarSelection | null>(null); const selectedItemRef = useRef<SidebarSelection | null>(null);
const pendingTitleGenerationRef = useRef<Set<string>>(new Set()); const pendingTitleGenerationRef = useRef<Set<string>>(new Set());
const searchRunAbortRef = useRef<AbortController | null>(null); const searchRunAbortRef = useRef<AbortController | null>(null);
@@ -518,6 +686,10 @@ export default function App() {
textarea.style.height = `${textarea.scrollHeight}px`; textarea.style.height = `${textarea.scrollHeight}px`;
}, [composer]); }, [composer]);
// Mirror pendingAttachments into a ref so async handlers (file reads, drops)
// can read the current queue length without stale-closure issues.
useEffect(() => {
  pendingAttachmentsRef.current = pendingAttachments;
}, [pendingAttachments]);
const sidebarItems = useMemo(() => buildSidebarItems(chats, searches), [chats, searches]); const sidebarItems = useMemo(() => buildSidebarItems(chats, searches), [chats, searches]);
const filteredSidebarItems = useMemo(() => { const filteredSidebarItems = useMemo(() => {
const query = sidebarQuery.trim().toLowerCase(); const query = sidebarQuery.trim().toLowerCase();
@@ -540,6 +712,7 @@ export default function App() {
setDraftKind(null); setDraftKind(null);
setPendingChatState(null); setPendingChatState(null);
setComposer(""); setComposer("");
setPendingAttachments([]);
setError(null); setError(null);
}; };
@@ -767,6 +940,16 @@ export default function App() {
const isSearchMode = draftKind ? draftKind === "search" : selectedItem?.kind === "search"; const isSearchMode = draftKind ? draftKind === "search" : selectedItem?.kind === "search";
const isSearchRunning = isSending && isSearchMode; const isSearchRunning = isSending && isSearchMode;
const isSendingActiveChat = isChatReplyStreamingInView; const isSendingActiveChat = isChatReplyStreamingInView;
// Attachments are chat-only: switching into search mode clears any queued
// attachments and resets drag state so the drop overlay is hidden.
useEffect(() => {
  if (isSearchMode && pendingAttachments.length) {
    setPendingAttachments([]);
  }
  if (isSearchMode) {
    dragDepthRef.current = 0;
    setIsComposerDropActive(false);
  }
}, [isSearchMode, pendingAttachments.length]);
const displayMessages = useMemo(() => { const displayMessages = useMemo(() => {
if (!pendingChatState) return messages.filter(isDisplayableMessage); if (!pendingChatState) return messages.filter(isDisplayableMessage);
if (pendingChatState.chatId) { if (pendingChatState.chatId) {
@@ -837,6 +1020,7 @@ export default function App() {
setSelectedItem(null); setSelectedItem(null);
setSelectedChat(null); setSelectedChat(null);
setSelectedSearch(null); setSelectedSearch(null);
setPendingAttachments([]);
setIsMobileSidebarOpen(false); setIsMobileSidebarOpen(false);
}; };
@@ -847,6 +1031,7 @@ export default function App() {
setSelectedItem(null); setSelectedItem(null);
setSelectedChat(null); setSelectedChat(null);
setSelectedSearch(null); setSelectedSearch(null);
setPendingAttachments([]);
setIsMobileSidebarOpen(false); setIsMobileSidebarOpen(false);
}; };
@@ -899,7 +1084,88 @@ export default function App() {
}; };
}, [contextMenu]); }, [contextMenu]);
const handleSendChat = async (content: string) => { const handleOpenAttachmentPicker = () => {
fileInputRef.current?.click();
};
// Removes one queued attachment (by id) before the message is sent.
const handleRemovePendingAttachment = (attachmentId: string) => {
  setPendingAttachments((queue) => queue.filter((entry) => entry.id !== attachmentId));
};
// Validates and encodes picked/dropped/pasted files, then queues them on the
// composer. Rejected wholesale (nothing is queued, one error is surfaced)
// when any file is unsupported, oversized, or the per-message count limit
// would be exceeded.
const appendPendingAttachments = async (files: File[]) => {
  if (!files.length) return;
  if (isSearchMode) {
    setError("Attachments are only available in chat mode.");
    return;
  }
  setError(null);
  try {
    // Check the count limit up front so we never read and base64-encode file
    // contents (potentially many MB) that would be rejected anyway.
    if (pendingAttachmentsRef.current.length + files.length > MAX_CHAT_ATTACHMENTS) {
      throw new Error(`You can attach up to ${MAX_CHAT_ATTACHMENTS} files per message.`);
    }
    const attachments = await Promise.all(files.map((file) => buildChatAttachment(file)));
    setPendingAttachments((current) => current.concat(attachments));
    focusComposer();
  } catch (err) {
    // buildChatAttachment throws Errors with user-facing messages.
    const message = err instanceof Error ? err.message : String(err);
    setError(message);
  }
};
// Change handler for the hidden file input. The input's value is cleared
// synchronously so re-selecting the same file later still fires a change.
const handleFileSelection = async (event: Event) => {
  const inputElement = event.currentTarget as HTMLInputElement;
  const selectedFiles = inputElement.files ? Array.from(inputElement.files) : [];
  inputElement.value = "";
  await appendPendingAttachments(selectedFiles);
};
// Intercepts pastes carrying files (e.g. screenshots) and queues them as
// attachments instead of letting the browser mutate the textarea.
const handleComposerPaste = async (event: ClipboardEvent) => {
  const pastedFiles = getFilesFromDataTransfer(event.clipboardData);
  if (pastedFiles.length === 0) return;
  event.preventDefault();
  await appendPendingAttachments(pastedFiles);
};
// Shows the drop overlay when a file drag enters the composer.
const handleComposerDragEnter = (event: DragEvent) => {
  if (!hasFileTransfer(event.dataTransfer)) {
    return;
  }
  event.preventDefault();
  if (!isSearchMode) {
    // Count nested enters so a child element's dragleave doesn't hide the overlay.
    dragDepthRef.current = dragDepthRef.current + 1;
    setIsComposerDropActive(true);
  }
};
// Keeps the drop target active while dragging over the composer and sets the
// cursor: copy semantics in chat mode, blocked in search mode.
const handleComposerDragOver = (event: DragEvent) => {
  if (!hasFileTransfer(event.dataTransfer)) {
    return;
  }
  event.preventDefault();
  const transfer = event.dataTransfer;
  if (transfer) {
    transfer.dropEffect = isSearchMode ? "none" : "copy";
  }
  if (!isSearchMode) {
    setIsComposerDropActive(true);
  }
};
// Hides the drop overlay only once the drag has fully left the composer
// (pairs with dragenter's depth increment; clamped for stray leave events).
const handleComposerDragLeave = (event: DragEvent) => {
  if (!hasFileTransfer(event.dataTransfer)) {
    return;
  }
  event.preventDefault();
  if (isSearchMode) {
    return;
  }
  const nextDepth = Math.max(0, dragDepthRef.current - 1);
  dragDepthRef.current = nextDepth;
  if (nextDepth === 0) {
    setIsComposerDropActive(false);
  }
};
// Finalizes a drop: resets drag state unconditionally, then queues the
// dropped files (appendPendingAttachments rejects them in search mode).
const handleComposerDrop = async (event: DragEvent) => {
  if (!hasFileTransfer(event.dataTransfer)) {
    return;
  }
  event.preventDefault();
  dragDepthRef.current = 0;
  setIsComposerDropActive(false);
  const droppedFiles = getFilesFromDataTransfer(event.dataTransfer);
  await appendPendingAttachments(droppedFiles);
};
const handleSendChat = async (content: string, attachments: ChatAttachment[]) => {
pendingReplyScrollRef.current = true; pendingReplyScrollRef.current = true;
expandTranscriptTailSpacer(getReplyScrollBufferHeight()); expandTranscriptTailSpacer(getReplyScrollBufferHeight());
@@ -909,7 +1175,7 @@ export default function App() {
role: "user", role: "user",
content, content,
name: null, name: null,
metadata: null, metadata: attachments.length ? { attachments } : null,
}; };
const optimisticAssistantMessage: Message = { const optimisticAssistantMessage: Message = {
@@ -965,13 +1231,15 @@ export default function App() {
...baseChat.messages ...baseChat.messages
.filter((message) => !isToolCallLogMessage(message)) .filter((message) => !isToolCallLogMessage(message))
.map((message) => ({ .map((message) => ({
role: message.role, role: message.role,
content: message.content, content: message.content,
...(message.name ? { name: message.name } : {}), ...(message.name ? { name: message.name } : {}),
...(getMessageAttachments(message.metadata).length ? { attachments: getMessageAttachments(message.metadata) } : {}),
})), })),
{ {
role: "user", role: "user",
content, content,
...(attachments.length ? { attachments } : {}),
}, },
]; ];
@@ -984,7 +1252,8 @@ export default function App() {
const hasExistingTitle = Boolean(selectedChat?.id === chatId ? selectedChat.title?.trim() : chatSummary?.title?.trim()); const hasExistingTitle = Boolean(selectedChat?.id === chatId ? selectedChat.title?.trim() : chatSummary?.title?.trim());
if (!hasExistingTitle && !pendingTitleGenerationRef.current.has(chatId)) { if (!hasExistingTitle && !pendingTitleGenerationRef.current.has(chatId)) {
pendingTitleGenerationRef.current.add(chatId); pendingTitleGenerationRef.current.add(chatId);
void suggestChatTitle({ chatId, content }) const titleSeed = content || buildAttachmentSummary(attachments) || "Uploaded files";
void suggestChatTitle({ chatId, content: titleSeed })
.then((updatedChat) => { .then((updatedChat) => {
setChats((current) => setChats((current) =>
current.map((chat) => { current.map((chat) => {
@@ -1232,6 +1501,7 @@ export default function App() {
setDraftKind(null); setDraftKind(null);
setPendingChatState(null); setPendingChatState(null);
setComposer(""); setComposer("");
setPendingAttachments([]);
setChats((current) => { setChats((current) => {
const withoutExisting = current.filter((existing) => existing.id !== chat.id); const withoutExisting = current.filter((existing) => existing.id !== chat.id);
return [chat, ...withoutExisting]; return [chat, ...withoutExisting];
@@ -1265,9 +1535,15 @@ export default function App() {
const handleSend = async () => { const handleSend = async () => {
const content = composer.trim(); const content = composer.trim();
if (!content || isSending) return; const attachments = pendingAttachments;
if ((!content && !attachments.length) || isSending) return;
if (isSearchMode && attachments.length) {
setError("Attachments are only available in chat mode.");
return;
}
setComposer(""); setComposer("");
setPendingAttachments([]);
setError(null); setError(null);
setIsSending(true); setIsSending(true);
@@ -1275,7 +1551,7 @@ export default function App() {
if (isSearchMode) { if (isSearchMode) {
await handleSendSearch(content); await handleSendSearch(content);
} else { } else {
await handleSendChat(content); await handleSendChat(content, attachments);
} }
} catch (err) { } catch (err) {
const message = err instanceof Error ? err.message : String(err); const message = err instanceof Error ? err.message : String(err);
@@ -1286,6 +1562,8 @@ export default function App() {
} }
if (!isSearchMode) { if (!isSearchMode) {
setComposer(content);
setPendingAttachments(attachments);
setPendingChatState(null); setPendingChatState(null);
} }
@@ -1519,7 +1797,42 @@ export default function App() {
</div> </div>
<footer className="pointer-events-none absolute inset-x-0 bottom-0 z-10 bg-[linear-gradient(to_top,hsl(235_50%_4%)_0%,hsl(235_50%_4%_/_0.92)_58%,transparent)] p-3 pt-14 md:p-6 md:pt-20"> <footer className="pointer-events-none absolute inset-x-0 bottom-0 z-10 bg-[linear-gradient(to_top,hsl(235_50%_4%)_0%,hsl(235_50%_4%_/_0.92)_58%,transparent)] p-3 pt-14 md:p-6 md:pt-20">
<div className="pointer-events-auto mx-auto max-w-4xl rounded-2xl border border-violet-300/30 bg-[linear-gradient(135deg,hsl(235_48%_7%_/_0.96),hsl(258_48%_11%_/_0.94))] p-2 shadow-lg shadow-black/20"> <div
className={cn(
"pointer-events-auto mx-auto max-w-4xl rounded-2xl border bg-[linear-gradient(135deg,hsl(235_48%_7%_/_0.96),hsl(258_48%_11%_/_0.94))] p-2 shadow-lg shadow-black/20 transition",
isComposerDropActive
? "border-cyan-300/70 shadow-cyan-500/20"
: "border-violet-300/30"
)}
onDragEnter={handleComposerDragEnter}
onDragOver={handleComposerDragOver}
onDragLeave={handleComposerDragLeave}
onDrop={(event) => {
void handleComposerDrop(event);
}}
>
<input
ref={fileInputRef}
type="file"
multiple
accept={CHAT_FILE_ACCEPT}
className="hidden"
onChange={(event) => {
void handleFileSelection(event);
}}
/>
{!isSearchMode && pendingAttachments.length ? (
<div className="px-2 pb-2 pt-1">
<ChatAttachmentList attachments={pendingAttachments} onRemove={handleRemovePendingAttachment} />
</div>
) : null}
{!isSearchMode && isComposerDropActive ? (
<div className="px-3 pb-2">
<div className="rounded-xl border border-dashed border-cyan-300/55 bg-cyan-300/8 px-4 py-3 text-sm text-cyan-100">
Drop files to attach them
</div>
</div>
) : null}
<Textarea <Textarea
id="composer-input" id="composer-input"
rows={1} rows={1}
@@ -1530,6 +1843,9 @@ export default function App() {
textarea.style.height = `${textarea.scrollHeight}px`; textarea.style.height = `${textarea.scrollHeight}px`;
setComposer(textarea.value); setComposer(textarea.value);
}} }}
onPaste={(event) => {
void handleComposerPaste(event);
}}
onKeyDown={(event) => { onKeyDown={(event) => {
if (event.key === "Enter" && !event.shiftKey) { if (event.key === "Enter" && !event.shiftKey) {
event.preventDefault(); event.preventDefault();
@@ -1542,7 +1858,24 @@ export default function App() {
/> />
<div className={cn("flex items-center gap-3 px-2 pb-1", error ? "justify-between" : "justify-end")}> <div className={cn("flex items-center gap-3 px-2 pb-1", error ? "justify-between" : "justify-end")}>
{error ? <p className="min-w-0 truncate text-xs text-rose-300">{error}</p> : null} {error ? <p className="min-w-0 truncate text-xs text-rose-300">{error}</p> : null}
<Button className="h-10 w-10 rounded-lg" onClick={() => void handleSend()} size="icon" disabled={isSending || !composer.trim()}> {!isSearchMode ? (
<Button
className="h-10 w-10 rounded-lg"
onClick={handleOpenAttachmentPicker}
size="icon"
variant="secondary"
disabled={isSending || pendingAttachments.length >= MAX_CHAT_ATTACHMENTS}
aria-label="Attach files"
>
<Paperclip className="h-4 w-4" />
</Button>
) : null}
<Button
className="h-10 w-10 rounded-lg"
onClick={() => void handleSend()}
size="icon"
disabled={isSending || (!composer.trim() && !pendingAttachments.length)}
>
{isSearchMode ? <Search className="h-4 w-4" /> : <SendHorizontal className="h-4 w-4" />} {isSearchMode ? <Search className="h-4 w-4" /> : <SendHorizontal className="h-4 w-4" />}
</Button> </Button>
</div> </div>

View File

@@ -0,0 +1,103 @@
import { FileText, Image as ImageIcon, X } from "lucide-preact";
import type { ChatAttachment } from "@/lib/api";
import { cn } from "@/lib/utils";
// Props for ChatAttachmentList.
type Props = {
  attachments: ChatAttachment[];
  // Visual surface the list renders on: the composer queue (default), a
  // user message bubble, or an assistant message.
  tone?: "composer" | "user" | "assistant";
  // When provided, each attachment shows a remove button that reports its id.
  onRemove?: (id: string) => void;
};
// Builds a short preview of a text attachment: strips carriage returns,
// trims whitespace, caps at 280 characters with a "..." suffix, and shows a
// placeholder for files that are empty after trimming.
function getTextPreview(value: string) {
  const cleaned = value.replace(/\r/g, "").trim();
  if (!cleaned) return "(empty file)";
  if (cleaned.length <= 280) return cleaned;
  return `${cleaned.slice(0, 280).trimEnd()}...`;
}
// Maps each tone to its border/background/text utility classes; anything
// other than "user"/"assistant" gets the composer surface.
function getSurfaceClasses(tone: Props["tone"]) {
  switch (tone) {
    case "user":
      return "border-white/12 bg-black/16 text-fuchsia-50";
    case "assistant":
      return "border-violet-300/16 bg-violet-400/8 text-violet-50";
    default:
      return "border-violet-300/18 bg-background/40 text-violet-50";
  }
}
// Renders a vertical list of chat attachments. Images get a thumbnail plus
// filename/MIME metadata; text files get a metadata header plus a short
// content preview. When onRemove is provided (composer queue), each row
// shows a remove button.
export function ChatAttachmentList({ attachments, tone = "composer", onRemove }: Props) {
  if (!attachments.length) return null;
  const surfaceClasses = getSurfaceClasses(tone);
  return (
    <div className="space-y-2">
      {attachments.map((attachment) => {
        // Discriminates the ChatAttachment union: image rows vs text rows.
        const isImage = attachment.kind === "image";
        return (
          <div key={attachment.id} className={cn("overflow-hidden rounded-xl border", surfaceClasses)}>
            {isImage ? (
              <div className="grid gap-0 md:grid-cols-[minmax(0,220px)_minmax(0,1fr)]">
                <div className="border-b border-white/10 bg-black/10 md:border-b-0 md:border-r">
                  <img src={attachment.dataUrl} alt={attachment.filename} className="block max-h-56 w-full object-cover" />
                </div>
                <div className="flex min-w-0 flex-col gap-2 p-3">
                  <div className="flex items-start gap-2">
                    <span className="mt-0.5 rounded-md border border-white/12 bg-white/5 p-1.5">
                      <ImageIcon className="h-3.5 w-3.5" />
                    </span>
                    <div className="min-w-0 flex-1">
                      <p className="truncate text-sm font-medium">{attachment.filename}</p>
                      <p className="text-xs text-muted-foreground">{attachment.mimeType}</p>
                    </div>
                    {onRemove ? (
                      <button
                        type="button"
                        className="rounded-md border border-white/10 p-1 text-muted-foreground transition hover:bg-white/8 hover:text-foreground"
                        onClick={() => onRemove(attachment.id)}
                        aria-label={`Remove ${attachment.filename}`}
                      >
                        <X className="h-3.5 w-3.5" />
                      </button>
                    ) : null}
                  </div>
                </div>
              </div>
            ) : (
              <div className="p-3">
                <div className="flex items-start gap-2">
                  <span className="mt-0.5 rounded-md border border-white/12 bg-white/5 p-1.5">
                    <FileText className="h-3.5 w-3.5" />
                  </span>
                  <div className="min-w-0 flex-1">
                    <div className="flex items-start gap-2">
                      <div className="min-w-0 flex-1">
                        <p className="truncate text-sm font-medium">{attachment.filename}</p>
                        <p className="text-xs text-muted-foreground">
                          {attachment.mimeType}
                          {attachment.truncated ? " · truncated" : ""}
                        </p>
                      </div>
                      {onRemove ? (
                        <button
                          type="button"
                          className="rounded-md border border-white/10 p-1 text-muted-foreground transition hover:bg-white/8 hover:text-foreground"
                          onClick={() => onRemove(attachment.id)}
                          aria-label={`Remove ${attachment.filename}`}
                        >
                          <X className="h-3.5 w-3.5" />
                        </button>
                      ) : null}
                    </div>
                    <pre className="mt-2 overflow-x-auto rounded-lg border border-white/8 bg-black/16 p-3 text-xs leading-5 text-inherit whitespace-pre-wrap">
                      {getTextPreview(attachment.text)}
                    </pre>
                  </div>
                </div>
              </div>
            )}
          </div>
        );
      })}
    </div>
  );
}

View File

@@ -1,5 +1,6 @@
import { cn } from "@/lib/utils"; import { cn } from "@/lib/utils";
import type { Message } from "@/lib/api"; import { ChatAttachmentList } from "@/components/chat/chat-attachment-list";
import { getMessageAttachments, type Message } from "@/lib/api";
import { MarkdownContent } from "@/components/markdown/markdown-content"; import { MarkdownContent } from "@/components/markdown/markdown-content";
import { Globe2, Link2, Wrench } from "lucide-preact"; import { Globe2, Link2, Wrench } from "lucide-preact";
@@ -68,28 +69,30 @@ export function ChatMessagesPanel({ messages, isLoading, isSending }: Props) {
const isUser = message.role === "user"; const isUser = message.role === "user";
const isPendingAssistant = message.id.startsWith("temp-assistant-") && isSending && message.content.trim().length === 0; const isPendingAssistant = message.id.startsWith("temp-assistant-") && isSending && message.content.trim().length === 0;
const attachments = getMessageAttachments(message.metadata);
return ( return (
<div key={message.id} className={cn("flex", isUser ? "justify-end" : "justify-start")}> <div key={message.id} className={cn("flex", isUser ? "justify-end" : "justify-start")}>
<div <div
className={cn( className={cn(
"max-w-[85%]", "max-w-[85%] space-y-3",
isUser isUser
? "rounded-xl border border-violet-300/24 bg-[linear-gradient(135deg,hsl(258_86%_48%_/_0.86),hsl(278_72%_29%_/_0.86))] px-4 py-3 text-sm leading-6 text-fuchsia-50 shadow-sm" ? "rounded-xl border border-violet-300/24 bg-[linear-gradient(135deg,hsl(258_86%_48%_/_0.86),hsl(278_72%_29%_/_0.86))] px-4 py-3 text-sm leading-6 text-fuchsia-50 shadow-sm"
: "text-base leading-7 text-violet-50" : "text-base leading-7 text-violet-50"
)} )}
> >
{attachments.length ? <ChatAttachmentList attachments={attachments} tone={isUser ? "user" : "assistant"} /> : null}
{isPendingAssistant ? ( {isPendingAssistant ? (
<span className="inline-flex items-center gap-1" aria-label="Assistant is typing" role="status"> <span className="inline-flex items-center gap-1" aria-label="Assistant is typing" role="status">
<span className="inline-block h-1.5 w-1.5 animate-bounce rounded-full bg-muted-foreground [animation-delay:0ms]" /> <span className="inline-block h-1.5 w-1.5 animate-bounce rounded-full bg-muted-foreground [animation-delay:0ms]" />
<span className="inline-block h-1.5 w-1.5 animate-bounce rounded-full bg-muted-foreground [animation-delay:140ms]" /> <span className="inline-block h-1.5 w-1.5 animate-bounce rounded-full bg-muted-foreground [animation-delay:140ms]" />
<span className="inline-block h-1.5 w-1.5 animate-bounce rounded-full bg-muted-foreground [animation-delay:280ms]" /> <span className="inline-block h-1.5 w-1.5 animate-bounce rounded-full bg-muted-foreground [animation-delay:280ms]" />
</span> </span>
) : ( ) : message.content.trim() ? (
<MarkdownContent <MarkdownContent
markdown={message.content} markdown={message.content}
className={cn("[&_a]:text-inherit [&_a]:underline", isUser ? "leading-[1.78] text-fuchsia-50" : "leading-[1.82] text-violet-50")} className={cn("[&_a]:text-inherit [&_a]:underline", isUser ? "leading-[1.78] text-fuchsia-50" : "leading-[1.82] text-violet-50")}
/> />
)} ) : null}
</div> </div>
</div> </div>
); );

View File

@@ -90,6 +90,27 @@ export type SearchDetail = {
results: SearchResultItem[]; results: SearchResultItem[];
}; };
// An image attachment inlined as a base64 data URL. Only PNG/JPEG are
// accepted; getMessageAttachments drops anything else.
export type ChatImageAttachment = {
  kind: "image";
  id: string;
  filename: string;
  mimeType: "image/png" | "image/jpeg";
  sizeBytes: number;
  // Full "data:<mime>;base64,..." string, rendered directly as <img src>.
  dataUrl: string;
};

// A text-like attachment whose contents are inlined as a string.
export type ChatTextAttachment = {
  kind: "text";
  id: string;
  filename: string;
  mimeType: string;
  sizeBytes: number;
  text: string;
  // True when the inlined text was cut at the attachment character limit.
  truncated?: boolean;
};

// Discriminated union over the "kind" tag.
export type ChatAttachment = ChatImageAttachment | ChatTextAttachment;
export type SearchRunRequest = { export type SearchRunRequest = {
query?: string; query?: string;
title?: string; title?: string;
@@ -103,6 +124,7 @@ export type CompletionRequestMessage = {
role: "system" | "user" | "assistant" | "tool"; role: "system" | "user" | "assistant" | "tool";
content: string; content: string;
name?: string; name?: string;
attachments?: ChatAttachment[];
}; };
export type Provider = "openai" | "anthropic" | "xai"; export type Provider = "openai" | "anthropic" | "xai";
@@ -251,6 +273,49 @@ export async function deleteSearch(searchId: string) {
await api<{ deleted: true }>(`/v1/searches/${searchId}`, { method: "DELETE" }); await api<{ deleted: true }>(`/v1/searches/${searchId}`, { method: "DELETE" });
} }
// Re-validates `message.metadata.attachments` from the server and returns
// only well-formed entries. Malformed items (wrong kind, missing dataUrl/
// text, non-PNG/JPEG image MIME types) are silently dropped; missing
// id/filename/mimeType/sizeBytes fields fall back to ""/0.
export function getMessageAttachments(metadata: unknown): ChatAttachment[] {
  if (!metadata || typeof metadata !== "object" || Array.isArray(metadata)) {
    return [];
  }
  const raw = (metadata as Record<string, unknown>).attachments;
  if (!Array.isArray(raw)) return [];
  const result: ChatAttachment[] = [];
  for (const candidate of raw) {
    if (!candidate || typeof candidate !== "object" || Array.isArray(candidate)) {
      continue;
    }
    const fields = candidate as Record<string, unknown>;
    const id = typeof fields.id === "string" ? fields.id : "";
    const filename = typeof fields.filename === "string" ? fields.filename : "";
    const mimeType = typeof fields.mimeType === "string" ? fields.mimeType : "";
    const sizeBytes = typeof fields.sizeBytes === "number" ? fields.sizeBytes : 0;
    if (
      fields.kind === "image" &&
      typeof fields.dataUrl === "string" &&
      (mimeType === "image/png" || mimeType === "image/jpeg")
    ) {
      result.push({ kind: "image", id, filename, mimeType, sizeBytes, dataUrl: fields.dataUrl });
    } else if (fields.kind === "text" && typeof fields.text === "string") {
      result.push({
        kind: "text",
        id,
        filename,
        mimeType,
        sizeBytes,
        text: fields.text,
        truncated: fields.truncated === true,
      });
    }
  }
  return result;
}
type RunSearchStreamHandlers = { type RunSearchStreamHandlers = {
onSearchResults?: (payload: { requestId: string | null; results: SearchResultItem[] }) => void; onSearchResults?: (payload: { requestId: string | null; results: SearchResultItem[] }) => void;
onSearchError?: (payload: { error: string }) => void; onSearchError?: (payload: { error: string }) => void;

View File

@@ -1 +1 @@
{"root":["./src/App.tsx","./src/main.tsx","./src/root-router.tsx","./src/vite-env.d.ts","./src/components/auth/auth-screen.tsx","./src/components/chat/chat-messages-panel.tsx","./src/components/markdown/markdown-content.tsx","./src/components/search/search-results-panel.tsx","./src/components/ui/button.tsx","./src/components/ui/input.tsx","./src/components/ui/scroll-area.tsx","./src/components/ui/separator.tsx","./src/components/ui/textarea.tsx","./src/hooks/use-session-auth.ts","./src/lib/api.ts","./src/lib/utils.ts","./src/pages/search-route-page.tsx"],"version":"5.9.3"} {"root":["./src/app.tsx","./src/main.tsx","./src/root-router.tsx","./src/vite-env.d.ts","./src/components/auth/auth-screen.tsx","./src/components/chat/chat-attachment-list.tsx","./src/components/chat/chat-messages-panel.tsx","./src/components/markdown/markdown-content.tsx","./src/components/search/search-results-panel.tsx","./src/components/ui/button.tsx","./src/components/ui/input.tsx","./src/components/ui/scroll-area.tsx","./src/components/ui/separator.tsx","./src/components/ui/textarea.tsx","./src/hooks/use-session-auth.ts","./src/lib/api.ts","./src/lib/utils.ts","./src/pages/search-route-page.tsx"],"version":"5.9.3"}