// Prisma schema for the personal chat DB + LLM call log

generator client {
  provider = "prisma-client-js"
}

datasource db {
  provider = "sqlite"
  url      = env("DATABASE_URL")
}

enum Provider {
  openai
  anthropic
  xai
}

enum MessageRole {
  system
  user
  assistant
  tool
}

model User {
  id        String   @id @default(cuid())
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt

  // minimal for now (single-user is fine). Keep extensible.
  handle String? @unique

  chats Chat[]
}

model Chat {
  id        String   @id @default(cuid())
  createdAt DateTime @default(now())
  updatedAt DateTime @updatedAt

  title String?

  user   User?   @relation(fields: [userId], references: [id])
  userId String?

  messages Message[]
  calls    LlmCall[]

  @@index([userId])
}

model Message {
  id        String      @id @default(cuid())
  createdAt DateTime    @default(now())

  chat   Chat   @relation(fields: [chatId], references: [id], onDelete: Cascade)
  chatId String

  role    MessageRole
  content String

  // for tool messages or attachments later
  name     String?
  metadata Json?

  @@index([chatId, createdAt])
}

model LlmCall {
  id        String   @id @default(cuid())
  createdAt DateTime @default(now())

  chat   Chat   @relation(fields: [chatId], references: [id], onDelete: Cascade)
  chatId String

  provider Provider
  model    String

  // request/response snapshots for debugging + replay
  request  Json
  response Json?

  // usage/cost basics
  inputTokens  Int?
  outputTokens Int?
  totalTokens  Int?
  latencyMs    Int?
  error        String?

  @@index([chatId, createdAt])
  @@index([provider, model, createdAt])
}
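A minimal TypeScript sketch of how the generated client might be used to record one conversational turn against this schema: write the user message, snapshot the provider request/response into LlmCall for replay and cost tracking, then store the assistant reply. It assumes `prisma generate` has been run against the schema above and that DATABASE_URL points at the SQLite file; the function name `logAssistantTurn`, the placeholder response object, and the model string are illustrative, not part of any existing codebase.

// usage-sketch.ts (illustrative only)
import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

async function logAssistantTurn(chatId: string, userText: string) {
  // Store the user's message first.
  await prisma.message.create({
    data: { chatId, role: "user", content: userText },
  });

  const startedAt = Date.now();
  // ... call the provider here; this response shape is a stand-in ...
  const response = { text: "Hello!", usage: { input: 12, output: 5 } };

  // Snapshot request/response plus basic usage for debugging + replay.
  await prisma.llmCall.create({
    data: {
      chatId,
      provider: "openai",
      model: "gpt-4o-mini", // placeholder model name
      request: { messages: [{ role: "user", content: userText }] },
      response,
      inputTokens: response.usage.input,
      outputTokens: response.usage.output,
      totalTokens: response.usage.input + response.usage.output,
      latencyMs: Date.now() - startedAt,
    },
  });

  // Persist the assistant's reply as its own message row.
  await prisma.message.create({
    data: { chatId, role: "assistant", content: response.text },
  });
}

Because Message and LlmCall both cascade on chat deletion and are indexed on [chatId, createdAt], per-chat history and call logs can be fetched and pruned cheaply; the [provider, model, createdAt] index supports cost roll-ups by model over time.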