{
  "name": "llm-backend",
  "version": "0.1.0",
  "private": true,
  "type": "module",
  "description": "LLM multiplexer + personal chat database backend",
  "scripts": {
    "dev": "tsx watch src/index.ts",
    "start": "node dist/index.js",
    "build": "tsc -p tsconfig.json",
    "prisma:generate": "prisma generate",
    "db:migrate": "prisma migrate dev",
    "db:studio": "prisma studio"
  },
  "dependencies": {
    "@anthropic-ai/sdk": "^0.71.2",
    "@fastify/cors": "^11.2.0",
    "@fastify/sensible": "^6.0.4",
    "@fastify/swagger": "^9.6.1",
    "@fastify/swagger-ui": "^5.2.5",
    "@prisma/client": "^6.16.1",
    "dotenv": "^17.2.3",
    "fastify": "^5.7.2",
    "openai": "^6.16.0",
    "pino-pretty": "^13.1.3",
    "zod": "^4.3.6"
  },
  "devDependencies": {
    "@types/node": "^25.0.10",
    "prisma": "^6.16.1",
    "tsx": "^4.21.0",
    "typescript": "^5.9.3"
  }
}