// llm-backend/generated/prisma/models/LlmCall.ts
/* !!! This is code generated by Prisma. Do not edit directly. !!! */
/* eslint-disable */
// biome-ignore-all lint: generated file
// @ts-nocheck
/*
* This file exports the `LlmCall` model and its related types.
*
* 🟢 You can import this file directly.
*/
import type * as runtime from "@prisma/client/runtime/client"
import type * as $Enums from "../enums.js"
import type * as Prisma from "../internal/prismaNamespace.js"
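/*
 * Illustrative usage (not part of the generated output): importing the model
 * type in application code. The relative import path below is an assumption
 * about your project layout; adjust it to wherever this generated client lives.
 *
 *   import type { LlmCallModel } from "./generated/prisma/models/LlmCall.js"
 *
 *   const describeCall = (call: LlmCallModel): string =>
 *     `${call.model} (${call.provider}): ${call.totalTokens ?? 0} tokens`
 */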
/**
* Model LlmCall
*
*/
export type LlmCallModel = runtime.Types.Result.DefaultSelection<Prisma.$LlmCallPayload>
export type AggregateLlmCall = {
_count: LlmCallCountAggregateOutputType | null
_avg: LlmCallAvgAggregateOutputType | null
_sum: LlmCallSumAggregateOutputType | null
_min: LlmCallMinAggregateOutputType | null
_max: LlmCallMaxAggregateOutputType | null
}
export type LlmCallAvgAggregateOutputType = {
inputTokens: number | null
outputTokens: number | null
totalTokens: number | null
latencyMs: number | null
}
export type LlmCallSumAggregateOutputType = {
inputTokens: number | null
outputTokens: number | null
totalTokens: number | null
latencyMs: number | null
}
export type LlmCallMinAggregateOutputType = {
id: string | null
createdAt: Date | null
chatId: string | null
provider: $Enums.Provider | null
model: string | null
inputTokens: number | null
outputTokens: number | null
totalTokens: number | null
latencyMs: number | null
error: string | null
}
export type LlmCallMaxAggregateOutputType = {
id: string | null
createdAt: Date | null
chatId: string | null
provider: $Enums.Provider | null
model: string | null
inputTokens: number | null
outputTokens: number | null
totalTokens: number | null
latencyMs: number | null
error: string | null
}
export type LlmCallCountAggregateOutputType = {
id: number
createdAt: number
chatId: number
provider: number
model: number
request: number
response: number
inputTokens: number
outputTokens: number
totalTokens: number
latencyMs: number
error: number
_all: number
}
export type LlmCallAvgAggregateInputType = {
inputTokens?: true
outputTokens?: true
totalTokens?: true
latencyMs?: true
}
export type LlmCallSumAggregateInputType = {
inputTokens?: true
outputTokens?: true
totalTokens?: true
latencyMs?: true
}
export type LlmCallMinAggregateInputType = {
id?: true
createdAt?: true
chatId?: true
provider?: true
model?: true
inputTokens?: true
outputTokens?: true
totalTokens?: true
latencyMs?: true
error?: true
}
export type LlmCallMaxAggregateInputType = {
id?: true
createdAt?: true
chatId?: true
provider?: true
model?: true
inputTokens?: true
outputTokens?: true
totalTokens?: true
latencyMs?: true
error?: true
}
export type LlmCallCountAggregateInputType = {
id?: true
createdAt?: true
chatId?: true
provider?: true
model?: true
request?: true
response?: true
inputTokens?: true
outputTokens?: true
totalTokens?: true
latencyMs?: true
error?: true
_all?: true
}
export type LlmCallAggregateArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
/**
* Filter which LlmCall to aggregate.
*/
where?: Prisma.LlmCallWhereInput
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/sorting Sorting Docs}
*
* Determine the order of LlmCalls to fetch.
*/
orderBy?: Prisma.LlmCallOrderByWithRelationInput | Prisma.LlmCallOrderByWithRelationInput[]
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination#cursor-based-pagination Cursor Docs}
*
* Sets the start position
*/
cursor?: Prisma.LlmCallWhereUniqueInput
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs}
*
* Take `±n` LlmCalls from the position of the cursor.
*/
take?: number
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs}
*
* Skip the first `n` LlmCalls.
*/
skip?: number
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/aggregations Aggregation Docs}
*
* Count returned LlmCalls
**/
_count?: true | LlmCallCountAggregateInputType
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/aggregations Aggregation Docs}
*
* Select which fields to average
**/
_avg?: LlmCallAvgAggregateInputType
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/aggregations Aggregation Docs}
*
* Select which fields to sum
**/
_sum?: LlmCallSumAggregateInputType
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/aggregations Aggregation Docs}
*
* Select which fields to find the minimum value
**/
_min?: LlmCallMinAggregateInputType
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/aggregations Aggregation Docs}
*
* Select which fields to find the maximum value
**/
_max?: LlmCallMaxAggregateInputType
}
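/*
 * Illustrative sketch (not generated): passing these aggregate args to
 * `prisma.llmCall.aggregate`. Assumes a `PrismaClient` instance named `prisma`;
 * the `error: null` filter is just one way to scope the aggregation to
 * successful calls.
 *
 *   const stats = await prisma.llmCall.aggregate({
 *     where: { error: null },
 *     _count: true,
 *     _avg: { latencyMs: true },
 *     _sum: { totalTokens: true },
 *   })
 *   // stats._count, stats._avg.latencyMs, stats._sum.totalTokens
 */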
export type GetLlmCallAggregateType<T extends LlmCallAggregateArgs> = {
[P in keyof T & keyof AggregateLlmCall]: P extends '_count' | 'count'
? T[P] extends true
? number
: Prisma.GetScalarType<T[P], AggregateLlmCall[P]>
: Prisma.GetScalarType<T[P], AggregateLlmCall[P]>
}
export type LlmCallGroupByArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
where?: Prisma.LlmCallWhereInput
orderBy?: Prisma.LlmCallOrderByWithAggregationInput | Prisma.LlmCallOrderByWithAggregationInput[]
by: Prisma.LlmCallScalarFieldEnum[] | Prisma.LlmCallScalarFieldEnum
having?: Prisma.LlmCallScalarWhereWithAggregatesInput
take?: number
skip?: number
_count?: LlmCallCountAggregateInputType | true
_avg?: LlmCallAvgAggregateInputType
_sum?: LlmCallSumAggregateInputType
_min?: LlmCallMinAggregateInputType
_max?: LlmCallMaxAggregateInputType
}
export type LlmCallGroupByOutputType = {
id: string
createdAt: Date
chatId: string
provider: $Enums.Provider
model: string
request: runtime.JsonValue
response: runtime.JsonValue | null
inputTokens: number | null
outputTokens: number | null
totalTokens: number | null
latencyMs: number | null
error: string | null
_count: LlmCallCountAggregateOutputType | null
_avg: LlmCallAvgAggregateOutputType | null
_sum: LlmCallSumAggregateOutputType | null
_min: LlmCallMinAggregateOutputType | null
_max: LlmCallMaxAggregateOutputType | null
}
type GetLlmCallGroupByPayload<T extends LlmCallGroupByArgs> = Prisma.PrismaPromise<
Array<
Prisma.PickEnumerable<LlmCallGroupByOutputType, T['by']> &
{
[P in ((keyof T) & (keyof LlmCallGroupByOutputType))]: P extends '_count'
? T[P] extends boolean
? number
: Prisma.GetScalarType<T[P], LlmCallGroupByOutputType[P]>
: Prisma.GetScalarType<T[P], LlmCallGroupByOutputType[P]>
}
>
>
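/*
 * Illustrative sketch (not generated): a groupBy call shaped by the args and
 * payload types above. Assumes a `PrismaClient` instance named `prisma`.
 * Fields referenced in `orderBy` must also appear in `by`.
 *
 *   const byProvider = await prisma.llmCall.groupBy({
 *     by: ['provider', 'model'],
 *     _count: { _all: true },
 *     _sum: { totalTokens: true },
 *     orderBy: { provider: 'asc' },
 *   })
 */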
export type LlmCallWhereInput = {
AND?: Prisma.LlmCallWhereInput | Prisma.LlmCallWhereInput[]
OR?: Prisma.LlmCallWhereInput[]
NOT?: Prisma.LlmCallWhereInput | Prisma.LlmCallWhereInput[]
id?: Prisma.StringFilter<"LlmCall"> | string
createdAt?: Prisma.DateTimeFilter<"LlmCall"> | Date | string
chatId?: Prisma.StringFilter<"LlmCall"> | string
provider?: Prisma.EnumProviderFilter<"LlmCall"> | $Enums.Provider
model?: Prisma.StringFilter<"LlmCall"> | string
request?: Prisma.JsonFilter<"LlmCall">
response?: Prisma.JsonNullableFilter<"LlmCall">
inputTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null
outputTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null
totalTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null
latencyMs?: Prisma.IntNullableFilter<"LlmCall"> | number | null
error?: Prisma.StringNullableFilter<"LlmCall"> | string | null
chat?: Prisma.XOR<Prisma.ChatScalarRelationFilter, Prisma.ChatWhereInput>
}
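/*
 * Illustrative sketch (not generated): combining scalar filters from
 * LlmCallWhereInput. Assumes a `PrismaClient` instance named `prisma`; the
 * 2000 ms threshold is an arbitrary example value.
 *
 *   const slowOrFailed = await prisma.llmCall.findMany({
 *     where: {
 *       OR: [
 *         { latencyMs: { gt: 2000 } },
 *         { error: { not: null } },
 *       ],
 *     },
 *   })
 */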
export type LlmCallOrderByWithRelationInput = {
id?: Prisma.SortOrder
createdAt?: Prisma.SortOrder
chatId?: Prisma.SortOrder
provider?: Prisma.SortOrder
model?: Prisma.SortOrder
request?: Prisma.SortOrder
response?: Prisma.SortOrderInput | Prisma.SortOrder
inputTokens?: Prisma.SortOrderInput | Prisma.SortOrder
outputTokens?: Prisma.SortOrderInput | Prisma.SortOrder
totalTokens?: Prisma.SortOrderInput | Prisma.SortOrder
latencyMs?: Prisma.SortOrderInput | Prisma.SortOrder
error?: Prisma.SortOrderInput | Prisma.SortOrder
chat?: Prisma.ChatOrderByWithRelationInput
}
export type LlmCallWhereUniqueInput = Prisma.AtLeast<{
id?: string
AND?: Prisma.LlmCallWhereInput | Prisma.LlmCallWhereInput[]
OR?: Prisma.LlmCallWhereInput[]
NOT?: Prisma.LlmCallWhereInput | Prisma.LlmCallWhereInput[]
createdAt?: Prisma.DateTimeFilter<"LlmCall"> | Date | string
chatId?: Prisma.StringFilter<"LlmCall"> | string
provider?: Prisma.EnumProviderFilter<"LlmCall"> | $Enums.Provider
model?: Prisma.StringFilter<"LlmCall"> | string
request?: Prisma.JsonFilter<"LlmCall">
response?: Prisma.JsonNullableFilter<"LlmCall">
inputTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null
outputTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null
totalTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null
latencyMs?: Prisma.IntNullableFilter<"LlmCall"> | number | null
error?: Prisma.StringNullableFilter<"LlmCall"> | string | null
chat?: Prisma.XOR<Prisma.ChatScalarRelationFilter, Prisma.ChatWhereInput>
}, "id">
export type LlmCallOrderByWithAggregationInput = {
id?: Prisma.SortOrder
createdAt?: Prisma.SortOrder
chatId?: Prisma.SortOrder
provider?: Prisma.SortOrder
model?: Prisma.SortOrder
request?: Prisma.SortOrder
response?: Prisma.SortOrderInput | Prisma.SortOrder
inputTokens?: Prisma.SortOrderInput | Prisma.SortOrder
outputTokens?: Prisma.SortOrderInput | Prisma.SortOrder
totalTokens?: Prisma.SortOrderInput | Prisma.SortOrder
latencyMs?: Prisma.SortOrderInput | Prisma.SortOrder
error?: Prisma.SortOrderInput | Prisma.SortOrder
_count?: Prisma.LlmCallCountOrderByAggregateInput
_avg?: Prisma.LlmCallAvgOrderByAggregateInput
_max?: Prisma.LlmCallMaxOrderByAggregateInput
_min?: Prisma.LlmCallMinOrderByAggregateInput
_sum?: Prisma.LlmCallSumOrderByAggregateInput
}
export type LlmCallScalarWhereWithAggregatesInput = {
AND?: Prisma.LlmCallScalarWhereWithAggregatesInput | Prisma.LlmCallScalarWhereWithAggregatesInput[]
OR?: Prisma.LlmCallScalarWhereWithAggregatesInput[]
NOT?: Prisma.LlmCallScalarWhereWithAggregatesInput | Prisma.LlmCallScalarWhereWithAggregatesInput[]
id?: Prisma.StringWithAggregatesFilter<"LlmCall"> | string
createdAt?: Prisma.DateTimeWithAggregatesFilter<"LlmCall"> | Date | string
chatId?: Prisma.StringWithAggregatesFilter<"LlmCall"> | string
provider?: Prisma.EnumProviderWithAggregatesFilter<"LlmCall"> | $Enums.Provider
model?: Prisma.StringWithAggregatesFilter<"LlmCall"> | string
request?: Prisma.JsonWithAggregatesFilter<"LlmCall">
response?: Prisma.JsonNullableWithAggregatesFilter<"LlmCall">
inputTokens?: Prisma.IntNullableWithAggregatesFilter<"LlmCall"> | number | null
outputTokens?: Prisma.IntNullableWithAggregatesFilter<"LlmCall"> | number | null
totalTokens?: Prisma.IntNullableWithAggregatesFilter<"LlmCall"> | number | null
latencyMs?: Prisma.IntNullableWithAggregatesFilter<"LlmCall"> | number | null
error?: Prisma.StringNullableWithAggregatesFilter<"LlmCall"> | string | null
}
export type LlmCallCreateInput = {
id?: string
createdAt?: Date | string
provider: $Enums.Provider
model: string
request: Prisma.JsonNullValueInput | runtime.InputJsonValue
response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue
inputTokens?: number | null
outputTokens?: number | null
totalTokens?: number | null
latencyMs?: number | null
error?: string | null
chat: Prisma.ChatCreateNestedOneWithoutCallsInput
}
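/*
 * Illustrative sketch (not generated): creating an LlmCall with the checked
 * create input above, connecting the required `chat` relation instead of
 * setting `chatId` directly. Assumes a `PrismaClient` instance named `prisma`,
 * that Chat rows are addressed by `id`, and that `"example-model"` is a
 * placeholder model name.
 *
 *   const logCall = (provider: $Enums.Provider, chatId: string) =>
 *     prisma.llmCall.create({
 *       data: {
 *         provider,
 *         model: "example-model",
 *         request: { messages: [] },
 *         chat: { connect: { id: chatId } },
 *       },
 *     })
 */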
export type LlmCallUncheckedCreateInput = {
id?: string
createdAt?: Date | string
chatId: string
provider: $Enums.Provider
model: string
request: Prisma.JsonNullValueInput | runtime.InputJsonValue
response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue
inputTokens?: number | null
outputTokens?: number | null
totalTokens?: number | null
latencyMs?: number | null
error?: string | null
}
export type LlmCallUpdateInput = {
id?: Prisma.StringFieldUpdateOperationsInput | string
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
provider?: Prisma.EnumProviderFieldUpdateOperationsInput | $Enums.Provider
model?: Prisma.StringFieldUpdateOperationsInput | string
request?: Prisma.JsonNullValueInput | runtime.InputJsonValue
response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue
inputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
outputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
totalTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
latencyMs?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
error?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
chat?: Prisma.ChatUpdateOneRequiredWithoutCallsNestedInput
}
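/*
 * Illustrative sketch (not generated): updating a call once the provider
 * response arrives, mixing plain value sets with the atomic `increment` form
 * of NullableIntFieldUpdateOperationsInput. Assumes a `PrismaClient` instance
 * named `prisma`; `callId`, `providerResponse`, `usage` and `startedAt` are
 * placeholders from the calling code.
 *
 *   await prisma.llmCall.update({
 *     where: { id: callId },
 *     data: {
 *       response: providerResponse,                    // any InputJsonValue
 *       totalTokens: { increment: usage.totalTokens }, // atomic update
 *       latencyMs: Date.now() - startedAt,
 *     },
 *   })
 */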
export type LlmCallUncheckedUpdateInput = {
id?: Prisma.StringFieldUpdateOperationsInput | string
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
chatId?: Prisma.StringFieldUpdateOperationsInput | string
provider?: Prisma.EnumProviderFieldUpdateOperationsInput | $Enums.Provider
model?: Prisma.StringFieldUpdateOperationsInput | string
request?: Prisma.JsonNullValueInput | runtime.InputJsonValue
response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue
inputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
outputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
totalTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
latencyMs?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
error?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
}
export type LlmCallCreateManyInput = {
id?: string
createdAt?: Date | string
chatId: string
provider: $Enums.Provider
model: string
request: Prisma.JsonNullValueInput | runtime.InputJsonValue
response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue
inputTokens?: number | null
outputTokens?: number | null
totalTokens?: number | null
latencyMs?: number | null
error?: string | null
}
export type LlmCallUpdateManyMutationInput = {
id?: Prisma.StringFieldUpdateOperationsInput | string
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
provider?: Prisma.EnumProviderFieldUpdateOperationsInput | $Enums.Provider
model?: Prisma.StringFieldUpdateOperationsInput | string
request?: Prisma.JsonNullValueInput | runtime.InputJsonValue
response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue
inputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
outputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
totalTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
latencyMs?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
error?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
}
export type LlmCallUncheckedUpdateManyInput = {
id?: Prisma.StringFieldUpdateOperationsInput | string
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
chatId?: Prisma.StringFieldUpdateOperationsInput | string
provider?: Prisma.EnumProviderFieldUpdateOperationsInput | $Enums.Provider
model?: Prisma.StringFieldUpdateOperationsInput | string
request?: Prisma.JsonNullValueInput | runtime.InputJsonValue
response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue
inputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
outputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
totalTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
latencyMs?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
error?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
}
export type LlmCallListRelationFilter = {
every?: Prisma.LlmCallWhereInput
some?: Prisma.LlmCallWhereInput
none?: Prisma.LlmCallWhereInput
}
export type LlmCallOrderByRelationAggregateInput = {
_count?: Prisma.SortOrder
}
export type LlmCallCountOrderByAggregateInput = {
id?: Prisma.SortOrder
createdAt?: Prisma.SortOrder
chatId?: Prisma.SortOrder
provider?: Prisma.SortOrder
model?: Prisma.SortOrder
request?: Prisma.SortOrder
response?: Prisma.SortOrder
inputTokens?: Prisma.SortOrder
outputTokens?: Prisma.SortOrder
totalTokens?: Prisma.SortOrder
latencyMs?: Prisma.SortOrder
error?: Prisma.SortOrder
}
export type LlmCallAvgOrderByAggregateInput = {
inputTokens?: Prisma.SortOrder
outputTokens?: Prisma.SortOrder
totalTokens?: Prisma.SortOrder
latencyMs?: Prisma.SortOrder
}
export type LlmCallMaxOrderByAggregateInput = {
id?: Prisma.SortOrder
createdAt?: Prisma.SortOrder
chatId?: Prisma.SortOrder
provider?: Prisma.SortOrder
model?: Prisma.SortOrder
inputTokens?: Prisma.SortOrder
outputTokens?: Prisma.SortOrder
totalTokens?: Prisma.SortOrder
latencyMs?: Prisma.SortOrder
error?: Prisma.SortOrder
}
export type LlmCallMinOrderByAggregateInput = {
id?: Prisma.SortOrder
createdAt?: Prisma.SortOrder
chatId?: Prisma.SortOrder
provider?: Prisma.SortOrder
model?: Prisma.SortOrder
inputTokens?: Prisma.SortOrder
outputTokens?: Prisma.SortOrder
totalTokens?: Prisma.SortOrder
latencyMs?: Prisma.SortOrder
error?: Prisma.SortOrder
}
export type LlmCallSumOrderByAggregateInput = {
inputTokens?: Prisma.SortOrder
outputTokens?: Prisma.SortOrder
totalTokens?: Prisma.SortOrder
latencyMs?: Prisma.SortOrder
}
export type LlmCallCreateNestedManyWithoutChatInput = {
create?: Prisma.XOR<Prisma.LlmCallCreateWithoutChatInput, Prisma.LlmCallUncheckedCreateWithoutChatInput> | Prisma.LlmCallCreateWithoutChatInput[] | Prisma.LlmCallUncheckedCreateWithoutChatInput[]
connectOrCreate?: Prisma.LlmCallCreateOrConnectWithoutChatInput | Prisma.LlmCallCreateOrConnectWithoutChatInput[]
createMany?: Prisma.LlmCallCreateManyChatInputEnvelope
connect?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[]
}
export type LlmCallUncheckedCreateNestedManyWithoutChatInput = {
create?: Prisma.XOR<Prisma.LlmCallCreateWithoutChatInput, Prisma.LlmCallUncheckedCreateWithoutChatInput> | Prisma.LlmCallCreateWithoutChatInput[] | Prisma.LlmCallUncheckedCreateWithoutChatInput[]
connectOrCreate?: Prisma.LlmCallCreateOrConnectWithoutChatInput | Prisma.LlmCallCreateOrConnectWithoutChatInput[]
createMany?: Prisma.LlmCallCreateManyChatInputEnvelope
connect?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[]
}
export type LlmCallUpdateManyWithoutChatNestedInput = {
create?: Prisma.XOR<Prisma.LlmCallCreateWithoutChatInput, Prisma.LlmCallUncheckedCreateWithoutChatInput> | Prisma.LlmCallCreateWithoutChatInput[] | Prisma.LlmCallUncheckedCreateWithoutChatInput[]
connectOrCreate?: Prisma.LlmCallCreateOrConnectWithoutChatInput | Prisma.LlmCallCreateOrConnectWithoutChatInput[]
upsert?: Prisma.LlmCallUpsertWithWhereUniqueWithoutChatInput | Prisma.LlmCallUpsertWithWhereUniqueWithoutChatInput[]
createMany?: Prisma.LlmCallCreateManyChatInputEnvelope
set?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[]
disconnect?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[]
delete?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[]
connect?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[]
update?: Prisma.LlmCallUpdateWithWhereUniqueWithoutChatInput | Prisma.LlmCallUpdateWithWhereUniqueWithoutChatInput[]
updateMany?: Prisma.LlmCallUpdateManyWithWhereWithoutChatInput | Prisma.LlmCallUpdateManyWithWhereWithoutChatInput[]
deleteMany?: Prisma.LlmCallScalarWhereInput | Prisma.LlmCallScalarWhereInput[]
}
export type LlmCallUncheckedUpdateManyWithoutChatNestedInput = {
create?: Prisma.XOR<Prisma.LlmCallCreateWithoutChatInput, Prisma.LlmCallUncheckedCreateWithoutChatInput> | Prisma.LlmCallCreateWithoutChatInput[] | Prisma.LlmCallUncheckedCreateWithoutChatInput[]
connectOrCreate?: Prisma.LlmCallCreateOrConnectWithoutChatInput | Prisma.LlmCallCreateOrConnectWithoutChatInput[]
upsert?: Prisma.LlmCallUpsertWithWhereUniqueWithoutChatInput | Prisma.LlmCallUpsertWithWhereUniqueWithoutChatInput[]
createMany?: Prisma.LlmCallCreateManyChatInputEnvelope
set?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[]
disconnect?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[]
delete?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[]
connect?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[]
update?: Prisma.LlmCallUpdateWithWhereUniqueWithoutChatInput | Prisma.LlmCallUpdateWithWhereUniqueWithoutChatInput[]
updateMany?: Prisma.LlmCallUpdateManyWithWhereWithoutChatInput | Prisma.LlmCallUpdateManyWithWhereWithoutChatInput[]
deleteMany?: Prisma.LlmCallScalarWhereInput | Prisma.LlmCallScalarWhereInput[]
}
export type EnumProviderFieldUpdateOperationsInput = {
set?: $Enums.Provider
}
export type NullableIntFieldUpdateOperationsInput = {
set?: number | null
increment?: number
decrement?: number
multiply?: number
divide?: number
}
export type LlmCallCreateWithoutChatInput = {
id?: string
createdAt?: Date | string
provider: $Enums.Provider
model: string
request: Prisma.JsonNullValueInput | runtime.InputJsonValue
response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue
inputTokens?: number | null
outputTokens?: number | null
totalTokens?: number | null
latencyMs?: number | null
error?: string | null
}
export type LlmCallUncheckedCreateWithoutChatInput = {
id?: string
createdAt?: Date | string
provider: $Enums.Provider
model: string
request: Prisma.JsonNullValueInput | runtime.InputJsonValue
response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue
inputTokens?: number | null
outputTokens?: number | null
totalTokens?: number | null
latencyMs?: number | null
error?: string | null
}
export type LlmCallCreateOrConnectWithoutChatInput = {
where: Prisma.LlmCallWhereUniqueInput
create: Prisma.XOR<Prisma.LlmCallCreateWithoutChatInput, Prisma.LlmCallUncheckedCreateWithoutChatInput>
}
export type LlmCallCreateManyChatInputEnvelope = {
data: Prisma.LlmCallCreateManyChatInput | Prisma.LlmCallCreateManyChatInput[]
}
export type LlmCallUpsertWithWhereUniqueWithoutChatInput = {
where: Prisma.LlmCallWhereUniqueInput
update: Prisma.XOR<Prisma.LlmCallUpdateWithoutChatInput, Prisma.LlmCallUncheckedUpdateWithoutChatInput>
create: Prisma.XOR<Prisma.LlmCallCreateWithoutChatInput, Prisma.LlmCallUncheckedCreateWithoutChatInput>
}
export type LlmCallUpdateWithWhereUniqueWithoutChatInput = {
where: Prisma.LlmCallWhereUniqueInput
data: Prisma.XOR<Prisma.LlmCallUpdateWithoutChatInput, Prisma.LlmCallUncheckedUpdateWithoutChatInput>
}
export type LlmCallUpdateManyWithWhereWithoutChatInput = {
where: Prisma.LlmCallScalarWhereInput
data: Prisma.XOR<Prisma.LlmCallUpdateManyMutationInput, Prisma.LlmCallUncheckedUpdateManyWithoutChatInput>
}
export type LlmCallScalarWhereInput = {
AND?: Prisma.LlmCallScalarWhereInput | Prisma.LlmCallScalarWhereInput[]
OR?: Prisma.LlmCallScalarWhereInput[]
NOT?: Prisma.LlmCallScalarWhereInput | Prisma.LlmCallScalarWhereInput[]
id?: Prisma.StringFilter<"LlmCall"> | string
createdAt?: Prisma.DateTimeFilter<"LlmCall"> | Date | string
chatId?: Prisma.StringFilter<"LlmCall"> | string
provider?: Prisma.EnumProviderFilter<"LlmCall"> | $Enums.Provider
model?: Prisma.StringFilter<"LlmCall"> | string
request?: Prisma.JsonFilter<"LlmCall">
response?: Prisma.JsonNullableFilter<"LlmCall">
inputTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null
outputTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null
totalTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null
latencyMs?: Prisma.IntNullableFilter<"LlmCall"> | number | null
error?: Prisma.StringNullableFilter<"LlmCall"> | string | null
}
export type LlmCallCreateManyChatInput = {
id?: string
createdAt?: Date | string
provider: $Enums.Provider
model: string
request: Prisma.JsonNullValueInput | runtime.InputJsonValue
response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue
inputTokens?: number | null
outputTokens?: number | null
totalTokens?: number | null
latencyMs?: number | null
error?: string | null
}
export type LlmCallUpdateWithoutChatInput = {
id?: Prisma.StringFieldUpdateOperationsInput | string
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
provider?: Prisma.EnumProviderFieldUpdateOperationsInput | $Enums.Provider
model?: Prisma.StringFieldUpdateOperationsInput | string
request?: Prisma.JsonNullValueInput | runtime.InputJsonValue
response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue
inputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
outputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
totalTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
latencyMs?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
error?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
}
export type LlmCallUncheckedUpdateWithoutChatInput = {
id?: Prisma.StringFieldUpdateOperationsInput | string
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
provider?: Prisma.EnumProviderFieldUpdateOperationsInput | $Enums.Provider
model?: Prisma.StringFieldUpdateOperationsInput | string
request?: Prisma.JsonNullValueInput | runtime.InputJsonValue
response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue
inputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
outputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
totalTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
latencyMs?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
error?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
}
export type LlmCallUncheckedUpdateManyWithoutChatInput = {
id?: Prisma.StringFieldUpdateOperationsInput | string
createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string
provider?: Prisma.EnumProviderFieldUpdateOperationsInput | $Enums.Provider
model?: Prisma.StringFieldUpdateOperationsInput | string
request?: Prisma.JsonNullValueInput | runtime.InputJsonValue
response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue
inputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
outputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
totalTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
latencyMs?: Prisma.NullableIntFieldUpdateOperationsInput | number | null
error?: Prisma.NullableStringFieldUpdateOperationsInput | string | null
}
export type LlmCallSelect<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = runtime.Types.Extensions.GetSelect<{
id?: boolean
createdAt?: boolean
chatId?: boolean
provider?: boolean
model?: boolean
request?: boolean
response?: boolean
inputTokens?: boolean
outputTokens?: boolean
totalTokens?: boolean
latencyMs?: boolean
error?: boolean
chat?: boolean | Prisma.ChatDefaultArgs<ExtArgs>
}, ExtArgs["result"]["llmCall"]>
export type LlmCallSelectCreateManyAndReturn<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = runtime.Types.Extensions.GetSelect<{
id?: boolean
createdAt?: boolean
chatId?: boolean
provider?: boolean
model?: boolean
request?: boolean
response?: boolean
inputTokens?: boolean
outputTokens?: boolean
totalTokens?: boolean
latencyMs?: boolean
error?: boolean
chat?: boolean | Prisma.ChatDefaultArgs<ExtArgs>
}, ExtArgs["result"]["llmCall"]>
export type LlmCallSelectUpdateManyAndReturn<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = runtime.Types.Extensions.GetSelect<{
id?: boolean
createdAt?: boolean
chatId?: boolean
provider?: boolean
model?: boolean
request?: boolean
response?: boolean
inputTokens?: boolean
outputTokens?: boolean
totalTokens?: boolean
latencyMs?: boolean
error?: boolean
chat?: boolean | Prisma.ChatDefaultArgs<ExtArgs>
}, ExtArgs["result"]["llmCall"]>
export type LlmCallSelectScalar = {
id?: boolean
createdAt?: boolean
chatId?: boolean
provider?: boolean
model?: boolean
request?: boolean
response?: boolean
inputTokens?: boolean
outputTokens?: boolean
totalTokens?: boolean
latencyMs?: boolean
error?: boolean
}
export type LlmCallOmit<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = runtime.Types.Extensions.GetOmit<"id" | "createdAt" | "chatId" | "provider" | "model" | "request" | "response" | "inputTokens" | "outputTokens" | "totalTokens" | "latencyMs" | "error", ExtArgs["result"]["llmCall"]>
export type LlmCallInclude<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
chat?: boolean | Prisma.ChatDefaultArgs<ExtArgs>
}
export type LlmCallIncludeCreateManyAndReturn<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
chat?: boolean | Prisma.ChatDefaultArgs<ExtArgs>
}
export type LlmCallIncludeUpdateManyAndReturn<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
chat?: boolean | Prisma.ChatDefaultArgs<ExtArgs>
}
export type $LlmCallPayload<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
name: "LlmCall"
objects: {
chat: Prisma.$ChatPayload<ExtArgs>
}
scalars: runtime.Types.Extensions.GetPayloadResult<{
id: string
createdAt: Date
chatId: string
provider: $Enums.Provider
model: string
request: runtime.JsonValue
response: runtime.JsonValue | null
inputTokens: number | null
outputTokens: number | null
totalTokens: number | null
latencyMs: number | null
error: string | null
}, ExtArgs["result"]["llmCall"]>
composites: {}
}
export type LlmCallGetPayload<S extends boolean | null | undefined | LlmCallDefaultArgs> = runtime.Types.Result.GetResult<Prisma.$LlmCallPayload, S>
export type LlmCallCountArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> =
Omit<LlmCallFindManyArgs, 'select' | 'include' | 'distinct' | 'omit'> & {
select?: LlmCallCountAggregateInputType | true
}
export interface LlmCallDelegate<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs, GlobalOmitOptions = {}> {
[K: symbol]: { types: Prisma.TypeMap<ExtArgs>['model']['LlmCall'], meta: { name: 'LlmCall' } }
/**
* Find zero or one LlmCall that matches the filter.
* @param {LlmCallFindUniqueArgs} args - Arguments to find a LlmCall
* @example
* // Get one LlmCall
* const llmCall = await prisma.llmCall.findUnique({
* where: {
* // ... provide filter here
* }
* })
*/
findUnique<T extends LlmCallFindUniqueArgs>(args: Prisma.SelectSubset<T, LlmCallFindUniqueArgs<ExtArgs>>): Prisma.Prisma__LlmCallClient<runtime.Types.Result.GetResult<Prisma.$LlmCallPayload<ExtArgs>, T, "findUnique", GlobalOmitOptions> | null, null, ExtArgs, GlobalOmitOptions>
/**
* Find one LlmCall that matches the filter or throw an error with `error.code='P2025'`
* if no matches were found.
* @param {LlmCallFindUniqueOrThrowArgs} args - Arguments to find a LlmCall
* @example
* // Get one LlmCall
* const llmCall = await prisma.llmCall.findUniqueOrThrow({
* where: {
* // ... provide filter here
* }
* })
*/
findUniqueOrThrow<T extends LlmCallFindUniqueOrThrowArgs>(args: Prisma.SelectSubset<T, LlmCallFindUniqueOrThrowArgs<ExtArgs>>): Prisma.Prisma__LlmCallClient<runtime.Types.Result.GetResult<Prisma.$LlmCallPayload<ExtArgs>, T, "findUniqueOrThrow", GlobalOmitOptions>, never, ExtArgs, GlobalOmitOptions>
/**
* Find the first LlmCall that matches the filter.
* Note that providing `undefined` is treated as the value not being there.
* Read more here: https://pris.ly/d/null-undefined
* @param {LlmCallFindFirstArgs} args - Arguments to find a LlmCall
* @example
* // Get one LlmCall
* const llmCall = await prisma.llmCall.findFirst({
* where: {
* // ... provide filter here
* }
* })
*/
findFirst<T extends LlmCallFindFirstArgs>(args?: Prisma.SelectSubset<T, LlmCallFindFirstArgs<ExtArgs>>): Prisma.Prisma__LlmCallClient<runtime.Types.Result.GetResult<Prisma.$LlmCallPayload<ExtArgs>, T, "findFirst", GlobalOmitOptions> | null, null, ExtArgs, GlobalOmitOptions>
/**
* Find the first LlmCall that matches the filter or
* throw a `PrismaClientKnownRequestError` with code `P2025` if no matches were found.
* Note that providing `undefined` is treated as the value not being there.
* Read more here: https://pris.ly/d/null-undefined
* @param {LlmCallFindFirstOrThrowArgs} args - Arguments to find a LlmCall
* @example
* // Get one LlmCall
* const llmCall = await prisma.llmCall.findFirstOrThrow({
* where: {
* // ... provide filter here
* }
* })
*/
findFirstOrThrow<T extends LlmCallFindFirstOrThrowArgs>(args?: Prisma.SelectSubset<T, LlmCallFindFirstOrThrowArgs<ExtArgs>>): Prisma.Prisma__LlmCallClient<runtime.Types.Result.GetResult<Prisma.$LlmCallPayload<ExtArgs>, T, "findFirstOrThrow", GlobalOmitOptions>, never, ExtArgs, GlobalOmitOptions>
/**
* Find zero or more LlmCalls that match the filter.
* Note that providing `undefined` is treated as the value not being there.
* Read more here: https://pris.ly/d/null-undefined
* @param {LlmCallFindManyArgs} args - Arguments to filter and select certain fields only.
* @example
* // Get all LlmCalls
* const llmCalls = await prisma.llmCall.findMany()
*
* // Get first 10 LlmCalls
* const llmCalls = await prisma.llmCall.findMany({ take: 10 })
*
* // Only select the `id`
* const llmCallWithIdOnly = await prisma.llmCall.findMany({ select: { id: true } })
*
*/
findMany<T extends LlmCallFindManyArgs>(args?: Prisma.SelectSubset<T, LlmCallFindManyArgs<ExtArgs>>): Prisma.PrismaPromise<runtime.Types.Result.GetResult<Prisma.$LlmCallPayload<ExtArgs>, T, "findMany", GlobalOmitOptions>>
/**
* Create a LlmCall.
* @param {LlmCallCreateArgs} args - Arguments to create a LlmCall.
* @example
* // Create one LlmCall
* const LlmCall = await prisma.llmCall.create({
* data: {
* // ... data to create a LlmCall
* }
* })
*
*/
create<T extends LlmCallCreateArgs>(args: Prisma.SelectSubset<T, LlmCallCreateArgs<ExtArgs>>): Prisma.Prisma__LlmCallClient<runtime.Types.Result.GetResult<Prisma.$LlmCallPayload<ExtArgs>, T, "create", GlobalOmitOptions>, never, ExtArgs, GlobalOmitOptions>
/**
* Create many LlmCalls.
* @param {LlmCallCreateManyArgs} args - Arguments to create many LlmCalls.
* @example
* // Create many LlmCalls
* const llmCall = await prisma.llmCall.createMany({
* data: [
* // ... provide data here
* ]
* })
*
*/
createMany<T extends LlmCallCreateManyArgs>(args?: Prisma.SelectSubset<T, LlmCallCreateManyArgs<ExtArgs>>): Prisma.PrismaPromise<Prisma.BatchPayload>
/**
* Create many LlmCalls and returns the data saved in the database.
* @param {LlmCallCreateManyAndReturnArgs} args - Arguments to create many LlmCalls.
* @example
* // Create many LlmCalls
* const llmCall = await prisma.llmCall.createManyAndReturn({
* data: [
* // ... provide data here
* ]
* })
*
* // Create many LlmCalls and only return the `id`
* const llmCallWithIdOnly = await prisma.llmCall.createManyAndReturn({
* select: { id: true },
* data: [
* // ... provide data here
* ]
* })
* Note that providing `undefined` is treated as the value not being there.
* Read more here: https://pris.ly/d/null-undefined
*
*/
createManyAndReturn<T extends LlmCallCreateManyAndReturnArgs>(args?: Prisma.SelectSubset<T, LlmCallCreateManyAndReturnArgs<ExtArgs>>): Prisma.PrismaPromise<runtime.Types.Result.GetResult<Prisma.$LlmCallPayload<ExtArgs>, T, "createManyAndReturn", GlobalOmitOptions>>
/**
* Delete a LlmCall.
* @param {LlmCallDeleteArgs} args - Arguments to delete one LlmCall.
* @example
* // Delete one LlmCall
* const LlmCall = await prisma.llmCall.delete({
* where: {
* // ... filter to delete one LlmCall
* }
* })
*
*/
delete<T extends LlmCallDeleteArgs>(args: Prisma.SelectSubset<T, LlmCallDeleteArgs<ExtArgs>>): Prisma.Prisma__LlmCallClient<runtime.Types.Result.GetResult<Prisma.$LlmCallPayload<ExtArgs>, T, "delete", GlobalOmitOptions>, never, ExtArgs, GlobalOmitOptions>
/**
* Update one LlmCall.
* @param {LlmCallUpdateArgs} args - Arguments to update one LlmCall.
* @example
* // Update one LlmCall
* const llmCall = await prisma.llmCall.update({
* where: {
* // ... provide filter here
* },
* data: {
* // ... provide data here
* }
* })
*
*/
update<T extends LlmCallUpdateArgs>(args: Prisma.SelectSubset<T, LlmCallUpdateArgs<ExtArgs>>): Prisma.Prisma__LlmCallClient<runtime.Types.Result.GetResult<Prisma.$LlmCallPayload<ExtArgs>, T, "update", GlobalOmitOptions>, never, ExtArgs, GlobalOmitOptions>
/**
* Delete zero or more LlmCalls.
* @param {LlmCallDeleteManyArgs} args - Arguments to filter LlmCalls to delete.
* @example
* // Delete a few LlmCalls
* const { count } = await prisma.llmCall.deleteMany({
* where: {
* // ... provide filter here
* }
* })
*
*/
deleteMany<T extends LlmCallDeleteManyArgs>(args?: Prisma.SelectSubset<T, LlmCallDeleteManyArgs<ExtArgs>>): Prisma.PrismaPromise<Prisma.BatchPayload>
/**
* Update zero or more LlmCalls.
* Note that providing `undefined` is treated as the value not being there.
* Read more here: https://pris.ly/d/null-undefined
* @param {LlmCallUpdateManyArgs} args - Arguments to update one or more rows.
* @example
* // Update many LlmCalls
* const llmCall = await prisma.llmCall.updateMany({
* where: {
* // ... provide filter here
* },
* data: {
* // ... provide data here
* }
* })
*
*/
updateMany<T extends LlmCallUpdateManyArgs>(args: Prisma.SelectSubset<T, LlmCallUpdateManyArgs<ExtArgs>>): Prisma.PrismaPromise<Prisma.BatchPayload>
/**
* Update zero or more LlmCalls and returns the data updated in the database.
* @param {LlmCallUpdateManyAndReturnArgs} args - Arguments to update many LlmCalls.
* @example
* // Update many LlmCalls
* const llmCall = await prisma.llmCall.updateManyAndReturn({
* where: {
* // ... provide filter here
* },
* data: [
* // ... provide data here
* ]
* })
*
* // Update zero or more LlmCalls and only return the `id`
* const llmCallWithIdOnly = await prisma.llmCall.updateManyAndReturn({
* select: { id: true },
* where: {
* // ... provide filter here
* },
* data: [
* // ... provide data here
* ]
* })
* Note that providing `undefined` is treated as the value not being there.
* Read more here: https://pris.ly/d/null-undefined
*
*/
updateManyAndReturn<T extends LlmCallUpdateManyAndReturnArgs>(args: Prisma.SelectSubset<T, LlmCallUpdateManyAndReturnArgs<ExtArgs>>): Prisma.PrismaPromise<runtime.Types.Result.GetResult<Prisma.$LlmCallPayload<ExtArgs>, T, "updateManyAndReturn", GlobalOmitOptions>>
/**
* Create or update one LlmCall.
* @param {LlmCallUpsertArgs} args - Arguments to update or create a LlmCall.
* @example
* // Update or create a LlmCall
* const llmCall = await prisma.llmCall.upsert({
* create: {
* // ... data to create a LlmCall
* },
* update: {
* // ... in case it already exists, update
* },
* where: {
* // ... the filter for the LlmCall we want to update
* }
* })
*/
upsert<T extends LlmCallUpsertArgs>(args: Prisma.SelectSubset<T, LlmCallUpsertArgs<ExtArgs>>): Prisma.Prisma__LlmCallClient<runtime.Types.Result.GetResult<Prisma.$LlmCallPayload<ExtArgs>, T, "upsert", GlobalOmitOptions>, never, ExtArgs, GlobalOmitOptions>
/**
* Count the number of LlmCalls.
* Note that providing `undefined` is treated as the value not being there.
* Read more here: https://pris.ly/d/null-undefined
* @param {LlmCallCountArgs} args - Arguments to filter LlmCalls to count.
* @example
* // Count the number of LlmCalls
* const count = await prisma.llmCall.count({
* where: {
* // ... the filter for the LlmCalls we want to count
* }
* })
**/
count<T extends LlmCallCountArgs>(
args?: Prisma.Subset<T, LlmCallCountArgs>,
): Prisma.PrismaPromise<
T extends runtime.Types.Utils.Record<'select', any>
? T['select'] extends true
? number
: Prisma.GetScalarType<T['select'], LlmCallCountAggregateOutputType>
: number
>
/**
* Allows you to perform aggregation operations on LlmCall records.
* Note that providing `undefined` is treated as the value not being there.
* Read more here: https://pris.ly/d/null-undefined
* @param {LlmCallAggregateArgs} args - Select which aggregations you would like to apply and on what fields.
* @example
* // Average latency of successful LlmCalls
* // Ordered by createdAt descending
* // Limited to the 10 most recent calls
* const aggregations = await prisma.llmCall.aggregate({
*   _avg: {
*     latencyMs: true,
*   },
*   where: {
*     error: null,
*   },
*   orderBy: {
*     createdAt: "desc",
*   },
*   take: 10,
* })
**/
aggregate<T extends LlmCallAggregateArgs>(args: Prisma.Subset<T, LlmCallAggregateArgs>): Prisma.PrismaPromise<GetLlmCallAggregateType<T>>
/**
* Group by LlmCall.
* Note that providing `undefined` is treated as the value not being there.
* Read more here: https://pris.ly/d/null-undefined
* @param {LlmCallGroupByArgs} args - Group by arguments.
* @example
* // Group by provider and model, order by provider, get count
* const result = await prisma.llmCall.groupBy({
*   by: ['provider', 'model'],
*   orderBy: {
*     provider: 'asc'
*   },
*   _count: {
*     _all: true
*   },
* })
*
**/
groupBy<
T extends LlmCallGroupByArgs,
HasSelectOrTake extends Prisma.Or<
Prisma.Extends<'skip', Prisma.Keys<T>>,
Prisma.Extends<'take', Prisma.Keys<T>>
>,
OrderByArg extends Prisma.True extends HasSelectOrTake
? { orderBy: LlmCallGroupByArgs['orderBy'] }
: { orderBy?: LlmCallGroupByArgs['orderBy'] },
OrderFields extends Prisma.ExcludeUnderscoreKeys<Prisma.Keys<Prisma.MaybeTupleToUnion<T['orderBy']>>>,
ByFields extends Prisma.MaybeTupleToUnion<T['by']>,
ByValid extends Prisma.Has<ByFields, OrderFields>,
HavingFields extends Prisma.GetHavingFields<T['having']>,
HavingValid extends Prisma.Has<ByFields, HavingFields>,
ByEmpty extends T['by'] extends never[] ? Prisma.True : Prisma.False,
InputErrors extends ByEmpty extends Prisma.True
? `Error: "by" must not be empty.`
: HavingValid extends Prisma.False
? {
[P in HavingFields]: P extends ByFields
? never
: P extends string
? `Error: Field "${P}" used in "having" needs to be provided in "by".`
: [
Error,
'Field ',
P,
` in "having" needs to be provided in "by"`,
]
}[HavingFields]
: 'take' extends Prisma.Keys<T>
? 'orderBy' extends Prisma.Keys<T>
? ByValid extends Prisma.True
? {}
: {
[P in OrderFields]: P extends ByFields
? never
: `Error: Field "${P}" in "orderBy" needs to be provided in "by"`
}[OrderFields]
: 'Error: If you provide "take", you also need to provide "orderBy"'
: 'skip' extends Prisma.Keys<T>
? 'orderBy' extends Prisma.Keys<T>
? ByValid extends Prisma.True
? {}
: {
[P in OrderFields]: P extends ByFields
? never
: `Error: Field "${P}" in "orderBy" needs to be provided in "by"`
}[OrderFields]
: 'Error: If you provide "skip", you also need to provide "orderBy"'
: ByValid extends Prisma.True
? {}
: {
[P in OrderFields]: P extends ByFields
? never
: `Error: Field "${P}" in "orderBy" needs to be provided in "by"`
}[OrderFields]
>(args: Prisma.SubsetIntersection<T, LlmCallGroupByArgs, OrderByArg> & InputErrors): {} extends InputErrors ? GetLlmCallGroupByPayload<T> : Prisma.PrismaPromise<InputErrors>
/**
* Fields of the LlmCall model
*/
readonly fields: LlmCallFieldRefs;
}
/**
* The delegate class that acts as a "Promise-like" for LlmCall.
* Why is this prefixed with `Prisma__`?
* Because we want to prevent naming conflicts as mentioned in
* https://github.com/prisma/prisma-client-js/issues/707
*/
export interface Prisma__LlmCallClient<T, Null = never, ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs, GlobalOmitOptions = {}> extends Prisma.PrismaPromise<T> {
readonly [Symbol.toStringTag]: "PrismaPromise"
chat<T extends Prisma.ChatDefaultArgs<ExtArgs> = {}>(args?: Prisma.Subset<T, Prisma.ChatDefaultArgs<ExtArgs>>): Prisma.Prisma__ChatClient<runtime.Types.Result.GetResult<Prisma.$ChatPayload<ExtArgs>, T, "findUniqueOrThrow", GlobalOmitOptions> | Null, Null, ExtArgs, GlobalOmitOptions>
/**
* Attaches callbacks for the resolution and/or rejection of the Promise.
* @param onfulfilled The callback to execute when the Promise is resolved.
* @param onrejected The callback to execute when the Promise is rejected.
* @returns A Promise for the completion of which ever callback is executed.
*/
then<TResult1 = T, TResult2 = never>(onfulfilled?: ((value: T) => TResult1 | PromiseLike<TResult1>) | undefined | null, onrejected?: ((reason: any) => TResult2 | PromiseLike<TResult2>) | undefined | null): runtime.Types.Utils.JsPromise<TResult1 | TResult2>
/**
* Attaches a callback for only the rejection of the Promise.
* @param onrejected The callback to execute when the Promise is rejected.
* @returns A Promise for the completion of the callback.
*/
catch<TResult = never>(onrejected?: ((reason: any) => TResult | PromiseLike<TResult>) | undefined | null): runtime.Types.Utils.JsPromise<T | TResult>
/**
* Attaches a callback that is invoked when the Promise is settled (fulfilled or rejected). The
* resolved value cannot be modified from the callback.
* @param onfinally The callback to execute when the Promise is settled (fulfilled or rejected).
* @returns A Promise for the completion of the callback.
*/
finally(onfinally?: (() => void) | undefined | null): runtime.Types.Utils.JsPromise<T>
}
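/*
 * Illustrative sketch (not generated): because the client above is
 * "Promise-like", a single-record query can be awaited directly or chained
 * into the `chat()` fluent accessor. Assumes a `PrismaClient` instance named
 * `prisma` and a known `callId`.
 *
 *   const chat = await prisma.llmCall
 *     .findUnique({ where: { id: callId } })
 *     .chat()
 */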
/**
* Fields of the LlmCall model
*/
export interface LlmCallFieldRefs {
readonly id: Prisma.FieldRef<"LlmCall", 'String'>
readonly createdAt: Prisma.FieldRef<"LlmCall", 'DateTime'>
readonly chatId: Prisma.FieldRef<"LlmCall", 'String'>
readonly provider: Prisma.FieldRef<"LlmCall", 'Provider'>
readonly model: Prisma.FieldRef<"LlmCall", 'String'>
readonly request: Prisma.FieldRef<"LlmCall", 'Json'>
readonly response: Prisma.FieldRef<"LlmCall", 'Json'>
readonly inputTokens: Prisma.FieldRef<"LlmCall", 'Int'>
readonly outputTokens: Prisma.FieldRef<"LlmCall", 'Int'>
readonly totalTokens: Prisma.FieldRef<"LlmCall", 'Int'>
readonly latencyMs: Prisma.FieldRef<"LlmCall", 'Int'>
readonly error: Prisma.FieldRef<"LlmCall", 'String'>
}
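/*
 * Illustrative sketch (not generated): the field references above can be used
 * to compare one column against another inside a filter, on Prisma versions
 * where field references are supported. Assumes a `PrismaClient` instance
 * named `prisma`.
 *
 *   const outputHeavy = await prisma.llmCall.findMany({
 *     where: {
 *       outputTokens: { gt: prisma.llmCall.fields.inputTokens },
 *     },
 *   })
 */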
// Custom InputTypes
/**
* LlmCall findUnique
*/
export type LlmCallFindUniqueArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
/**
* Select specific fields to fetch from the LlmCall
*/
select?: Prisma.LlmCallSelect<ExtArgs> | null
/**
* Omit specific fields from the LlmCall
*/
omit?: Prisma.LlmCallOmit<ExtArgs> | null
/**
* Choose which related nodes to fetch as well
*/
include?: Prisma.LlmCallInclude<ExtArgs> | null
/**
* Filter which LlmCall to fetch.
*/
where: Prisma.LlmCallWhereUniqueInput
}
/**
* LlmCall findUniqueOrThrow
*/
export type LlmCallFindUniqueOrThrowArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
/**
* Select specific fields to fetch from the LlmCall
*/
select?: Prisma.LlmCallSelect<ExtArgs> | null
/**
* Omit specific fields from the LlmCall
*/
omit?: Prisma.LlmCallOmit<ExtArgs> | null
/**
* Choose which related nodes to fetch as well
*/
include?: Prisma.LlmCallInclude<ExtArgs> | null
/**
* Filter which LlmCall to fetch.
*/
where: Prisma.LlmCallWhereUniqueInput
}
/**
* LlmCall findFirst
*/
export type LlmCallFindFirstArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
/**
* Select specific fields to fetch from the LlmCall
*/
select?: Prisma.LlmCallSelect<ExtArgs> | null
/**
* Omit specific fields from the LlmCall
*/
omit?: Prisma.LlmCallOmit<ExtArgs> | null
/**
* Choose which related nodes to fetch as well
*/
include?: Prisma.LlmCallInclude<ExtArgs> | null
/**
* Filter which LlmCall to fetch.
*/
where?: Prisma.LlmCallWhereInput
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/sorting Sorting Docs}
*
* Determine the order of LlmCalls to fetch.
*/
orderBy?: Prisma.LlmCallOrderByWithRelationInput | Prisma.LlmCallOrderByWithRelationInput[]
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination#cursor-based-pagination Cursor Docs}
*
* Sets the position for searching for LlmCalls.
*/
cursor?: Prisma.LlmCallWhereUniqueInput
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs}
*
* Take `±n` LlmCalls from the position of the cursor.
*/
take?: number
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs}
*
* Skip the first `n` LlmCalls.
*/
skip?: number
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/distinct Distinct Docs}
*
* Filter by unique combinations of LlmCalls.
*/
distinct?: Prisma.LlmCallScalarFieldEnum | Prisma.LlmCallScalarFieldEnum[]
}
/**
* LlmCall findFirstOrThrow
*/
export type LlmCallFindFirstOrThrowArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
/**
* Select specific fields to fetch from the LlmCall
*/
select?: Prisma.LlmCallSelect<ExtArgs> | null
/**
* Omit specific fields from the LlmCall
*/
omit?: Prisma.LlmCallOmit<ExtArgs> | null
/**
* Choose which related nodes to fetch as well
*/
include?: Prisma.LlmCallInclude<ExtArgs> | null
/**
* Filter which LlmCall to fetch.
*/
where?: Prisma.LlmCallWhereInput
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/sorting Sorting Docs}
*
* Determine the order of LlmCalls to fetch.
*/
orderBy?: Prisma.LlmCallOrderByWithRelationInput | Prisma.LlmCallOrderByWithRelationInput[]
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination#cursor-based-pagination Cursor Docs}
*
* Sets the position for searching for LlmCalls.
*/
cursor?: Prisma.LlmCallWhereUniqueInput
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs}
*
* Take `±n` LlmCalls from the position of the cursor.
*/
take?: number
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs}
*
* Skip the first `n` LlmCalls.
*/
skip?: number
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/distinct Distinct Docs}
*
* Filter by unique combinations of LlmCalls.
*/
distinct?: Prisma.LlmCallScalarFieldEnum | Prisma.LlmCallScalarFieldEnum[]
}
/**
* LlmCall findMany
*/
export type LlmCallFindManyArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
/**
* Select specific fields to fetch from the LlmCall
*/
select?: Prisma.LlmCallSelect<ExtArgs> | null
/**
* Omit specific fields from the LlmCall
*/
omit?: Prisma.LlmCallOmit<ExtArgs> | null
/**
* Choose which related nodes to fetch as well
*/
include?: Prisma.LlmCallInclude<ExtArgs> | null
/**
* Filter which LlmCalls to fetch.
*/
where?: Prisma.LlmCallWhereInput
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/sorting Sorting Docs}
*
* Determine the order of LlmCalls to fetch.
*/
orderBy?: Prisma.LlmCallOrderByWithRelationInput | Prisma.LlmCallOrderByWithRelationInput[]
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination#cursor-based-pagination Cursor Docs}
*
* Sets the position for listing LlmCalls.
*/
cursor?: Prisma.LlmCallWhereUniqueInput
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs}
*
* Take `±n` LlmCalls from the position of the cursor.
*/
take?: number
/**
* {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs}
*
* Skip the first `n` LlmCalls.
*/
skip?: number
distinct?: Prisma.LlmCallScalarFieldEnum | Prisma.LlmCallScalarFieldEnum[]
}
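/*
 * Illustrative sketch (not generated): cursor-based pagination with these
 * findMany args. Assumes a `PrismaClient` instance named `prisma`;
 * `lastSeenId` is the id of the final row from the previous page.
 *
 *   const nextPage = await prisma.llmCall.findMany({
 *     take: 50,
 *     skip: 1, // skip the cursor row itself
 *     cursor: { id: lastSeenId },
 *     orderBy: { createdAt: 'desc' },
 *   })
 */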
/**
* LlmCall create
*/
export type LlmCallCreateArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
/**
* Select specific fields to fetch from the LlmCall
*/
select?: Prisma.LlmCallSelect<ExtArgs> | null
/**
* Omit specific fields from the LlmCall
*/
omit?: Prisma.LlmCallOmit<ExtArgs> | null
/**
* Choose which related nodes to fetch as well
*/
include?: Prisma.LlmCallInclude<ExtArgs> | null
/**
* The data needed to create a LlmCall.
*/
data: Prisma.XOR<Prisma.LlmCallCreateInput, Prisma.LlmCallUncheckedCreateInput>
}
/**
* LlmCall createMany
*/
export type LlmCallCreateManyArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
/**
* The data used to create many LlmCalls.
*/
data: Prisma.LlmCallCreateManyInput | Prisma.LlmCallCreateManyInput[]
}
/**
* LlmCall createManyAndReturn
*/
export type LlmCallCreateManyAndReturnArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
/**
* Select specific fields to fetch from the LlmCall
*/
select?: Prisma.LlmCallSelectCreateManyAndReturn<ExtArgs> | null
/**
* Omit specific fields from the LlmCall
*/
omit?: Prisma.LlmCallOmit<ExtArgs> | null
/**
* The data used to create many LlmCalls.
*/
data: Prisma.LlmCallCreateManyInput | Prisma.LlmCallCreateManyInput[]
/**
* Choose which related nodes to fetch as well
*/
include?: Prisma.LlmCallIncludeCreateManyAndReturn<ExtArgs> | null
}
/**
* LlmCall update
*/
export type LlmCallUpdateArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
/**
* Select specific fields to fetch from the LlmCall
*/
select?: Prisma.LlmCallSelect<ExtArgs> | null
/**
* Omit specific fields from the LlmCall
*/
omit?: Prisma.LlmCallOmit<ExtArgs> | null
/**
* Choose which related nodes to fetch as well
*/
include?: Prisma.LlmCallInclude<ExtArgs> | null
/**
* The data needed to update a LlmCall.
*/
data: Prisma.XOR<Prisma.LlmCallUpdateInput, Prisma.LlmCallUncheckedUpdateInput>
/**
* Choose which LlmCall to update.
*/
where: Prisma.LlmCallWhereUniqueInput
}
/**
* LlmCall updateMany
*/
export type LlmCallUpdateManyArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
/**
* The data used to update LlmCalls.
*/
data: Prisma.XOR<Prisma.LlmCallUpdateManyMutationInput, Prisma.LlmCallUncheckedUpdateManyInput>
/**
* Filter which LlmCalls to update
*/
where?: Prisma.LlmCallWhereInput
/**
* Limit how many LlmCalls to update.
*/
limit?: number
}
/**
* LlmCall updateManyAndReturn
*/
export type LlmCallUpdateManyAndReturnArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
/**
* Select specific fields to fetch from the LlmCall
*/
select?: Prisma.LlmCallSelectUpdateManyAndReturn<ExtArgs> | null
/**
* Omit specific fields from the LlmCall
*/
omit?: Prisma.LlmCallOmit<ExtArgs> | null
/**
* The data used to update LlmCalls.
*/
data: Prisma.XOR<Prisma.LlmCallUpdateManyMutationInput, Prisma.LlmCallUncheckedUpdateManyInput>
/**
* Filter which LlmCalls to update
*/
where?: Prisma.LlmCallWhereInput
/**
* Limit how many LlmCalls to update.
*/
limit?: number
/**
* Choose which related nodes to fetch as well
*/
include?: Prisma.LlmCallIncludeUpdateManyAndReturn<ExtArgs> | null
}
/**
* LlmCall upsert
*/
export type LlmCallUpsertArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
/**
* Select specific fields to fetch from the LlmCall
*/
select?: Prisma.LlmCallSelect<ExtArgs> | null
/**
* Omit specific fields from the LlmCall
*/
omit?: Prisma.LlmCallOmit<ExtArgs> | null
/**
* Choose which related nodes to fetch as well
*/
include?: Prisma.LlmCallInclude<ExtArgs> | null
/**
* The filter to search for the LlmCall to update in case it exists.
*/
where: Prisma.LlmCallWhereUniqueInput
/**
* In case the LlmCall found by the `where` argument doesn't exist, create a new LlmCall with this data.
*/
create: Prisma.XOR<Prisma.LlmCallCreateInput, Prisma.LlmCallUncheckedCreateInput>
/**
* In case the LlmCall was found with the provided `where` argument, update it with this data.
*/
update: Prisma.XOR<Prisma.LlmCallUpdateInput, Prisma.LlmCallUncheckedUpdateInput>
}
/**
* LlmCall delete
*/
export type LlmCallDeleteArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
/**
* Select specific fields to fetch from the LlmCall
*/
select?: Prisma.LlmCallSelect<ExtArgs> | null
/**
* Omit specific fields from the LlmCall
*/
omit?: Prisma.LlmCallOmit<ExtArgs> | null
/**
* Choose which related nodes to fetch as well
*/
include?: Prisma.LlmCallInclude<ExtArgs> | null
/**
* Filter which LlmCall to delete.
*/
where: Prisma.LlmCallWhereUniqueInput
}
/**
* LlmCall deleteMany
*/
export type LlmCallDeleteManyArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
/**
* Filter which LlmCalls to delete
*/
where?: Prisma.LlmCallWhereInput
/**
* Limit how many LlmCalls to delete.
*/
limit?: number
}
/**
* LlmCall without action
*/
export type LlmCallDefaultArgs<ExtArgs extends runtime.Types.Extensions.InternalArgs = runtime.Types.Extensions.DefaultArgs> = {
/**
* Select specific fields to fetch from the LlmCall
*/
select?: Prisma.LlmCallSelect<ExtArgs> | null
/**
* Omit specific fields from the LlmCall
*/
omit?: Prisma.LlmCallOmit<ExtArgs> | null
/**
* Choose which related nodes to fetch as well
*/
include?: Prisma.LlmCallInclude<ExtArgs> | null
}