/* !!! This is code generated by Prisma. Do not edit directly. !!! */
/* eslint-disable */
// biome-ignore-all lint: generated file
// @ts-nocheck
/*
 * This file exports the `LlmCall` model and its related types.
 *
 * 🟢 You can import this file directly.
 */
import type * as runtime from "@prisma/client/runtime/client"
import type * as $Enums from "../enums.js"
import type * as Prisma from "../internal/prismaNamespace.js"

/**
 * Model LlmCall
 *
 */
export type LlmCallModel = runtime.Types.Result.DefaultSelection<Prisma.$LlmCallPayload>

export type AggregateLlmCall = {
  _count: LlmCallCountAggregateOutputType | null
  _avg: LlmCallAvgAggregateOutputType | null
  _sum: LlmCallSumAggregateOutputType | null
  _min: LlmCallMinAggregateOutputType | null
  _max: LlmCallMaxAggregateOutputType | null
}

export type LlmCallAvgAggregateOutputType = {
  inputTokens: number | null
  outputTokens: number | null
  totalTokens: number | null
  latencyMs: number | null
}

export type LlmCallSumAggregateOutputType = {
  inputTokens: number | null
  outputTokens: number | null
  totalTokens: number | null
  latencyMs: number | null
}

export type LlmCallMinAggregateOutputType = {
  id: string | null
  createdAt: Date | null
  chatId: string | null
  provider: $Enums.Provider | null
  model: string | null
  inputTokens: number | null
  outputTokens: number | null
  totalTokens: number | null
  latencyMs: number | null
  error: string | null
}

export type LlmCallMaxAggregateOutputType = {
  id: string | null
  createdAt: Date | null
  chatId: string | null
  provider: $Enums.Provider | null
  model: string | null
  inputTokens: number | null
  outputTokens: number | null
  totalTokens: number | null
  latencyMs: number | null
  error: string | null
}

export type LlmCallCountAggregateOutputType = {
  id: number
  createdAt: number
  chatId: number
  provider: number
  model: number
  request: number
  response: number
  inputTokens: number
  outputTokens: number
  totalTokens: number
  latencyMs: number
  error: number
  _all: number
}

export type LlmCallAvgAggregateInputType = {
  inputTokens?: true
  outputTokens?: true
  totalTokens?: true
  latencyMs?: true
}

export type LlmCallSumAggregateInputType = {
  inputTokens?: true
  outputTokens?: true
  totalTokens?: true
  latencyMs?: true
}

export type LlmCallMinAggregateInputType = {
  id?: true
  createdAt?: true
  chatId?: true
  provider?: true
  model?: true
  inputTokens?: true
  outputTokens?: true
  totalTokens?: true
  latencyMs?: true
  error?: true
}

export type LlmCallMaxAggregateInputType = {
  id?: true
  createdAt?: true
  chatId?: true
  provider?: true
  model?: true
  inputTokens?: true
  outputTokens?: true
  totalTokens?: true
  latencyMs?: true
  error?: true
}

export type LlmCallCountAggregateInputType = {
  id?: true
  createdAt?: true
  chatId?: true
  provider?: true
  model?: true
  request?: true
  response?: true
  inputTokens?: true
  outputTokens?: true
  totalTokens?: true
  latencyMs?: true
  error?: true
  _all?: true
}

export type LlmCallAggregateArgs = {
  /**
   * Filter which LlmCall to aggregate.
   */
  where?: Prisma.LlmCallWhereInput
  /**
   * {@link https://www.prisma.io/docs/concepts/components/prisma-client/sorting Sorting Docs}
   *
   * Determine the order of LlmCalls to fetch.
   */
  orderBy?: Prisma.LlmCallOrderByWithRelationInput | Prisma.LlmCallOrderByWithRelationInput[]
  /**
   * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination#cursor-based-pagination Cursor Docs}
   *
   * Sets the start position
   */
  cursor?: Prisma.LlmCallWhereUniqueInput
  /**
   * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs}
   *
   * Take `±n` LlmCalls from the position of the cursor.
*/ take?: number /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs} * * Skip the first `n` LlmCalls. */ skip?: number /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/aggregations Aggregation Docs} * * Count returned LlmCalls **/ _count?: true | LlmCallCountAggregateInputType /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/aggregations Aggregation Docs} * * Select which fields to average **/ _avg?: LlmCallAvgAggregateInputType /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/aggregations Aggregation Docs} * * Select which fields to sum **/ _sum?: LlmCallSumAggregateInputType /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/aggregations Aggregation Docs} * * Select which fields to find the minimum value **/ _min?: LlmCallMinAggregateInputType /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/aggregations Aggregation Docs} * * Select which fields to find the maximum value **/ _max?: LlmCallMaxAggregateInputType } export type GetLlmCallAggregateType = { [P in keyof T & keyof AggregateLlmCall]: P extends '_count' | 'count' ? T[P] extends true ? number : Prisma.GetScalarType : Prisma.GetScalarType } export type LlmCallGroupByArgs = { where?: Prisma.LlmCallWhereInput orderBy?: Prisma.LlmCallOrderByWithAggregationInput | Prisma.LlmCallOrderByWithAggregationInput[] by: Prisma.LlmCallScalarFieldEnum[] | Prisma.LlmCallScalarFieldEnum having?: Prisma.LlmCallScalarWhereWithAggregatesInput take?: number skip?: number _count?: LlmCallCountAggregateInputType | true _avg?: LlmCallAvgAggregateInputType _sum?: LlmCallSumAggregateInputType _min?: LlmCallMinAggregateInputType _max?: LlmCallMaxAggregateInputType } export type LlmCallGroupByOutputType = { id: string createdAt: Date chatId: string provider: $Enums.Provider model: string request: runtime.JsonValue response: runtime.JsonValue | null inputTokens: number | null outputTokens: number | null totalTokens: number | null latencyMs: number | null error: string | null _count: LlmCallCountAggregateOutputType | null _avg: LlmCallAvgAggregateOutputType | null _sum: LlmCallSumAggregateOutputType | null _min: LlmCallMinAggregateOutputType | null _max: LlmCallMaxAggregateOutputType | null } type GetLlmCallGroupByPayload = Prisma.PrismaPromise< Array< Prisma.PickEnumerable & { [P in ((keyof T) & (keyof LlmCallGroupByOutputType))]: P extends '_count' ? T[P] extends boolean ? 
number : Prisma.GetScalarType : Prisma.GetScalarType } > > export type LlmCallWhereInput = { AND?: Prisma.LlmCallWhereInput | Prisma.LlmCallWhereInput[] OR?: Prisma.LlmCallWhereInput[] NOT?: Prisma.LlmCallWhereInput | Prisma.LlmCallWhereInput[] id?: Prisma.StringFilter<"LlmCall"> | string createdAt?: Prisma.DateTimeFilter<"LlmCall"> | Date | string chatId?: Prisma.StringFilter<"LlmCall"> | string provider?: Prisma.EnumProviderFilter<"LlmCall"> | $Enums.Provider model?: Prisma.StringFilter<"LlmCall"> | string request?: Prisma.JsonFilter<"LlmCall"> response?: Prisma.JsonNullableFilter<"LlmCall"> inputTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null outputTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null totalTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null latencyMs?: Prisma.IntNullableFilter<"LlmCall"> | number | null error?: Prisma.StringNullableFilter<"LlmCall"> | string | null chat?: Prisma.XOR } export type LlmCallOrderByWithRelationInput = { id?: Prisma.SortOrder createdAt?: Prisma.SortOrder chatId?: Prisma.SortOrder provider?: Prisma.SortOrder model?: Prisma.SortOrder request?: Prisma.SortOrder response?: Prisma.SortOrderInput | Prisma.SortOrder inputTokens?: Prisma.SortOrderInput | Prisma.SortOrder outputTokens?: Prisma.SortOrderInput | Prisma.SortOrder totalTokens?: Prisma.SortOrderInput | Prisma.SortOrder latencyMs?: Prisma.SortOrderInput | Prisma.SortOrder error?: Prisma.SortOrderInput | Prisma.SortOrder chat?: Prisma.ChatOrderByWithRelationInput } export type LlmCallWhereUniqueInput = Prisma.AtLeast<{ id?: string AND?: Prisma.LlmCallWhereInput | Prisma.LlmCallWhereInput[] OR?: Prisma.LlmCallWhereInput[] NOT?: Prisma.LlmCallWhereInput | Prisma.LlmCallWhereInput[] createdAt?: Prisma.DateTimeFilter<"LlmCall"> | Date | string chatId?: Prisma.StringFilter<"LlmCall"> | string provider?: Prisma.EnumProviderFilter<"LlmCall"> | $Enums.Provider model?: Prisma.StringFilter<"LlmCall"> | string request?: Prisma.JsonFilter<"LlmCall"> response?: Prisma.JsonNullableFilter<"LlmCall"> inputTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null outputTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null totalTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null latencyMs?: Prisma.IntNullableFilter<"LlmCall"> | number | null error?: Prisma.StringNullableFilter<"LlmCall"> | string | null chat?: Prisma.XOR }, "id"> export type LlmCallOrderByWithAggregationInput = { id?: Prisma.SortOrder createdAt?: Prisma.SortOrder chatId?: Prisma.SortOrder provider?: Prisma.SortOrder model?: Prisma.SortOrder request?: Prisma.SortOrder response?: Prisma.SortOrderInput | Prisma.SortOrder inputTokens?: Prisma.SortOrderInput | Prisma.SortOrder outputTokens?: Prisma.SortOrderInput | Prisma.SortOrder totalTokens?: Prisma.SortOrderInput | Prisma.SortOrder latencyMs?: Prisma.SortOrderInput | Prisma.SortOrder error?: Prisma.SortOrderInput | Prisma.SortOrder _count?: Prisma.LlmCallCountOrderByAggregateInput _avg?: Prisma.LlmCallAvgOrderByAggregateInput _max?: Prisma.LlmCallMaxOrderByAggregateInput _min?: Prisma.LlmCallMinOrderByAggregateInput _sum?: Prisma.LlmCallSumOrderByAggregateInput } export type LlmCallScalarWhereWithAggregatesInput = { AND?: Prisma.LlmCallScalarWhereWithAggregatesInput | Prisma.LlmCallScalarWhereWithAggregatesInput[] OR?: Prisma.LlmCallScalarWhereWithAggregatesInput[] NOT?: Prisma.LlmCallScalarWhereWithAggregatesInput | Prisma.LlmCallScalarWhereWithAggregatesInput[] id?: Prisma.StringWithAggregatesFilter<"LlmCall"> | string createdAt?: 
Prisma.DateTimeWithAggregatesFilter<"LlmCall"> | Date | string chatId?: Prisma.StringWithAggregatesFilter<"LlmCall"> | string provider?: Prisma.EnumProviderWithAggregatesFilter<"LlmCall"> | $Enums.Provider model?: Prisma.StringWithAggregatesFilter<"LlmCall"> | string request?: Prisma.JsonWithAggregatesFilter<"LlmCall"> response?: Prisma.JsonNullableWithAggregatesFilter<"LlmCall"> inputTokens?: Prisma.IntNullableWithAggregatesFilter<"LlmCall"> | number | null outputTokens?: Prisma.IntNullableWithAggregatesFilter<"LlmCall"> | number | null totalTokens?: Prisma.IntNullableWithAggregatesFilter<"LlmCall"> | number | null latencyMs?: Prisma.IntNullableWithAggregatesFilter<"LlmCall"> | number | null error?: Prisma.StringNullableWithAggregatesFilter<"LlmCall"> | string | null } export type LlmCallCreateInput = { id?: string createdAt?: Date | string provider: $Enums.Provider model: string request: Prisma.JsonNullValueInput | runtime.InputJsonValue response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue inputTokens?: number | null outputTokens?: number | null totalTokens?: number | null latencyMs?: number | null error?: string | null chat: Prisma.ChatCreateNestedOneWithoutCallsInput } export type LlmCallUncheckedCreateInput = { id?: string createdAt?: Date | string chatId: string provider: $Enums.Provider model: string request: Prisma.JsonNullValueInput | runtime.InputJsonValue response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue inputTokens?: number | null outputTokens?: number | null totalTokens?: number | null latencyMs?: number | null error?: string | null } export type LlmCallUpdateInput = { id?: Prisma.StringFieldUpdateOperationsInput | string createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string provider?: Prisma.EnumProviderFieldUpdateOperationsInput | $Enums.Provider model?: Prisma.StringFieldUpdateOperationsInput | string request?: Prisma.JsonNullValueInput | runtime.InputJsonValue response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue inputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null outputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null totalTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null latencyMs?: Prisma.NullableIntFieldUpdateOperationsInput | number | null error?: Prisma.NullableStringFieldUpdateOperationsInput | string | null chat?: Prisma.ChatUpdateOneRequiredWithoutCallsNestedInput } export type LlmCallUncheckedUpdateInput = { id?: Prisma.StringFieldUpdateOperationsInput | string createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string chatId?: Prisma.StringFieldUpdateOperationsInput | string provider?: Prisma.EnumProviderFieldUpdateOperationsInput | $Enums.Provider model?: Prisma.StringFieldUpdateOperationsInput | string request?: Prisma.JsonNullValueInput | runtime.InputJsonValue response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue inputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null outputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null totalTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null latencyMs?: Prisma.NullableIntFieldUpdateOperationsInput | number | null error?: Prisma.NullableStringFieldUpdateOperationsInput | string | null } export type LlmCallCreateManyInput = { id?: string createdAt?: Date | string chatId: string provider: $Enums.Provider model: string request: Prisma.JsonNullValueInput | runtime.InputJsonValue response?: 
Prisma.NullableJsonNullValueInput | runtime.InputJsonValue inputTokens?: number | null outputTokens?: number | null totalTokens?: number | null latencyMs?: number | null error?: string | null } export type LlmCallUpdateManyMutationInput = { id?: Prisma.StringFieldUpdateOperationsInput | string createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string provider?: Prisma.EnumProviderFieldUpdateOperationsInput | $Enums.Provider model?: Prisma.StringFieldUpdateOperationsInput | string request?: Prisma.JsonNullValueInput | runtime.InputJsonValue response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue inputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null outputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null totalTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null latencyMs?: Prisma.NullableIntFieldUpdateOperationsInput | number | null error?: Prisma.NullableStringFieldUpdateOperationsInput | string | null } export type LlmCallUncheckedUpdateManyInput = { id?: Prisma.StringFieldUpdateOperationsInput | string createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string chatId?: Prisma.StringFieldUpdateOperationsInput | string provider?: Prisma.EnumProviderFieldUpdateOperationsInput | $Enums.Provider model?: Prisma.StringFieldUpdateOperationsInput | string request?: Prisma.JsonNullValueInput | runtime.InputJsonValue response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue inputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null outputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null totalTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null latencyMs?: Prisma.NullableIntFieldUpdateOperationsInput | number | null error?: Prisma.NullableStringFieldUpdateOperationsInput | string | null } export type LlmCallListRelationFilter = { every?: Prisma.LlmCallWhereInput some?: Prisma.LlmCallWhereInput none?: Prisma.LlmCallWhereInput } export type LlmCallOrderByRelationAggregateInput = { _count?: Prisma.SortOrder } export type LlmCallCountOrderByAggregateInput = { id?: Prisma.SortOrder createdAt?: Prisma.SortOrder chatId?: Prisma.SortOrder provider?: Prisma.SortOrder model?: Prisma.SortOrder request?: Prisma.SortOrder response?: Prisma.SortOrder inputTokens?: Prisma.SortOrder outputTokens?: Prisma.SortOrder totalTokens?: Prisma.SortOrder latencyMs?: Prisma.SortOrder error?: Prisma.SortOrder } export type LlmCallAvgOrderByAggregateInput = { inputTokens?: Prisma.SortOrder outputTokens?: Prisma.SortOrder totalTokens?: Prisma.SortOrder latencyMs?: Prisma.SortOrder } export type LlmCallMaxOrderByAggregateInput = { id?: Prisma.SortOrder createdAt?: Prisma.SortOrder chatId?: Prisma.SortOrder provider?: Prisma.SortOrder model?: Prisma.SortOrder inputTokens?: Prisma.SortOrder outputTokens?: Prisma.SortOrder totalTokens?: Prisma.SortOrder latencyMs?: Prisma.SortOrder error?: Prisma.SortOrder } export type LlmCallMinOrderByAggregateInput = { id?: Prisma.SortOrder createdAt?: Prisma.SortOrder chatId?: Prisma.SortOrder provider?: Prisma.SortOrder model?: Prisma.SortOrder inputTokens?: Prisma.SortOrder outputTokens?: Prisma.SortOrder totalTokens?: Prisma.SortOrder latencyMs?: Prisma.SortOrder error?: Prisma.SortOrder } export type LlmCallSumOrderByAggregateInput = { inputTokens?: Prisma.SortOrder outputTokens?: Prisma.SortOrder totalTokens?: Prisma.SortOrder latencyMs?: Prisma.SortOrder } export type LlmCallCreateNestedManyWithoutChatInput = { create?: Prisma.XOR | 
Prisma.LlmCallCreateWithoutChatInput[] | Prisma.LlmCallUncheckedCreateWithoutChatInput[] connectOrCreate?: Prisma.LlmCallCreateOrConnectWithoutChatInput | Prisma.LlmCallCreateOrConnectWithoutChatInput[] createMany?: Prisma.LlmCallCreateManyChatInputEnvelope connect?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[] } export type LlmCallUncheckedCreateNestedManyWithoutChatInput = { create?: Prisma.XOR | Prisma.LlmCallCreateWithoutChatInput[] | Prisma.LlmCallUncheckedCreateWithoutChatInput[] connectOrCreate?: Prisma.LlmCallCreateOrConnectWithoutChatInput | Prisma.LlmCallCreateOrConnectWithoutChatInput[] createMany?: Prisma.LlmCallCreateManyChatInputEnvelope connect?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[] } export type LlmCallUpdateManyWithoutChatNestedInput = { create?: Prisma.XOR | Prisma.LlmCallCreateWithoutChatInput[] | Prisma.LlmCallUncheckedCreateWithoutChatInput[] connectOrCreate?: Prisma.LlmCallCreateOrConnectWithoutChatInput | Prisma.LlmCallCreateOrConnectWithoutChatInput[] upsert?: Prisma.LlmCallUpsertWithWhereUniqueWithoutChatInput | Prisma.LlmCallUpsertWithWhereUniqueWithoutChatInput[] createMany?: Prisma.LlmCallCreateManyChatInputEnvelope set?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[] disconnect?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[] delete?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[] connect?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[] update?: Prisma.LlmCallUpdateWithWhereUniqueWithoutChatInput | Prisma.LlmCallUpdateWithWhereUniqueWithoutChatInput[] updateMany?: Prisma.LlmCallUpdateManyWithWhereWithoutChatInput | Prisma.LlmCallUpdateManyWithWhereWithoutChatInput[] deleteMany?: Prisma.LlmCallScalarWhereInput | Prisma.LlmCallScalarWhereInput[] } export type LlmCallUncheckedUpdateManyWithoutChatNestedInput = { create?: Prisma.XOR | Prisma.LlmCallCreateWithoutChatInput[] | Prisma.LlmCallUncheckedCreateWithoutChatInput[] connectOrCreate?: Prisma.LlmCallCreateOrConnectWithoutChatInput | Prisma.LlmCallCreateOrConnectWithoutChatInput[] upsert?: Prisma.LlmCallUpsertWithWhereUniqueWithoutChatInput | Prisma.LlmCallUpsertWithWhereUniqueWithoutChatInput[] createMany?: Prisma.LlmCallCreateManyChatInputEnvelope set?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[] disconnect?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[] delete?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[] connect?: Prisma.LlmCallWhereUniqueInput | Prisma.LlmCallWhereUniqueInput[] update?: Prisma.LlmCallUpdateWithWhereUniqueWithoutChatInput | Prisma.LlmCallUpdateWithWhereUniqueWithoutChatInput[] updateMany?: Prisma.LlmCallUpdateManyWithWhereWithoutChatInput | Prisma.LlmCallUpdateManyWithWhereWithoutChatInput[] deleteMany?: Prisma.LlmCallScalarWhereInput | Prisma.LlmCallScalarWhereInput[] } export type EnumProviderFieldUpdateOperationsInput = { set?: $Enums.Provider } export type NullableIntFieldUpdateOperationsInput = { set?: number | null increment?: number decrement?: number multiply?: number divide?: number } export type LlmCallCreateWithoutChatInput = { id?: string createdAt?: Date | string provider: $Enums.Provider model: string request: Prisma.JsonNullValueInput | runtime.InputJsonValue response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue inputTokens?: number | null outputTokens?: number | null totalTokens?: number | null latencyMs?: number | null error?: string | null } export type 
LlmCallUncheckedCreateWithoutChatInput = { id?: string createdAt?: Date | string provider: $Enums.Provider model: string request: Prisma.JsonNullValueInput | runtime.InputJsonValue response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue inputTokens?: number | null outputTokens?: number | null totalTokens?: number | null latencyMs?: number | null error?: string | null } export type LlmCallCreateOrConnectWithoutChatInput = { where: Prisma.LlmCallWhereUniqueInput create: Prisma.XOR } export type LlmCallCreateManyChatInputEnvelope = { data: Prisma.LlmCallCreateManyChatInput | Prisma.LlmCallCreateManyChatInput[] } export type LlmCallUpsertWithWhereUniqueWithoutChatInput = { where: Prisma.LlmCallWhereUniqueInput update: Prisma.XOR create: Prisma.XOR } export type LlmCallUpdateWithWhereUniqueWithoutChatInput = { where: Prisma.LlmCallWhereUniqueInput data: Prisma.XOR } export type LlmCallUpdateManyWithWhereWithoutChatInput = { where: Prisma.LlmCallScalarWhereInput data: Prisma.XOR } export type LlmCallScalarWhereInput = { AND?: Prisma.LlmCallScalarWhereInput | Prisma.LlmCallScalarWhereInput[] OR?: Prisma.LlmCallScalarWhereInput[] NOT?: Prisma.LlmCallScalarWhereInput | Prisma.LlmCallScalarWhereInput[] id?: Prisma.StringFilter<"LlmCall"> | string createdAt?: Prisma.DateTimeFilter<"LlmCall"> | Date | string chatId?: Prisma.StringFilter<"LlmCall"> | string provider?: Prisma.EnumProviderFilter<"LlmCall"> | $Enums.Provider model?: Prisma.StringFilter<"LlmCall"> | string request?: Prisma.JsonFilter<"LlmCall"> response?: Prisma.JsonNullableFilter<"LlmCall"> inputTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null outputTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null totalTokens?: Prisma.IntNullableFilter<"LlmCall"> | number | null latencyMs?: Prisma.IntNullableFilter<"LlmCall"> | number | null error?: Prisma.StringNullableFilter<"LlmCall"> | string | null } export type LlmCallCreateManyChatInput = { id?: string createdAt?: Date | string provider: $Enums.Provider model: string request: Prisma.JsonNullValueInput | runtime.InputJsonValue response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue inputTokens?: number | null outputTokens?: number | null totalTokens?: number | null latencyMs?: number | null error?: string | null } export type LlmCallUpdateWithoutChatInput = { id?: Prisma.StringFieldUpdateOperationsInput | string createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string provider?: Prisma.EnumProviderFieldUpdateOperationsInput | $Enums.Provider model?: Prisma.StringFieldUpdateOperationsInput | string request?: Prisma.JsonNullValueInput | runtime.InputJsonValue response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue inputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null outputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null totalTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null latencyMs?: Prisma.NullableIntFieldUpdateOperationsInput | number | null error?: Prisma.NullableStringFieldUpdateOperationsInput | string | null } export type LlmCallUncheckedUpdateWithoutChatInput = { id?: Prisma.StringFieldUpdateOperationsInput | string createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string provider?: Prisma.EnumProviderFieldUpdateOperationsInput | $Enums.Provider model?: Prisma.StringFieldUpdateOperationsInput | string request?: Prisma.JsonNullValueInput | runtime.InputJsonValue response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue 
inputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null outputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null totalTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null latencyMs?: Prisma.NullableIntFieldUpdateOperationsInput | number | null error?: Prisma.NullableStringFieldUpdateOperationsInput | string | null } export type LlmCallUncheckedUpdateManyWithoutChatInput = { id?: Prisma.StringFieldUpdateOperationsInput | string createdAt?: Prisma.DateTimeFieldUpdateOperationsInput | Date | string provider?: Prisma.EnumProviderFieldUpdateOperationsInput | $Enums.Provider model?: Prisma.StringFieldUpdateOperationsInput | string request?: Prisma.JsonNullValueInput | runtime.InputJsonValue response?: Prisma.NullableJsonNullValueInput | runtime.InputJsonValue inputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null outputTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null totalTokens?: Prisma.NullableIntFieldUpdateOperationsInput | number | null latencyMs?: Prisma.NullableIntFieldUpdateOperationsInput | number | null error?: Prisma.NullableStringFieldUpdateOperationsInput | string | null } export type LlmCallSelect = runtime.Types.Extensions.GetSelect<{ id?: boolean createdAt?: boolean chatId?: boolean provider?: boolean model?: boolean request?: boolean response?: boolean inputTokens?: boolean outputTokens?: boolean totalTokens?: boolean latencyMs?: boolean error?: boolean chat?: boolean | Prisma.ChatDefaultArgs }, ExtArgs["result"]["llmCall"]> export type LlmCallSelectCreateManyAndReturn = runtime.Types.Extensions.GetSelect<{ id?: boolean createdAt?: boolean chatId?: boolean provider?: boolean model?: boolean request?: boolean response?: boolean inputTokens?: boolean outputTokens?: boolean totalTokens?: boolean latencyMs?: boolean error?: boolean chat?: boolean | Prisma.ChatDefaultArgs }, ExtArgs["result"]["llmCall"]> export type LlmCallSelectUpdateManyAndReturn = runtime.Types.Extensions.GetSelect<{ id?: boolean createdAt?: boolean chatId?: boolean provider?: boolean model?: boolean request?: boolean response?: boolean inputTokens?: boolean outputTokens?: boolean totalTokens?: boolean latencyMs?: boolean error?: boolean chat?: boolean | Prisma.ChatDefaultArgs }, ExtArgs["result"]["llmCall"]> export type LlmCallSelectScalar = { id?: boolean createdAt?: boolean chatId?: boolean provider?: boolean model?: boolean request?: boolean response?: boolean inputTokens?: boolean outputTokens?: boolean totalTokens?: boolean latencyMs?: boolean error?: boolean } export type LlmCallOmit = runtime.Types.Extensions.GetOmit<"id" | "createdAt" | "chatId" | "provider" | "model" | "request" | "response" | "inputTokens" | "outputTokens" | "totalTokens" | "latencyMs" | "error", ExtArgs["result"]["llmCall"]> export type LlmCallInclude = { chat?: boolean | Prisma.ChatDefaultArgs } export type LlmCallIncludeCreateManyAndReturn = { chat?: boolean | Prisma.ChatDefaultArgs } export type LlmCallIncludeUpdateManyAndReturn = { chat?: boolean | Prisma.ChatDefaultArgs } export type $LlmCallPayload = { name: "LlmCall" objects: { chat: Prisma.$ChatPayload } scalars: runtime.Types.Extensions.GetPayloadResult<{ id: string createdAt: Date chatId: string provider: $Enums.Provider model: string request: runtime.JsonValue response: runtime.JsonValue | null inputTokens: number | null outputTokens: number | null totalTokens: number | null latencyMs: number | null error: string | null }, ExtArgs["result"]["llmCall"]> composites: {} } export type 
LlmCallGetPayload = runtime.Types.Result.GetResult export type LlmCallCountArgs = Omit & { select?: LlmCallCountAggregateInputType | true } export interface LlmCallDelegate { [K: symbol]: { types: Prisma.TypeMap['model']['LlmCall'], meta: { name: 'LlmCall' } } /** * Find zero or one LlmCall that matches the filter. * @param {LlmCallFindUniqueArgs} args - Arguments to find a LlmCall * @example * // Get one LlmCall * const llmCall = await prisma.llmCall.findUnique({ * where: { * // ... provide filter here * } * }) */ findUnique(args: Prisma.SelectSubset>): Prisma.Prisma__LlmCallClient, T, "findUnique", GlobalOmitOptions> | null, null, ExtArgs, GlobalOmitOptions> /** * Find one LlmCall that matches the filter or throw an error with `error.code='P2025'` * if no matches were found. * @param {LlmCallFindUniqueOrThrowArgs} args - Arguments to find a LlmCall * @example * // Get one LlmCall * const llmCall = await prisma.llmCall.findUniqueOrThrow({ * where: { * // ... provide filter here * } * }) */ findUniqueOrThrow(args: Prisma.SelectSubset>): Prisma.Prisma__LlmCallClient, T, "findUniqueOrThrow", GlobalOmitOptions>, never, ExtArgs, GlobalOmitOptions> /** * Find the first LlmCall that matches the filter. * Note, that providing `undefined` is treated as the value not being there. * Read more here: https://pris.ly/d/null-undefined * @param {LlmCallFindFirstArgs} args - Arguments to find a LlmCall * @example * // Get one LlmCall * const llmCall = await prisma.llmCall.findFirst({ * where: { * // ... provide filter here * } * }) */ findFirst(args?: Prisma.SelectSubset>): Prisma.Prisma__LlmCallClient, T, "findFirst", GlobalOmitOptions> | null, null, ExtArgs, GlobalOmitOptions> /** * Find the first LlmCall that matches the filter or * throw `PrismaKnownClientError` with `P2025` code if no matches were found. * Note, that providing `undefined` is treated as the value not being there. * Read more here: https://pris.ly/d/null-undefined * @param {LlmCallFindFirstOrThrowArgs} args - Arguments to find a LlmCall * @example * // Get one LlmCall * const llmCall = await prisma.llmCall.findFirstOrThrow({ * where: { * // ... provide filter here * } * }) */ findFirstOrThrow(args?: Prisma.SelectSubset>): Prisma.Prisma__LlmCallClient, T, "findFirstOrThrow", GlobalOmitOptions>, never, ExtArgs, GlobalOmitOptions> /** * Find zero or more LlmCalls that matches the filter. * Note, that providing `undefined` is treated as the value not being there. * Read more here: https://pris.ly/d/null-undefined * @param {LlmCallFindManyArgs} args - Arguments to filter and select certain fields only. * @example * // Get all LlmCalls * const llmCalls = await prisma.llmCall.findMany() * * // Get first 10 LlmCalls * const llmCalls = await prisma.llmCall.findMany({ take: 10 }) * * // Only select the `id` * const llmCallWithIdOnly = await prisma.llmCall.findMany({ select: { id: true } }) * */ findMany(args?: Prisma.SelectSubset>): Prisma.PrismaPromise, T, "findMany", GlobalOmitOptions>> /** * Create a LlmCall. * @param {LlmCallCreateArgs} args - Arguments to create a LlmCall. * @example * // Create one LlmCall * const LlmCall = await prisma.llmCall.create({ * data: { * // ... data to create a LlmCall * } * }) * */ create(args: Prisma.SelectSubset>): Prisma.Prisma__LlmCallClient, T, "create", GlobalOmitOptions>, never, ExtArgs, GlobalOmitOptions> /** * Create many LlmCalls. * @param {LlmCallCreateManyArgs} args - Arguments to create many LlmCalls. 
* @example * // Create many LlmCalls * const llmCall = await prisma.llmCall.createMany({ * data: [ * // ... provide data here * ] * }) * */ createMany(args?: Prisma.SelectSubset>): Prisma.PrismaPromise /** * Create many LlmCalls and returns the data saved in the database. * @param {LlmCallCreateManyAndReturnArgs} args - Arguments to create many LlmCalls. * @example * // Create many LlmCalls * const llmCall = await prisma.llmCall.createManyAndReturn({ * data: [ * // ... provide data here * ] * }) * * // Create many LlmCalls and only return the `id` * const llmCallWithIdOnly = await prisma.llmCall.createManyAndReturn({ * select: { id: true }, * data: [ * // ... provide data here * ] * }) * Note, that providing `undefined` is treated as the value not being there. * Read more here: https://pris.ly/d/null-undefined * */ createManyAndReturn(args?: Prisma.SelectSubset>): Prisma.PrismaPromise, T, "createManyAndReturn", GlobalOmitOptions>> /** * Delete a LlmCall. * @param {LlmCallDeleteArgs} args - Arguments to delete one LlmCall. * @example * // Delete one LlmCall * const LlmCall = await prisma.llmCall.delete({ * where: { * // ... filter to delete one LlmCall * } * }) * */ delete(args: Prisma.SelectSubset>): Prisma.Prisma__LlmCallClient, T, "delete", GlobalOmitOptions>, never, ExtArgs, GlobalOmitOptions> /** * Update one LlmCall. * @param {LlmCallUpdateArgs} args - Arguments to update one LlmCall. * @example * // Update one LlmCall * const llmCall = await prisma.llmCall.update({ * where: { * // ... provide filter here * }, * data: { * // ... provide data here * } * }) * */ update(args: Prisma.SelectSubset>): Prisma.Prisma__LlmCallClient, T, "update", GlobalOmitOptions>, never, ExtArgs, GlobalOmitOptions> /** * Delete zero or more LlmCalls. * @param {LlmCallDeleteManyArgs} args - Arguments to filter LlmCalls to delete. * @example * // Delete a few LlmCalls * const { count } = await prisma.llmCall.deleteMany({ * where: { * // ... provide filter here * } * }) * */ deleteMany(args?: Prisma.SelectSubset>): Prisma.PrismaPromise /** * Update zero or more LlmCalls. * Note, that providing `undefined` is treated as the value not being there. * Read more here: https://pris.ly/d/null-undefined * @param {LlmCallUpdateManyArgs} args - Arguments to update one or more rows. * @example * // Update many LlmCalls * const llmCall = await prisma.llmCall.updateMany({ * where: { * // ... provide filter here * }, * data: { * // ... provide data here * } * }) * */ updateMany(args: Prisma.SelectSubset>): Prisma.PrismaPromise /** * Update zero or more LlmCalls and returns the data updated in the database. * @param {LlmCallUpdateManyAndReturnArgs} args - Arguments to update many LlmCalls. * @example * // Update many LlmCalls * const llmCall = await prisma.llmCall.updateManyAndReturn({ * where: { * // ... provide filter here * }, * data: [ * // ... provide data here * ] * }) * * // Update zero or more LlmCalls and only return the `id` * const llmCallWithIdOnly = await prisma.llmCall.updateManyAndReturn({ * select: { id: true }, * where: { * // ... provide filter here * }, * data: [ * // ... provide data here * ] * }) * Note, that providing `undefined` is treated as the value not being there. * Read more here: https://pris.ly/d/null-undefined * */ updateManyAndReturn(args: Prisma.SelectSubset>): Prisma.PrismaPromise, T, "updateManyAndReturn", GlobalOmitOptions>> /** * Create or update one LlmCall. * @param {LlmCallUpsertArgs} args - Arguments to update or create a LlmCall. 
* @example * // Update or create a LlmCall * const llmCall = await prisma.llmCall.upsert({ * create: { * // ... data to create a LlmCall * }, * update: { * // ... in case it already exists, update * }, * where: { * // ... the filter for the LlmCall we want to update * } * }) */ upsert(args: Prisma.SelectSubset>): Prisma.Prisma__LlmCallClient, T, "upsert", GlobalOmitOptions>, never, ExtArgs, GlobalOmitOptions> /** * Count the number of LlmCalls. * Note, that providing `undefined` is treated as the value not being there. * Read more here: https://pris.ly/d/null-undefined * @param {LlmCallCountArgs} args - Arguments to filter LlmCalls to count. * @example * // Count the number of LlmCalls * const count = await prisma.llmCall.count({ * where: { * // ... the filter for the LlmCalls we want to count * } * }) **/ count( args?: Prisma.Subset, ): Prisma.PrismaPromise< T extends runtime.Types.Utils.Record<'select', any> ? T['select'] extends true ? number : Prisma.GetScalarType : number > /** * Allows you to perform aggregations operations on a LlmCall. * Note, that providing `undefined` is treated as the value not being there. * Read more here: https://pris.ly/d/null-undefined * @param {LlmCallAggregateArgs} args - Select which aggregations you would like to apply and on what fields. * @example * // Ordered by age ascending * // Where email contains prisma.io * // Limited to the 10 users * const aggregations = await prisma.user.aggregate({ * _avg: { * age: true, * }, * where: { * email: { * contains: "prisma.io", * }, * }, * orderBy: { * age: "asc", * }, * take: 10, * }) **/ aggregate(args: Prisma.Subset): Prisma.PrismaPromise> /** * Group by LlmCall. * Note, that providing `undefined` is treated as the value not being there. * Read more here: https://pris.ly/d/null-undefined * @param {LlmCallGroupByArgs} args - Group by arguments. * @example * // Group by city, order by createdAt, get count * const result = await prisma.user.groupBy({ * by: ['city', 'createdAt'], * orderBy: { * createdAt: true * }, * _count: { * _all: true * }, * }) * **/ groupBy< T extends LlmCallGroupByArgs, HasSelectOrTake extends Prisma.Or< Prisma.Extends<'skip', Prisma.Keys>, Prisma.Extends<'take', Prisma.Keys> >, OrderByArg extends Prisma.True extends HasSelectOrTake ? { orderBy: LlmCallGroupByArgs['orderBy'] } : { orderBy?: LlmCallGroupByArgs['orderBy'] }, OrderFields extends Prisma.ExcludeUnderscoreKeys>>, ByFields extends Prisma.MaybeTupleToUnion, ByValid extends Prisma.Has, HavingFields extends Prisma.GetHavingFields, HavingValid extends Prisma.Has, ByEmpty extends T['by'] extends never[] ? Prisma.True : Prisma.False, InputErrors extends ByEmpty extends Prisma.True ? `Error: "by" must not be empty.` : HavingValid extends Prisma.False ? { [P in HavingFields]: P extends ByFields ? never : P extends string ? `Error: Field "${P}" used in "having" needs to be provided in "by".` : [ Error, 'Field ', P, ` in "having" needs to be provided in "by"`, ] }[HavingFields] : 'take' extends Prisma.Keys ? 'orderBy' extends Prisma.Keys ? ByValid extends Prisma.True ? {} : { [P in OrderFields]: P extends ByFields ? never : `Error: Field "${P}" in "orderBy" needs to be provided in "by"` }[OrderFields] : 'Error: If you provide "take", you also need to provide "orderBy"' : 'skip' extends Prisma.Keys ? 'orderBy' extends Prisma.Keys ? ByValid extends Prisma.True ? {} : { [P in OrderFields]: P extends ByFields ? 
never : `Error: Field "${P}" in "orderBy" needs to be provided in "by"` }[OrderFields] : 'Error: If you provide "skip", you also need to provide "orderBy"' : ByValid extends Prisma.True ? {} : { [P in OrderFields]: P extends ByFields ? never : `Error: Field "${P}" in "orderBy" needs to be provided in "by"` }[OrderFields] >(args: Prisma.SubsetIntersection & InputErrors): {} extends InputErrors ? GetLlmCallGroupByPayload : Prisma.PrismaPromise /** * Fields of the LlmCall model */ readonly fields: LlmCallFieldRefs; } /** * The delegate class that acts as a "Promise-like" for LlmCall. * Why is this prefixed with `Prisma__`? * Because we want to prevent naming conflicts as mentioned in * https://github.com/prisma/prisma-client-js/issues/707 */ export interface Prisma__LlmCallClient extends Prisma.PrismaPromise { readonly [Symbol.toStringTag]: "PrismaPromise" chat = {}>(args?: Prisma.Subset>): Prisma.Prisma__ChatClient, T, "findUniqueOrThrow", GlobalOmitOptions> | Null, Null, ExtArgs, GlobalOmitOptions> /** * Attaches callbacks for the resolution and/or rejection of the Promise. * @param onfulfilled The callback to execute when the Promise is resolved. * @param onrejected The callback to execute when the Promise is rejected. * @returns A Promise for the completion of which ever callback is executed. */ then(onfulfilled?: ((value: T) => TResult1 | PromiseLike) | undefined | null, onrejected?: ((reason: any) => TResult2 | PromiseLike) | undefined | null): runtime.Types.Utils.JsPromise /** * Attaches a callback for only the rejection of the Promise. * @param onrejected The callback to execute when the Promise is rejected. * @returns A Promise for the completion of the callback. */ catch(onrejected?: ((reason: any) => TResult | PromiseLike) | undefined | null): runtime.Types.Utils.JsPromise /** * Attaches a callback that is invoked when the Promise is settled (fulfilled or rejected). The * resolved value cannot be modified from the callback. * @param onfinally The callback to execute when the Promise is settled (fulfilled or rejected). * @returns A Promise for the completion of the callback. */ finally(onfinally?: (() => void) | undefined | null): runtime.Types.Utils.JsPromise } /** * Fields of the LlmCall model */ export interface LlmCallFieldRefs { readonly id: Prisma.FieldRef<"LlmCall", 'String'> readonly createdAt: Prisma.FieldRef<"LlmCall", 'DateTime'> readonly chatId: Prisma.FieldRef<"LlmCall", 'String'> readonly provider: Prisma.FieldRef<"LlmCall", 'Provider'> readonly model: Prisma.FieldRef<"LlmCall", 'String'> readonly request: Prisma.FieldRef<"LlmCall", 'Json'> readonly response: Prisma.FieldRef<"LlmCall", 'Json'> readonly inputTokens: Prisma.FieldRef<"LlmCall", 'Int'> readonly outputTokens: Prisma.FieldRef<"LlmCall", 'Int'> readonly totalTokens: Prisma.FieldRef<"LlmCall", 'Int'> readonly latencyMs: Prisma.FieldRef<"LlmCall", 'Int'> readonly error: Prisma.FieldRef<"LlmCall", 'String'> } // Custom InputTypes /** * LlmCall findUnique */ export type LlmCallFindUniqueArgs = { /** * Select specific fields to fetch from the LlmCall */ select?: Prisma.LlmCallSelect | null /** * Omit specific fields from the LlmCall */ omit?: Prisma.LlmCallOmit | null /** * Choose, which related nodes to fetch as well */ include?: Prisma.LlmCallInclude | null /** * Filter, which LlmCall to fetch. 
*/ where: Prisma.LlmCallWhereUniqueInput } /** * LlmCall findUniqueOrThrow */ export type LlmCallFindUniqueOrThrowArgs = { /** * Select specific fields to fetch from the LlmCall */ select?: Prisma.LlmCallSelect | null /** * Omit specific fields from the LlmCall */ omit?: Prisma.LlmCallOmit | null /** * Choose, which related nodes to fetch as well */ include?: Prisma.LlmCallInclude | null /** * Filter, which LlmCall to fetch. */ where: Prisma.LlmCallWhereUniqueInput } /** * LlmCall findFirst */ export type LlmCallFindFirstArgs = { /** * Select specific fields to fetch from the LlmCall */ select?: Prisma.LlmCallSelect | null /** * Omit specific fields from the LlmCall */ omit?: Prisma.LlmCallOmit | null /** * Choose, which related nodes to fetch as well */ include?: Prisma.LlmCallInclude | null /** * Filter, which LlmCall to fetch. */ where?: Prisma.LlmCallWhereInput /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/sorting Sorting Docs} * * Determine the order of LlmCalls to fetch. */ orderBy?: Prisma.LlmCallOrderByWithRelationInput | Prisma.LlmCallOrderByWithRelationInput[] /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination#cursor-based-pagination Cursor Docs} * * Sets the position for searching for LlmCalls. */ cursor?: Prisma.LlmCallWhereUniqueInput /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs} * * Take `±n` LlmCalls from the position of the cursor. */ take?: number /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs} * * Skip the first `n` LlmCalls. */ skip?: number /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/distinct Distinct Docs} * * Filter by unique combinations of LlmCalls. */ distinct?: Prisma.LlmCallScalarFieldEnum | Prisma.LlmCallScalarFieldEnum[] } /** * LlmCall findFirstOrThrow */ export type LlmCallFindFirstOrThrowArgs = { /** * Select specific fields to fetch from the LlmCall */ select?: Prisma.LlmCallSelect | null /** * Omit specific fields from the LlmCall */ omit?: Prisma.LlmCallOmit | null /** * Choose, which related nodes to fetch as well */ include?: Prisma.LlmCallInclude | null /** * Filter, which LlmCall to fetch. */ where?: Prisma.LlmCallWhereInput /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/sorting Sorting Docs} * * Determine the order of LlmCalls to fetch. */ orderBy?: Prisma.LlmCallOrderByWithRelationInput | Prisma.LlmCallOrderByWithRelationInput[] /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination#cursor-based-pagination Cursor Docs} * * Sets the position for searching for LlmCalls. */ cursor?: Prisma.LlmCallWhereUniqueInput /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs} * * Take `±n` LlmCalls from the position of the cursor. */ take?: number /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs} * * Skip the first `n` LlmCalls. */ skip?: number /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/distinct Distinct Docs} * * Filter by unique combinations of LlmCalls. 
*/ distinct?: Prisma.LlmCallScalarFieldEnum | Prisma.LlmCallScalarFieldEnum[] } /** * LlmCall findMany */ export type LlmCallFindManyArgs = { /** * Select specific fields to fetch from the LlmCall */ select?: Prisma.LlmCallSelect | null /** * Omit specific fields from the LlmCall */ omit?: Prisma.LlmCallOmit | null /** * Choose, which related nodes to fetch as well */ include?: Prisma.LlmCallInclude | null /** * Filter, which LlmCalls to fetch. */ where?: Prisma.LlmCallWhereInput /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/sorting Sorting Docs} * * Determine the order of LlmCalls to fetch. */ orderBy?: Prisma.LlmCallOrderByWithRelationInput | Prisma.LlmCallOrderByWithRelationInput[] /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination#cursor-based-pagination Cursor Docs} * * Sets the position for listing LlmCalls. */ cursor?: Prisma.LlmCallWhereUniqueInput /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs} * * Take `±n` LlmCalls from the position of the cursor. */ take?: number /** * {@link https://www.prisma.io/docs/concepts/components/prisma-client/pagination Pagination Docs} * * Skip the first `n` LlmCalls. */ skip?: number distinct?: Prisma.LlmCallScalarFieldEnum | Prisma.LlmCallScalarFieldEnum[] } /** * LlmCall create */ export type LlmCallCreateArgs = { /** * Select specific fields to fetch from the LlmCall */ select?: Prisma.LlmCallSelect | null /** * Omit specific fields from the LlmCall */ omit?: Prisma.LlmCallOmit | null /** * Choose, which related nodes to fetch as well */ include?: Prisma.LlmCallInclude | null /** * The data needed to create a LlmCall. */ data: Prisma.XOR } /** * LlmCall createMany */ export type LlmCallCreateManyArgs = { /** * The data used to create many LlmCalls. */ data: Prisma.LlmCallCreateManyInput | Prisma.LlmCallCreateManyInput[] } /** * LlmCall createManyAndReturn */ export type LlmCallCreateManyAndReturnArgs = { /** * Select specific fields to fetch from the LlmCall */ select?: Prisma.LlmCallSelectCreateManyAndReturn | null /** * Omit specific fields from the LlmCall */ omit?: Prisma.LlmCallOmit | null /** * The data used to create many LlmCalls. */ data: Prisma.LlmCallCreateManyInput | Prisma.LlmCallCreateManyInput[] /** * Choose, which related nodes to fetch as well */ include?: Prisma.LlmCallIncludeCreateManyAndReturn | null } /** * LlmCall update */ export type LlmCallUpdateArgs = { /** * Select specific fields to fetch from the LlmCall */ select?: Prisma.LlmCallSelect | null /** * Omit specific fields from the LlmCall */ omit?: Prisma.LlmCallOmit | null /** * Choose, which related nodes to fetch as well */ include?: Prisma.LlmCallInclude | null /** * The data needed to update a LlmCall. */ data: Prisma.XOR /** * Choose, which LlmCall to update. */ where: Prisma.LlmCallWhereUniqueInput } /** * LlmCall updateMany */ export type LlmCallUpdateManyArgs = { /** * The data used to update LlmCalls. */ data: Prisma.XOR /** * Filter which LlmCalls to update */ where?: Prisma.LlmCallWhereInput /** * Limit how many LlmCalls to update. */ limit?: number } /** * LlmCall updateManyAndReturn */ export type LlmCallUpdateManyAndReturnArgs = { /** * Select specific fields to fetch from the LlmCall */ select?: Prisma.LlmCallSelectUpdateManyAndReturn | null /** * Omit specific fields from the LlmCall */ omit?: Prisma.LlmCallOmit | null /** * The data used to update LlmCalls. 
*/ data: Prisma.XOR /** * Filter which LlmCalls to update */ where?: Prisma.LlmCallWhereInput /** * Limit how many LlmCalls to update. */ limit?: number /** * Choose, which related nodes to fetch as well */ include?: Prisma.LlmCallIncludeUpdateManyAndReturn | null } /** * LlmCall upsert */ export type LlmCallUpsertArgs = { /** * Select specific fields to fetch from the LlmCall */ select?: Prisma.LlmCallSelect | null /** * Omit specific fields from the LlmCall */ omit?: Prisma.LlmCallOmit | null /** * Choose, which related nodes to fetch as well */ include?: Prisma.LlmCallInclude | null /** * The filter to search for the LlmCall to update in case it exists. */ where: Prisma.LlmCallWhereUniqueInput /** * In case the LlmCall found by the `where` argument doesn't exist, create a new LlmCall with this data. */ create: Prisma.XOR /** * In case the LlmCall was found with the provided `where` argument, update it with this data. */ update: Prisma.XOR } /** * LlmCall delete */ export type LlmCallDeleteArgs = { /** * Select specific fields to fetch from the LlmCall */ select?: Prisma.LlmCallSelect | null /** * Omit specific fields from the LlmCall */ omit?: Prisma.LlmCallOmit | null /** * Choose, which related nodes to fetch as well */ include?: Prisma.LlmCallInclude | null /** * Filter which LlmCall to delete. */ where: Prisma.LlmCallWhereUniqueInput } /** * LlmCall deleteMany */ export type LlmCallDeleteManyArgs = { /** * Filter which LlmCalls to delete */ where?: Prisma.LlmCallWhereInput /** * Limit how many LlmCalls to delete. */ limit?: number } /** * LlmCall without action */ export type LlmCallDefaultArgs = { /** * Select specific fields to fetch from the LlmCall */ select?: Prisma.LlmCallSelect | null /** * Omit specific fields from the LlmCall */ omit?: Prisma.LlmCallOmit | null /** * Choose, which related nodes to fetch as well */ include?: Prisma.LlmCallInclude | null }
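
/**
 * Usage sketch (editor's addition, not emitted by the Prisma generator): the
 * `LlmCallDelegate` interface above is consumed through a `PrismaClient`
 * instance as `prisma.llmCall`. The client import path, the `prisma` variable
 * name, and the `"OPENAI"` provider value are assumptions for illustration
 * only; substitute the project's real client entry point and an actual
 * `$Enums.Provider` member.
 *
 * @example
 * import { PrismaClient } from "../client.js" // assumed generated entry point
 *
 * const prisma = new PrismaClient()
 *
 * // Persist one call attached to an existing Chat, then list its recent calls.
 * async function recordCall(chatId: string) {
 *   await prisma.llmCall.create({
 *     data: {
 *       chat: { connect: { id: chatId } }, // required relation (ChatCreateNestedOneWithoutCallsInput)
 *       provider: "OPENAI",                // placeholder for a $Enums.Provider member
 *       model: "gpt-4o",                   // sample model string
 *       request: { messages: [] },         // stored in the Json `request` column
 *       inputTokens: 12,
 *       outputTokens: 34,
 *       totalTokens: 46,
 *       latencyMs: 850,
 *     },
 *   })
 *
 *   return prisma.llmCall.findMany({
 *     where: { chatId },
 *     orderBy: { createdAt: "desc" },
 *     take: 10,
 *     select: { id: true, model: true, totalTokens: true, error: true },
 *   })
 * }
 */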
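
/**
 * Aggregation sketch (editor's addition): how `LlmCallAggregateArgs` and
 * `LlmCallGroupByArgs` defined above are typically used for token and latency
 * reporting. All field names come from this file; the `prisma` client instance
 * is assumed to exist as in the previous example, and the chat id is a
 * placeholder.
 *
 * @example
 * // Total and average usage for a single chat.
 * const usage = await prisma.llmCall.aggregate({
 *   where: { chatId: "some-chat-id" },
 *   _count: true,
 *   _sum: { totalTokens: true },
 *   _avg: { latencyMs: true },
 * })
 *
 * // Per-provider/model breakdown; `by` must list LlmCallScalarFieldEnum values.
 * const breakdown = await prisma.llmCall.groupBy({
 *   by: ["provider", "model"],
 *   where: { error: null },
 *   _count: { _all: true },
 *   _sum: { inputTokens: true, outputTokens: true },
 *   orderBy: { provider: "asc" },
 * })
 */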