
Class: MistralAI

Defined in: .build/typescript/packages/providers/mistral/src/llm.ts:79

MistralAI LLM implementation

Extends: ToolCallLLM<ToolCallLLMMessageOptions>

Constructor

new MistralAI(init?): MistralAI

Defined in: .build/typescript/packages/providers/mistral/src/llm.ts:91

Parameters:

init?: Partial<MistralAI>

Returns: MistralAI

Overrides: ToolCallLLM<ToolCallLLMMessageOptions>.constructor
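
A minimal construction sketch, assuming the class is exported from the @llamaindex/mistral package (inferred from the source path); the environment-variable fallback for apiKey is an assumption:

```ts
import { MistralAI } from "@llamaindex/mistral";

// init is Partial<MistralAI>, so every field is optional and unset
// fields fall back to the class defaults.
const llm = new MistralAI({
  model: "mistral-small-latest",
  temperature: 0.1,
  topP: 1,
  maxTokens: 512,
  apiKey: process.env.MISTRAL_API_KEY, // assumed to fall back to the env var if omitted
});
```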

model: "mistral-small-latest" | "mistral-large-latest" | "codestral-latest" | "pixtral-large-latest" | "ministral-8b-latest" | "ministral-3b-latest" | "mistral-tiny" | "mistral-small" | "mistral-medium" | "mistral-saba-latest" | "mistral-embed" | "mistral-moderation-latest"

Defined in: .build/typescript/packages/providers/mistral/src/llm.ts:81


temperature: number

Defined in: .build/typescript/packages/providers/mistral/src/llm.ts:82


topP: number

Defined in: .build/typescript/packages/providers/mistral/src/llm.ts:83


optional maxTokens: number

Defined in: .build/typescript/packages/providers/mistral/src/llm.ts:84


optional apiKey: string

Defined in: .build/typescript/packages/providers/mistral/src/llm.ts:85


safeMode: boolean

Defined in: .build/typescript/packages/providers/mistral/src/llm.ts:86


optional randomSeed: number

Defined in: .build/typescript/packages/providers/mistral/src/llm.ts:87

Accessors

get metadata(): object

Defined in: .build/typescript/packages/providers/mistral/src/llm.ts:102

Returns an object with the following members:

model: "mistral-small-latest" | "mistral-large-latest" | "codestral-latest" | "pixtral-large-latest" | "ministral-8b-latest" | "ministral-3b-latest" | "mistral-tiny" | "mistral-small" | "mistral-medium" | "mistral-saba-latest" | "mistral-embed" | "mistral-moderation-latest"

temperature: number

topP: number

maxTokens: undefined | number

contextWindow: number

tokenizer: undefined = undefined

structuredOutput: boolean = false

Overrides: ToolCallLLM.metadata
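
A short sketch of reading the metadata, e.g. to size prompts against the context window (reusing llm from the constructor sketch above):

```ts
const { model, contextWindow, temperature } = llm.metadata;
console.log(`${model}: ${contextWindow}-token context, temperature ${temperature}`);
```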


get supportToolCall(): boolean

Defined in: .build/typescript/packages/providers/mistral/src/llm.ts:114

Returns: boolean

Overrides: ToolCallLLM.supportToolCall
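
Callers can gate tool usage on this flag; a one-line sketch:

```ts
if (llm.supportToolCall) {
  // Safe to include tools in the chat params for this model.
}
```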

Methods

formatMessages(messages): ({ role: "assistant"; content: string; toolCalls: object[]; toolCallId?: undefined; } | { toolCalls?: undefined; role: "tool"; content: string; toolCallId: string; } | { toolCalls?: undefined; toolCallId?: undefined; role: MessageType; content: string; })[]

Defined in: .build/typescript/packages/providers/mistral/src/llm.ts:118

Parameters:

messages: ChatMessage<ToolCallLLMMessageOptions>[]

Returns: ({ role: "assistant"; content: string; toolCalls: object[]; toolCallId?: undefined; } | { toolCalls?: undefined; role: "tool"; content: string; toolCallId: string; } | { toolCalls?: undefined; toolCallId?: undefined; role: MessageType; content: string; })[]


static toTool(tool): Tool

Defined in: .build/typescript/packages/providers/mistral/src/llm.ts:171

Parameters:

tool: BaseTool

Returns: Tool
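
A sketch of converting a framework tool into the Mistral SDK's Tool shape; the weather tool is hypothetical, and the { metadata, call } shape of BaseTool plus the llamaindex import path are assumptions:

```ts
import { MistralAI } from "@llamaindex/mistral";
import type { BaseTool } from "llamaindex";

// Hypothetical weather tool in the assumed BaseTool shape.
const weatherTool: BaseTool = {
  metadata: {
    name: "getWeather",
    description: "Returns the current weather for a city",
    parameters: {
      type: "object",
      properties: { city: { type: "string" } },
      required: ["city"],
    },
  },
  call: async ({ city }: { city: string }) => `Sunny in ${city}`,
};

const mistralTool = MistralAI.toTool(weatherTool);
```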


chat(params): Promise<AsyncIterable<ChatResponseChunk, any, any>>

Defined in: .build/typescript/packages/providers/mistral/src/llm.ts:186

Parameters:

params: LLMChatParamsStreaming

Returns: Promise<AsyncIterable<ChatResponseChunk, any, any>>

Overrides: ToolCallLLM.chat

chat(params): Promise<ChatResponse<object>>

Defined in: .build/typescript/packages/providers/mistral/src/llm.ts:189

Parameters:

params: LLMChatParamsNonStreaming<ToolCallLLMMessageOptions>

Returns: Promise<ChatResponse<object>>

Overrides: ToolCallLLM.chat
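
Both overloads in one sketch: the streaming variant is selected by stream: true and yields chunks carrying a delta string, per ChatResponseChunk (reusing llm from the constructor sketch above):

```ts
// Non-streaming: resolves to a single ChatResponse.
const response = await llm.chat({
  messages: [{ role: "user", content: "Write a haiku about the sea." }],
});
console.log(response.message.content);

// Streaming: resolves to an AsyncIterable of ChatResponseChunks.
const stream = await llm.chat({
  messages: [{ role: "user", content: "Write a haiku about the sea." }],
  stream: true,
});
for await (const chunk of stream) {
  process.stdout.write(chunk.delta);
}
```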


protected streamChat(messages, tools?): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>

Defined in: .build/typescript/packages/providers/mistral/src/llm.ts:240

Parameters:

messages: ChatMessage[]

tools?: BaseTool<any>[]

Returns: AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
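
Because streamChat is protected, it is only reachable from subclasses; a speculative sketch, assuming streamChat is a regular method (not a property) and that ChatMessage and BaseTool are importable from llamaindex:

```ts
import { MistralAI } from "@llamaindex/mistral";
import type { BaseTool, ChatMessage } from "llamaindex";

// Hypothetical subclass that logs each chunk before re-yielding it.
class LoggingMistralAI extends MistralAI {
  protected async *streamChat(messages: ChatMessage[], tools?: BaseTool[]) {
    for await (const chunk of super.streamChat(messages, tools)) {
      console.debug("delta:", chunk.delta);
      yield chunk;
    }
  }
}
```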