Skip to content

Ollama

Defined in: .build/typescript/packages/providers/ollama/src/llm.ts:61

Extends: ToolCallLLM

new Ollama(params): Ollama

Defined in: .build/typescript/packages/providers/ollama/src/llm.ts:75

Parameters: params: OllamaParams

Returns: Ollama

Overrides: ToolCallLLM.constructor

supportToolCall: boolean = true

Defined in: .build/typescript/packages/providers/ollama/src/llm.ts:62

Overrides: ToolCallLLM.supportToolCall


readonly ollama: Ollama

Defined in: .build/typescript/packages/providers/ollama/src/llm.ts:63


model: string

Defined in: .build/typescript/packages/providers/ollama/src/llm.ts:66


options: Partial<Omit<Options, "num_ctx" | "top_p" | "temperature">> & Pick<Options, "num_ctx" | "top_p" | "temperature">

Defined in: .build/typescript/packages/providers/ollama/src/llm.ts:68

get metadata(): LLMMetadata

Defined in: .build/typescript/packages/providers/ollama/src/llm.ts:87

Returns: LLMMetadata

Overrides: ToolCallLLM.metadata

chat(params): Promise<AsyncIterable<ChatResponseChunk, any, any>>

Defined in: .build/typescript/packages/providers/ollama/src/llm.ts:100

Parameters: params: LLMChatParamsStreaming<ToolCallLLMMessageOptions>

Returns: Promise<AsyncIterable<ChatResponseChunk, any, any>>

Overrides: ToolCallLLM.chat

chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>

Defined in: .build/typescript/packages/providers/ollama/src/llm.ts:103

Parameters: params: LLMChatParamsNonStreaming<ToolCallLLMMessageOptions>

Returns: Promise<ChatResponse<ToolCallLLMMessageOptions>>

Overrides: ToolCallLLM.chat


complete(params): Promise<AsyncIterable<CompletionResponse, any, any>>

Defined in: .build/typescript/packages/providers/ollama/src/llm.ts:183

Parameters: params: LLMCompletionParamsStreaming

Returns: Promise<AsyncIterable<CompletionResponse, any, any>>

Overrides: ToolCallLLM.complete

complete(params): Promise<CompletionResponse>

Defined in: .build/typescript/packages/providers/ollama/src/llm.ts:186

Parameters: params: LLMCompletionParamsNonStreaming

Returns: Promise<CompletionResponse>

Overrides: ToolCallLLM.complete


static toTool(tool): Tool

Defined in: .build/typescript/packages/providers/ollama/src/llm.ts:219

Parameters: tool: BaseTool

Returns: Tool