
# Class: OpenAI

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:62

## Extends

- `ToolCallLLM<OpenAIAdditionalChatOptions>`

## Constructors

### new OpenAI()

> `new OpenAI(init?): OpenAI`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:94

#### Parameters

##### init?

`Omit<Partial<OpenAI>, "session"> & object`

#### Returns

`OpenAI`

#### Overrides

`ToolCallLLM<OpenAIAdditionalChatOptions>.constructor`
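A minimal construction sketch, assuming the provider is published as `@llamaindex/openai` (suggested by the package path above). The option names are the fields documented below; the model string and values are illustrative only.

```ts
import { OpenAI } from "@llamaindex/openai";

// Every init field is optional (Omit<Partial<OpenAI>, "session"> & object).
// When apiKey is left undefined, the OpenAI SDK conventionally falls back
// to the OPENAI_API_KEY environment variable.
const llm = new OpenAI({
  model: "gpt-4o-mini", // ChatModel | string
  temperature: 0.1,
  topP: 1,
  maxTokens: 512, // optional cap on completion length
  maxRetries: 3,
  timeout: 60_000, // assumed to be milliseconds, as in the OpenAI SDK
});
```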

## Properties

### model

> `model: ChatModel | string & object`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:63


### temperature

> `temperature: number`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:67


### reasoningEffort

> `optional reasoningEffort: "low" | "medium" | "high" | "minimal"`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:68


### topP

> `topP: number`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:69


### maxTokens

> `optional maxTokens: number`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:70


### additionalChatOptions

> `optional additionalChatOptions: OpenAIAdditionalChatOptions`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:71


### apiKey

> `optional apiKey: string = undefined`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:74


### baseURL

> `optional baseURL: string = undefined`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:75


### maxRetries

> `maxRetries: number`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:76


### timeout

> `optional timeout: number`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:77


### additionalSessionOptions

> `optional additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "maxRetries" | "timeout">`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:78


### lazySession

> `lazySession: () => Promise<LLMInstance>`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:85

#### Returns

`Promise<LLMInstance>`

## Accessors

### session

> `get session(): Promise<LLMInstance>`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:87

#### Returns

`Promise<LLMInstance>`
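`session` resolves the client produced by `lazySession`. A hedged sketch; what `LLMInstance` exposes beyond the OpenAI SDK surface is not spelled out on this page:

```ts
import { OpenAI } from "@llamaindex/openai";

const llm = new OpenAI();
// Forces lazy construction of the underlying SDK client on first access.
const client = await llm.session;
```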


### supportToolCall

> `get supportToolCall(): boolean`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:132

#### Returns

`boolean`

#### Overrides

`ToolCallLLM.supportToolCall`
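Because tool support depends on the configured model, callers can guard tool-augmented requests on this flag. A small sketch (the guard logic is illustrative, not part of the API):

```ts
import { OpenAI } from "@llamaindex/openai";

const llm = new OpenAI({ model: "gpt-4o-mini" });
// Fail fast before attempting a tool-calling request.
if (!llm.supportToolCall) {
  throw new Error(`${llm.metadata.model} does not support tool calls`);
}
```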


### live

> `get live(): OpenAILive`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:136

#### Returns

`OpenAILive`


### metadata

> `get metadata(): LLMMetadata & object`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:147

#### Returns

`LLMMetadata & object`

#### Overrides

`ToolCallLLM.metadata`
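The base `LLMMetadata` part carries the standard LlamaIndex.TS fields (`model`, `temperature`, `topP`, `maxTokens`, `contextWindow`, `tokenizer`); the intersected `object` part is not expanded on this page. A sketch using only the standard fields:

```ts
import { OpenAI } from "@llamaindex/openai";

const llm = new OpenAI({ model: "gpt-4o-mini" });
// Read back the effective model and its context window.
const { model, contextWindow } = llm.metadata;
console.log(`Chatting with ${model} (${contextWindow}-token context window)`);
```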

## Methods

### toOpenAIRole()

> `static toOpenAIRole(messageType): ChatCompletionRole`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:163

#### Parameters

##### messageType

`MessageType`

#### Returns

`ChatCompletionRole`


### toOpenAIMessage()

> `static toOpenAIMessage(messages): ChatCompletionMessageParam[]`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:176

#### Parameters

##### messages

`ChatMessage<ToolCallLLMMessageOptions>[]`

#### Returns

`ChatCompletionMessageParam[]`
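A conversion sketch; the `llamaindex` import path for the message types is an assumption (they may also come from `@llamaindex/core`):

```ts
import { OpenAI } from "@llamaindex/openai";
import type { ChatMessage, ToolCallLLMMessageOptions } from "llamaindex";

const history: ChatMessage<ToolCallLLMMessageOptions>[] = [
  { role: "system", content: "You are a terse assistant." },
  { role: "user", content: "Ping?" },
];

// Produces ChatCompletionMessageParam[] in the OpenAI SDK's wire format.
const wireMessages = OpenAI.toOpenAIMessage(history);
```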


### chat()

#### Call Signature (streaming)

> `chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:267

##### Parameters

###### params

`LLMChatParamsStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>`

##### Returns

`Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>`

##### Overrides

`ToolCallLLM.chat`

#### Call Signature (non-streaming)

> `chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:273

##### Parameters

###### params

`LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>`

##### Returns

`Promise<ChatResponse<ToolCallLLMMessageOptions>>`

##### Overrides

`ToolCallLLM.chat`
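The overload is selected by the `stream` flag on `params`: `LLMChatParamsStreaming` (with `stream: true`) yields an `AsyncIterable` of chunks, while `LLMChatParamsNonStreaming` resolves to a single response. A usage sketch of both, assuming the standard LlamaIndex.TS params shape:

```ts
import { OpenAI } from "@llamaindex/openai";

const llm = new OpenAI({ model: "gpt-4o-mini" });

// Non-streaming: resolves to one ChatResponse; for plain text replies
// message.content is a string.
const response = await llm.chat({
  messages: [{ role: "user", content: "Summarize RFC 2119 in one sentence." }],
});
console.log(response.message.content);

// Streaming: stream: true selects the AsyncIterable overload;
// each chunk carries the incremental text in `delta`.
const stream = await llm.chat({
  messages: [{ role: "user", content: "Count to five." }],
  stream: true,
});
for await (const chunk of stream) {
  process.stdout.write(chunk.delta);
}
```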


### streamChat()

> `protected streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:370

#### Parameters

##### baseRequestParams

`ChatCompletionCreateParams`

#### Returns

`AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>`


### toTool()

> `static toTool(tool): ChatCompletionTool`

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:449

#### Parameters

##### tool

`BaseTool`

#### Returns

`ChatCompletionTool`
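A conversion sketch; the `BaseTool` shape (a `metadata` block with JSON Schema `parameters` plus an optional `call`) follows LlamaIndex.TS conventions, and the `llamaindex` import path is an assumption:

```ts
import { OpenAI } from "@llamaindex/openai";
import type { BaseTool } from "llamaindex";

// A hypothetical tool used only to illustrate the conversion.
const weatherTool: BaseTool = {
  metadata: {
    name: "getWeather",
    description: "Look up the current weather for a city.",
    parameters: {
      type: "object",
      properties: { city: { type: "string" } },
      required: ["city"],
    },
  },
  call: async ({ city }: { city: string }) => `Sunny in ${city}`,
};

// Yields roughly { type: "function", function: { name, description,
// parameters } } — the OpenAI SDK's ChatCompletionTool shape.
const openAITool = OpenAI.toTool(weatherTool);
```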