OpenAI
Defined in: .build/typescript/packages/providers/openai/src/llm.ts:62
Extends

ToolCallLLM<OpenAIAdditionalChatOptions>
Constructors

Constructor

new OpenAI(init?): OpenAI

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:94

Parameters

init?

Omit<Partial<OpenAI>, "session"> & object

Returns

OpenAI

Overrides

ToolCallLLM<OpenAIAdditionalChatOptions>.constructor
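A minimal construction sketch, assuming the class is exported from the `@llamaindex/openai` package (inferred from the source path above); every `init` field is optional and mirrors the properties documented below:

```ts
import { OpenAI } from "@llamaindex/openai";

// Option names mirror the documented properties; values here are examples only.
const llm = new OpenAI({
  model: "gpt-4o-mini",
  temperature: 0.1,
  maxTokens: 512,
  apiKey: process.env.OPENAI_API_KEY,
});
```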
Properties

model

model: ChatModel | string & object

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:63
temperature

temperature: number

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:67
reasoningEffort?

optional reasoningEffort: "low" | "medium" | "high" | "minimal"

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:68
topP

topP: number

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:69
maxTokens?

optional maxTokens: number

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:70
additionalChatOptions?

optional additionalChatOptions: OpenAIAdditionalChatOptions

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:71
apiKey?

optional apiKey: string = undefined

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:74
baseURL?

optional baseURL: string = undefined

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:75
maxRetries

maxRetries: number

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:76
timeout?

optional timeout: number

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:77
additionalSessionOptions?

optional additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "maxRetries" | "timeout">

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:78
lazySession()

lazySession: () => Promise<LLMInstance>

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:85

Returns

Promise<LLMInstance>
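The connection-oriented properties above (apiKey, baseURL, maxRetries, timeout, additionalSessionOptions) configure the client that lazySession() builds. A hedged configuration sketch; the endpoint, header, and timeout values are placeholders, not defaults from the source:

```ts
// Hypothetical values, shown only to illustrate which documented properties
// feed the underlying client session built by lazySession().
const llm = new OpenAI({
  baseURL: "https://example.com/v1",
  apiKey: process.env.OPENAI_API_KEY,
  maxRetries: 5,
  timeout: 60_000, // assumed to be milliseconds; check ClientOptions
  additionalSessionOptions: {
    defaultHeaders: { "x-example-header": "value" },
  },
});
```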
Accessors

session

Get Signature

get session(): Promise<LLMInstance>

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:87

Returns

Promise<LLMInstance>
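A short sketch of resolving the accessor, under the assumption that `LLMInstance` is the client produced by `lazySession()` and that the first access triggers its creation:

```ts
// Assumption: LLMInstance is the lazily created underlying client; the
// accessor resolves (and caches) it on first use.
const client = await llm.session;
```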
supportToolCall

Get Signature

get supportToolCall(): boolean

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:132

Returns

boolean

Overrides

ToolCallLLM.supportToolCall
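A small sketch of gating tool use on this flag; the error handling is illustrative only:

```ts
// Guard tool usage on the configured model's reported capability.
if (!llm.supportToolCall) {
  throw new Error(`Model ${llm.model} does not support tool calls`);
}
```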
live

Get Signature

get live(): OpenAILive

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:136

Returns

OpenAILive
metadata

Get Signature

get metadata(): LLMMetadata & object

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:147

Returns

LLMMetadata & object

Overrides

ToolCallLLM.metadata
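A hedged sketch of reading the merged metadata; the provider-specific additions in the `& object` part are not enumerated above, so only common `LLMMetadata` fields are used:

```ts
// model and contextWindow are assumed LLMMetadata fields.
const { model, contextWindow } = llm.metadata;
console.log(`Using ${model} with a ${contextWindow}-token context window`);
```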
Methods

toOpenAIRole()

static toOpenAIRole(messageType): ChatCompletionRole

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:163

Parameters

messageType

MessageType

Returns

ChatCompletionRole
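A one-line sketch, assuming `MessageType` includes the usual "user" / "assistant" / "system" values:

```ts
// Maps a LlamaIndex MessageType to the role string expected by the OpenAI API.
const role = OpenAI.toOpenAIRole("assistant");
```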
toOpenAIMessage()

static toOpenAIMessage(messages): ChatCompletionMessageParam[]

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:176

Parameters

messages

ChatMessage<ToolCallLLMMessageOptions>[]

Returns

ChatCompletionMessageParam[]
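A minimal conversion sketch; the `ChatMessage` shape (role plus content) is assumed from the core chat types:

```ts
// Converts LlamaIndex chat messages into OpenAI chat.completions params.
const openaiMessages = OpenAI.toOpenAIMessage([
  { role: "system", content: "You are a helpful assistant." },
  { role: "user", content: "Summarize this release in one sentence." },
]);
```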
chat()

Call Signature

chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:267

Parameters

params

LLMChatParamsStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>

Overrides

ToolCallLLM.chat
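A streaming sketch, assuming the standard LlamaIndex.TS pattern where `stream: true` selects this overload and each `ChatResponseChunk` carries a `delta` string:

```ts
// stream: true selects the streaming overload; each chunk exposes a delta.
const stream = await llm.chat({
  messages: [{ role: "user", content: "Write a haiku about TypeScript." }],
  stream: true,
});
for await (const chunk of stream) {
  process.stdout.write(chunk.delta);
}
```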
Call Signature

chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:273

Parameters

params

LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<ChatResponse<ToolCallLLMMessageOptions>>

Overrides

ToolCallLLM.chat
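A non-streaming sketch; the `message.content` field on `ChatResponse` is assumed from the core chat types:

```ts
// Without stream: true, chat resolves to a single ChatResponse.
const response = await llm.chat({
  messages: [{ role: "user", content: "What is a vector index?" }],
});
console.log(response.message.content);
```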
streamChat()

protected streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:370

Parameters

baseRequestParams

ChatCompletionCreateParams

Returns

AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>
toTool()

static toTool(tool): ChatCompletionTool

Defined in: .build/typescript/packages/providers/openai/src/llm.ts:449

Parameters

tool

BaseTool

Returns

ChatCompletionTool
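A conversion sketch; the object below is a hypothetical hand-rolled tool whose `metadata` fields (name, description, parameters) follow the core `BaseTool` interface:

```ts
// Hypothetical BaseTool-shaped object; toTool reads its metadata to build
// the OpenAI function-tool definition.
const weatherTool = {
  metadata: {
    name: "get_weather",
    description: "Look up the current weather for a city.",
    parameters: {
      type: "object",
      properties: { city: { type: "string" } },
      required: ["city"],
    },
  },
  call: async ({ city }: { city: string }) => `Sunny in ${city}`,
};

const openaiTool = OpenAI.toTool(weatherTool);
```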