Class: OpenAIResponses

OpenAIResponses

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:49

Extends: ToolCallLLM<OpenAIResponsesChatOptions>

new OpenAIResponses(init?): OpenAIResponses

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:76

Parameters: init? — Omit<Partial<OpenAIResponses>, "session"> & object

Returns: OpenAIResponses

Overrides: ToolCallLLM<OpenAIResponsesChatOptions>.constructor

model: string

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:50


temperature: number

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:51


topP: number

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:52


optional maxOutputTokens: number

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:53


optional additionalChatOptions: OpenAIResponsesChatOptions

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:54


optional reasoningEffort: "low" | "medium" | "high"

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:55


optional apiKey: string

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:56


optional baseURL: string

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:57


maxRetries: number

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:58


optional timeout: number

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:59


optional additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "maxRetries" | "timeout">

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:60


lazySession: () => Promise<LLMInstance>

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:64

Returns: Promise<LLMInstance>


trackPreviousResponses: boolean

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:65


store: boolean

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:66


user: string

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:67


callMetadata: StoredValue

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:68


builtInTools: null | Tool[]

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:69


strict: boolean

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:70


include: null | ResponseIncludable[]

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:71


instructions: string

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:72


previousResponseId: null | string

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:73


truncation: null | "auto" | "disabled"

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:74

get session(): Promise<LLMInstance>

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:127

Returns: Promise<LLMInstance>


get supportToolCall(): boolean

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:134

Returns: boolean

Overrides: ToolCallLLM.supportToolCall


get metadata(): LLMMetadata & object

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:138

Returns: LLMMetadata & object

Overrides: ToolCallLLM.metadata

chat(params): Promise<AsyncIterable<ChatResponseChunk<OpenAIResponsesMessageOptions>, any, any>>

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:544

Parameters: params — LLMChatParamsStreaming<OpenAIResponsesChatOptions, OpenAIResponsesMessageOptions>

Returns: Promise<AsyncIterable<ChatResponseChunk<OpenAIResponsesMessageOptions>, any, any>>

Overrides: ToolCallLLM.chat

chat(params): Promise<ChatResponse<OpenAIResponsesMessageOptions>>

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:550

Parameters: params — LLMChatParamsNonStreaming<OpenAIResponsesChatOptions, OpenAIResponsesMessageOptions>

Returns: Promise<ChatResponse<OpenAIResponsesMessageOptions>>

Overrides: ToolCallLLM.chat


protected streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<OpenAIResponsesMessageOptions>>

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:636

Parameters: baseRequestParams — ResponseCreateParams

Returns: AsyncIterable<ChatResponseChunk<OpenAIResponsesMessageOptions>>


toOpenAIResponsesRole(messageType): OpenAIResponsesRole

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:655

Parameters: messageType — MessageType

Returns: OpenAIResponsesRole


toOpenAIResponseMessage(message): ResponseInputItem | ResponseInputItem[]

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:778

Parameters: message — ChatMessage<OpenAIResponsesMessageOptions>

Returns: ResponseInputItem | ResponseInputItem[]


toOpenAIResponseMessages(messages): ResponseInput

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:796

Parameters: messages — ChatMessage<OpenAIResponsesMessageOptions>[]

Returns: ResponseInput


toResponsesTool(tool): Tool

Defined in: .build/typescript/packages/providers/openai/src/responses.ts:811

Parameters: tool — BaseTool

Returns: Tool