Skip to content

ReplicateLLM

Defined in: .build/typescript/packages/providers/replicate/src/llm.ts:107

Replicate LLM implementation, used to run Llama chat/instruct models hosted on Replicate.

Extends: BaseLLM

new ReplicateLLM(init?): ReplicateLLM

Defined in: .build/typescript/packages/providers/replicate/src/llm.ts:115

Parameters: init?: Partial<ReplicateLLM> & object

Returns: ReplicateLLM

Overrides: BaseLLM.constructor

model: "Llama-2-70b-chat-old" | "Llama-2-70b-chat-4bit" | "Llama-2-13b-chat-old" | "Llama-2-13b-chat-4bit" | "Llama-2-7b-chat-old" | "Llama-2-7b-chat-4bit" | "llama-3-70b-instruct" | "llama-3-8b-instruct"

Defined in: .build/typescript/packages/providers/replicate/src/llm.ts:108


chatStrategy: ReplicateChatStrategy

Defined in: .build/typescript/packages/providers/replicate/src/llm.ts:109


temperature: number

Defined in: .build/typescript/packages/providers/replicate/src/llm.ts:110


topP: number

Defined in: .build/typescript/packages/providers/replicate/src/llm.ts:111


optional maxTokens: number

Defined in: .build/typescript/packages/providers/replicate/src/llm.ts:112


replicateSession: ReplicateSession

Defined in: .build/typescript/packages/providers/replicate/src/llm.ts:113

get metadata(): object

Defined in: .build/typescript/packages/providers/replicate/src/llm.ts:140

object

model: "Llama-2-70b-chat-old" | "Llama-2-70b-chat-4bit" | "Llama-2-13b-chat-old" | "Llama-2-13b-chat-4bit" | "Llama-2-7b-chat-old" | "Llama-2-7b-chat-4bit" | "llama-3-70b-instruct" | "llama-3-8b-instruct"

temperature: number

topP: number

maxTokens: undefined | number

contextWindow: number

tokenizer: undefined = undefined

structuredOutput: boolean = false

Overrides: BaseLLM.metadata

mapMessagesToPrompt(messages): object

Defined in: .build/typescript/packages/providers/replicate/src/llm.ts:152

Parameters: messages: ChatMessage[]

Returns an object with:

prompt: string

systemPrompt: undefined | MessageContent


mapMessagesToPromptLlama3(messages): object

Defined in: .build/typescript/packages/providers/replicate/src/llm.ts:178

Parameters: messages: ChatMessage[]

Returns an object with:

prompt: string

systemPrompt: undefined = undefined


mapMessagesToPromptA16Z(messages): object

Defined in: .build/typescript/packages/providers/replicate/src/llm.ts:204

Parameters: messages: ChatMessage[]

Returns an object with:

prompt: string

systemPrompt: undefined = undefined


mapMessageTypeA16Z(messageType): string

Defined in: .build/typescript/packages/providers/replicate/src/llm.ts:218

Parameters: messageType: MessageType

Returns: string


mapMessagesToPromptMeta(messages, opts?): object

Defined in: .build/typescript/packages/providers/replicate/src/llm.ts:231

Parameters: messages: ChatMessage[]; opts?: an options object with three optional boolean flags (flag names were lost in extraction — verify against src/llm.ts:231)

Returns an object with:

prompt: string

systemPrompt: undefined | MessageContent


chat(params): Promise<AsyncIterable<ChatResponseChunk, any, any>>

Defined in: .build/typescript/packages/providers/replicate/src/llm.ts:307

Parameters: params: LLMChatParamsStreaming

Returns: Promise<AsyncIterable<ChatResponseChunk, any, any>>

Overrides: BaseLLM.chat

chat(params): Promise<ChatResponse<object>>

Defined in: .build/typescript/packages/providers/replicate/src/llm.ts:310

Parameters: params: LLMChatParamsNonStreaming

Returns: Promise<ChatResponse<object>>

Overrides: BaseLLM.chat