Skip to content

HuggingFaceLLM

Defined in: .build/typescript/packages/providers/huggingface/src/llm.ts:31

Extends: BaseLLM

new HuggingFaceLLM(init?): HuggingFaceLLM

Defined in: .build/typescript/packages/providers/huggingface/src/llm.ts:42

Parameters: init? — HFLLMConfig (optional)

Returns: HuggingFaceLLM

Overrides: BaseLLM.constructor

modelName: string

Defined in: .build/typescript/packages/providers/huggingface/src/llm.ts:32


tokenizerName: string

Defined in: .build/typescript/packages/providers/huggingface/src/llm.ts:33


temperature: number

Defined in: .build/typescript/packages/providers/huggingface/src/llm.ts:34


topP: number

Defined in: .build/typescript/packages/providers/huggingface/src/llm.ts:35


optional maxTokens: number

Defined in: .build/typescript/packages/providers/huggingface/src/llm.ts:36


contextWindow: number

Defined in: .build/typescript/packages/providers/huggingface/src/llm.ts:37

get metadata(): LLMMetadata

Defined in: .build/typescript/packages/providers/huggingface/src/llm.ts:52

Returns: LLMMetadata

Overrides: BaseLLM.metadata

getTokenizer(): Promise<PreTrainedTokenizer>

Defined in: .build/typescript/packages/providers/huggingface/src/llm.ts:64

Returns: Promise<PreTrainedTokenizer>


getModel(): Promise<PreTrainedModel>

Defined in: .build/typescript/packages/providers/huggingface/src/llm.ts:80

Returns: Promise<PreTrainedModel>


chat(params): Promise<AsyncIterable<ChatResponseChunk, any, any>>

Defined in: .build/typescript/packages/providers/huggingface/src/llm.ts:96

Parameters: params — LLMChatParamsStreaming

Returns: Promise<AsyncIterable<ChatResponseChunk, any, any>>

Overrides: BaseLLM.chat

chat(params): Promise<ChatResponse<object>>

Defined in: .build/typescript/packages/providers/huggingface/src/llm.ts:99

Parameters: params — LLMChatParamsNonStreaming

Returns: Promise<ChatResponse<object>>

Overrides: BaseLLM.chat


protected nonStreamChat(params): Promise<ChatResponse<object>>

Defined in: .build/typescript/packages/providers/huggingface/src/llm.ts:108

Parameters: params — LLMChatParamsNonStreaming

Returns: Promise<ChatResponse<object>>


protected streamChat(params): Promise<never>

Defined in: .build/typescript/packages/providers/huggingface/src/llm.ts:143

Parameters: params — LLMChatParamsStreaming

Returns: Promise<never>