Class: HuggingFaceLLM
Extends
BaseLLM
Constructors
new HuggingFaceLLM()
new HuggingFaceLLM(init?): HuggingFaceLLM
Parameters
• init?: HFLLMConfig
Returns
Overrides
BaseLLM.constructor
Source
packages/core/src/llm/huggingface.ts:171
Properties
contextWindow
contextWindow: number
Source
packages/core/src/llm/huggingface.ts:166
maxTokens?
optional maxTokens: number
Source
packages/core/src/llm/huggingface.ts:165
model
private model: null | PreTrainedModel = null
Source
packages/core/src/llm/huggingface.ts:169
modelName
modelName: string
Source
packages/core/src/llm/huggingface.ts:161
temperature
temperature: number
Source
packages/core/src/llm/huggingface.ts:163
tokenizer
private tokenizer: null | PreTrainedTokenizer = null
Source
packages/core/src/llm/huggingface.ts:168
tokenizerName
tokenizerName: string
Source
packages/core/src/llm/huggingface.ts:162
topP
topP: number
Source
packages/core/src/llm/huggingface.ts:164
Accessors
metadata
get metadata(): LLMMetadata
Returns
Source
packages/core/src/llm/huggingface.ts:181
Methods
chat()
chat(params)
chat(params): Promise<AsyncIterable<ChatResponseChunk>>
Parameters
• params: LLMChatParamsStreaming<object, object>
Returns
Promise<AsyncIterable<ChatResponseChunk>>
Overrides
BaseLLM.chat
Source
packages/core/src/llm/huggingface.ts:208
chat(params)
chat(params): Promise<ChatResponse<object>>