Class: abstract
ToolCallLLM<AdditionalChatOptions, AdditionalMessageOptions>
Unified language model interface
Extends
BaseLLM<AdditionalChatOptions, AdditionalMessageOptions>
Extended by
Type Parameters
• AdditionalChatOptions extends object = object
• AdditionalMessageOptions extends ToolCallLLMMessageOptions = ToolCallLLMMessageOptions
Constructors
new ToolCallLLM()
new ToolCallLLM<AdditionalChatOptions, AdditionalMessageOptions>(): ToolCallLLM<AdditionalChatOptions, AdditionalMessageOptions>
Returns
ToolCallLLM<AdditionalChatOptions, AdditionalMessageOptions>
Inherited from
Properties
metadata
abstract
metadata: LLMMetadata
Inherited from
Defined in
packages/core/llms/dist/index.d.ts:162
supportToolCall
abstract
supportToolCall: boolean
Defined in
packages/core/llms/dist/index.d.ts:169
Methods
chat()
chat(params)
abstract
chat(params): Promise<AsyncIterable<ChatResponseChunk<object>, any, any>>
Get a chat response from the LLM
Parameters
• params: LLMChatParamsStreaming<AdditionalChatOptions, AdditionalMessageOptions>
Returns
Promise<AsyncIterable<ChatResponseChunk<object>, any, any>>
Inherited from
Defined in
packages/core/llms/dist/index.d.ts:165
chat(params)
abstract
chat(params): Promise<ChatResponse<AdditionalMessageOptions>>
Parameters
• params: LLMChatParamsNonStreaming<AdditionalChatOptions, AdditionalMessageOptions>
Returns
Promise<ChatResponse<AdditionalMessageOptions>>
Inherited from
Defined in
packages/core/llms/dist/index.d.ts:166
complete()
complete(params)
complete(params): Promise<AsyncIterable<CompletionResponse, any, any>>
Get a prompt completion from the LLM
Parameters
• params: LLMCompletionParamsStreaming
Returns
Promise<AsyncIterable<CompletionResponse, any, any>>
Inherited from
Defined in
packages/core/llms/dist/index.d.ts:163
complete(params)
complete(params): Promise<CompletionResponse>
Parameters
• params: LLMCompletionParamsNonStreaming
Returns
Promise<CompletionResponse>
Inherited from
Defined in
packages/core/llms/dist/index.d.ts:164