# Class: Anthropic

Anthropic LLM implementation

## Hierarchy

- `BaseLLM`

  ↳ **`Anthropic`**

## Constructors

### constructor
• **new Anthropic**(`init?`): `Anthropic`

#### Parameters

| Name | Type |
| :--- | :--- |
| `init?` | `Partial`<`Anthropic`> |

#### Returns

`Anthropic`

#### Overrides

BaseLLM.constructor

#### Defined in

packages/core/src/llm/LLM.ts:654
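A minimal construction sketch. It assumes the class is re-exported from the top-level `llamaindex` package; since `init` is `Partial<Anthropic>`, every field shown is optional and unset fields fall back to the class defaults:

```typescript
import { Anthropic } from "llamaindex";

const llm = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY, // property defaults to undefined
  model: "claude-3-opus",
  temperature: 0.1,
  maxTokens: 512,
  maxRetries: 3,
});
```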
## Properties

### apiKey

• `Optional` **apiKey**: `string` = `undefined`

#### Defined in

packages/core/src/llm/LLM.ts:647

### callbackManager

• `Optional` **callbackManager**: `CallbackManager`

#### Defined in

packages/core/src/llm/LLM.ts:652

### maxRetries

• **maxRetries**: `number`

#### Defined in

packages/core/src/llm/LLM.ts:648

### maxTokens

• `Optional` **maxTokens**: `number`

#### Defined in

packages/core/src/llm/LLM.ts:644

### model

• **model**: `"claude-3-opus"` \| `"claude-3-sonnet"` \| `"claude-2.1"` \| `"claude-instant-1.2"`

#### Defined in

packages/core/src/llm/LLM.ts:641

### session

• **session**: `AnthropicSession`

#### Defined in

packages/core/src/llm/LLM.ts:650

### temperature

• **temperature**: `number`

#### Defined in

packages/core/src/llm/LLM.ts:642

### timeout

• `Optional` **timeout**: `number`

#### Defined in

packages/core/src/llm/LLM.ts:649

### topP

• **topP**: `number`

#### Defined in

packages/core/src/llm/LLM.ts:643
## Accessors

### metadata

• `get` **metadata**(): `Object`

#### Returns

`Object`

| Name | Type |
| :--- | :--- |
| `contextWindow` | `number` |
| `maxTokens` | `undefined` \| `number` |
| `model` | `"claude-3-opus"` \| `"claude-3-sonnet"` \| `"claude-2.1"` \| `"claude-instant-1.2"` |
| `temperature` | `number` |
| `tokenizer` | `undefined` |
| `topP` | `number` |

#### Overrides

BaseLLM.metadata

#### Defined in

packages/core/src/llm/LLM.ts:679
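The getter returns a plain snapshot of the configured parameters. A usage sketch, reusing the `llm` instance from the constructor example above:

```typescript
const { contextWindow, model, temperature, maxTokens, topP } = llm.metadata;
console.log(`${model}: context window of ${contextWindow} tokens`);
// `tokenizer` is `undefined` here, i.e. no client-side tokenizer is exposed.
```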
## Methods

### chat

▸ **chat**(`params`): `Promise`<`AsyncIterable`<`ChatResponseChunk`>>

#### Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMChatParamsStreaming` |

#### Returns

`Promise`<`AsyncIterable`<`ChatResponseChunk`>>

#### Overrides

BaseLLM.chat

#### Defined in

packages/core/src/llm/LLM.ts:710
▸ **chat**(`params`): `Promise`<`ChatResponse`>

#### Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMChatParamsNonStreaming` |

#### Returns

`Promise`<`ChatResponse`>

#### Overrides

BaseLLM.chat

#### Defined in

packages/core/src/llm/LLM.ts:713
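Which overload applies is selected by the params type. The sketch below assumes the conventional LlamaIndex.TS shapes: `LLMChatParamsStreaming` is the non-streaming params plus a `stream: true` flag, `ChatResponse` exposes a `message`, and `ChatResponseChunk` a `delta` string:

```typescript
const messages = [{ role: "user" as const, content: "Say hello." }];

// Non-streaming overload: resolves to a single ChatResponse.
const response = await llm.chat({ messages });
console.log(response.message.content);

// Streaming overload: resolves to an AsyncIterable of ChatResponseChunk.
const stream = await llm.chat({ messages, stream: true });
for await (const chunk of stream) {
  process.stdout.write(chunk.delta);
}
```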
### complete

▸ **complete**(`params`): `Promise`<`AsyncIterable`<`CompletionResponse`>>

#### Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMCompletionParamsStreaming` |

#### Returns

`Promise`<`AsyncIterable`<`CompletionResponse`>>

#### Inherited from

BaseLLM.complete

#### Defined in

packages/core/src/llm/base.ts:18
▸ **complete**(`params`): `Promise`<`CompletionResponse`>

#### Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMCompletionParamsNonStreaming` |

#### Returns

`Promise`<`CompletionResponse`>

#### Inherited from

BaseLLM.complete

#### Defined in

packages/core/src/llm/base.ts:21
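`complete` is inherited from `BaseLLM`, which conventionally wraps the prompt into a single-message chat call. A sketch, assuming the params carry a `prompt` string plus the same `stream` discriminator, and that `CompletionResponse` exposes a `text` field:

```typescript
// Non-streaming completion.
const completion = await llm.complete({ prompt: "The sky is" });
console.log(completion.text);

// Streaming completion.
const completionStream = await llm.complete({ prompt: "Write a haiku.", stream: true });
for await (const chunk of completionStream) {
  process.stdout.write(chunk.text);
}
```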
### formatMessages

▸ **formatMessages**(`messages`): { `content`: `any` = message.content; `role`: `"user"` \| `"assistant"` = message.role }[]

#### Parameters

| Name | Type |
| :--- | :--- |
| `messages` | `ChatMessage`[] |

#### Returns

{ `content`: `any` = message.content; `role`: `"user"` \| `"assistant"` = message.role }[]

#### Defined in

packages/core/src/llm/LLM.ts:697
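Per the return type, this strips each `ChatMessage` down to the `content`/`role` pair accepted by the Anthropic messages API; how roles other than `"user"` and `"assistant"` (e.g. `"system"`) are handled is not visible from the signature. A sketch:

```typescript
const formatted = llm.formatMessages([
  { role: "user", content: "Hi!" },
  { role: "assistant", content: "Hello, how can I help?" },
]);
// => [{ role: "user", content: "Hi!" },
//     { role: "assistant", content: "Hello, how can I help?" }]
```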
### getModelName

▸ **getModelName**(`model`): `string`

#### Parameters

| Name | Type |
| :--- | :--- |
| `model` | `string` |

#### Returns

`string`

#### Defined in

packages/core/src/llm/LLM.ts:690
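A hedged usage sketch; the signature only guarantees a string-to-string mapping, and the exact resolution logic is internal to the class:

```typescript
// Resolves a model string to the name the class will actually report/use.
const name = llm.getModelName("claude-3-opus");
console.log(name);
```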
### streamChat

▸ **streamChat**(`messages`, `parentEvent?`, `systemPrompt?`): `AsyncIterable`<`ChatResponseChunk`>

#### Parameters

| Name | Type |
| :--- | :--- |
| `messages` | `ChatMessage`[] |
| `parentEvent?` | `Event` |
| `systemPrompt?` | `null` \| `string` |

#### Returns

`AsyncIterable`<`ChatResponseChunk`>

#### Defined in

packages/core/src/llm/LLM.ts:754
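This is the lower-level streaming entry point; unlike the streaming `chat` overload it returns the `AsyncIterable` directly rather than a `Promise`. A sketch, again assuming `ChatResponseChunk` carries a `delta` string:

```typescript
const chunks = llm.streamChat(
  [{ role: "user" as const, content: "Stream a short poem." }],
  undefined,         // parentEvent: optional tracing hook
  "You are a poet.", // systemPrompt: optional, may be null
);
for await (const chunk of chunks) {
  process.stdout.write(chunk.delta);
}
```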
### tokens

▸ **tokens**(`messages`): `number`

#### Parameters

| Name | Type |
| :--- | :--- |
| `messages` | `ChatMessage`[] |

#### Returns

`number`

#### Overrides

BaseLLM.tokens