Class: Portkey
Hierarchy
- BaseLLM
  ↳ Portkey
Constructors
constructor
• new Portkey(init?): Portkey
Parameters
| Name | Type |
| --- | --- |
| init? | Partial<Portkey> |
Returns
Portkey
Overrides
BaseLLM.constructor
Defined in
packages/core/src/llm/LLM.ts:791
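The following is a minimal construction sketch. It assumes the class is exported from the llamaindex package and that the optional fields listed under Properties (apiKey, baseURL, mode) can be supplied through the Partial<Portkey> init object; the concrete values are placeholders for illustration.

```ts
import { Portkey } from "llamaindex"; // assumed export location

// Every field of the init object is optional; anything omitted stays undefined.
const portkey = new Portkey({
  apiKey: process.env.PORTKEY_API_KEY,
  baseURL: "https://api.portkey.ai", // placeholder endpoint, not taken from this page
  mode: "single",                    // placeholder mode value, not taken from this page
});
```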
Properties
apiKey
• Optional apiKey: string = undefined
Defined in
packages/core/src/llm/LLM.ts:784
baseURL
• Optional baseURL: string = undefined
Defined in
packages/core/src/llm/LLM.ts:785
callbackManager
• Optional callbackManager: CallbackManager
Defined in
packages/core/src/llm/LLM.ts:789
llms
• Optional llms: null | [LLMOptions] = undefined
Defined in
packages/core/src/llm/LLM.ts:787
mode
• Optional mode: string = undefined
Defined in
packages/core/src/llm/LLM.ts:786
session
• session: PortkeySession
Defined in
packages/core/src/llm/LLM.ts:788
Accessors
metadata
• get metadata(): LLMMetadata
Returns
LLMMetadata
Overrides
BaseLLM.metadata
Defined in
packages/core/src/llm/LLM.ts:810
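Using the portkey instance from the constructor sketch above, the getter is read as a plain property; the concrete fields of LLMMetadata are not listed on this page.

```ts
const meta = portkey.metadata; // LLMMetadata describing the configured model
console.log(meta);
```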
Methods
chat
▸ chat(params): Promise<AsyncIterable<ChatResponseChunk>>
Parameters
| Name | Type |
| --- | --- |
| params | LLMChatParamsStreaming |
Returns
Promise<AsyncIterable<ChatResponseChunk>>
Overrides
BaseLLM.chat
Defined in
packages/core/src/llm/LLM.ts:814
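A hedged streaming sketch using the portkey instance from above. It assumes LLMChatParamsStreaming is a messages array plus stream: true, and that each ChatResponseChunk exposes a delta string; neither shape is spelled out on this page.

```ts
const stream = await portkey.chat({
  messages: [{ role: "user", content: "Write a haiku about the sea." }],
  stream: true, // selects the streaming overload
});

for await (const chunk of stream) {
  process.stdout.write(chunk.delta); // assumed chunk shape
}
```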
▸ chat(params): Promise<ChatResponse>
Parameters
| Name | Type |
| --- | --- |
| params | LLMChatParamsNonStreaming |
Returns
Promise<ChatResponse>
Overrides
BaseLLM.chat
Defined in
packages/core/src/llm/LLM.ts:817
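The non-streaming overload, under the assumption that LLMChatParamsNonStreaming is just the messages array and that ChatResponse carries the assistant reply as a message:

```ts
const response = await portkey.chat({
  messages: [{ role: "user", content: "Summarize retrieval-augmented generation in one sentence." }],
});
console.log(response.message.content); // assumed ChatResponse shape
```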
complete
▸ complete(params): Promise<AsyncIterable<CompletionResponse>>
Parameters
| Name | Type |
| --- | --- |
| params | LLMCompletionParamsStreaming |
Returns
Promise<AsyncIterable<CompletionResponse>>
Inherited from
BaseLLM.complete
Defined in
packages/core/src/llm/base.ts:18
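A sketch of the streaming completion overload, assuming LLMCompletionParamsStreaming is a prompt plus stream: true and that each streamed CompletionResponse exposes its text increment as text:

```ts
const completionStream = await portkey.complete({
  prompt: "The capital of France is",
  stream: true,
});

for await (const part of completionStream) {
  process.stdout.write(part.text); // assumed CompletionResponse shape
}
```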
▸ complete(params): Promise<CompletionResponse>
Parameters
| Name | Type |
| --- | --- |
| params | LLMCompletionParamsNonStreaming |
Returns
Promise<CompletionResponse>
Inherited from
BaseLLM.complete
Defined in
packages/core/src/llm/base.ts:21
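And the non-streaming counterpart, under the same assumed prompt/text shape:

```ts
const completion = await portkey.complete({
  prompt: "The capital of France is",
});
console.log(completion.text); // assumed CompletionResponse shape
```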
streamChat
▸ streamChat(messages, parentEvent?, params?): AsyncIterable<ChatResponseChunk>
Parameters
| Name | Type |
| --- | --- |
| messages | ChatMessage[] |
| parentEvent? | Event |
| params? | Record<string, any> |
Returns
AsyncIterable<ChatResponseChunk>
Defined in
packages/core/src/llm/LLM.ts:837
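Unlike chat, streamChat takes the message array directly and returns an AsyncIterable rather than a Promise, so it can be iterated without awaiting the call itself; the chunk shape is assumed as before.

```ts
import type { ChatMessage } from "llamaindex"; // assumed export location

const messages: ChatMessage[] = [{ role: "user", content: "Tell me a joke." }];

for await (const chunk of portkey.streamChat(messages)) {
  process.stdout.write(chunk.delta); // assumed ChatResponseChunk shape
}
```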
tokens
▸ tokens(messages): number
Parameters
| Name | Type |
| --- | --- |
| messages | ChatMessage[] |
Returns
number
Overrides
BaseLLM.tokens
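A call sketch for the token counter; this page documents only the signature, not the counting strategy, so the example simply passes a ChatMessage array and reads the numeric result.

```ts
const count = portkey.tokens([
  { role: "user", content: "How many tokens is this message?" },
]);
console.log(count); // token count reported for the given messages
```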