# Class: OpenAI

OpenAI LLM implementation.
## Hierarchy

- `BaseLLM`

  ↳ **`OpenAI`**

    ↳↳ `FireworksLLM`

    ↳↳ `Groq`

    ↳↳ `TogetherLLM`
## Constructors

### constructor

• **new OpenAI**(`init?`): `OpenAI`

#### Parameters

| Name | Type |
| :------ | :------ |
| `init?` | `Partial`<`OpenAI`> & { `azure?`: `AzureOpenAIConfig` } |

#### Returns

`OpenAI`

#### Overrides

BaseLLM.constructor

#### Defined in

packages/core/src/llm/LLM.ts:106
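For illustration, a minimal construction sketch. It assumes the class is re-exported from the `llamaindex` package entry point; every field is optional because `init` is `Partial<OpenAI>`, and the model name and values shown are illustrative only. An `azure` block (`AzureOpenAIConfig`) can also be passed for Azure-hosted deployments.

```typescript
import { OpenAI } from "llamaindex";

// Sketch: all fields are optional (init is Partial<OpenAI>);
// omitted fields fall back to the class defaults.
const llm = new OpenAI({
  model: "gpt-4-turbo", // illustrative model name
  temperature: 0.1,
  topP: 1,
  maxTokens: 512,
  maxRetries: 10,
  timeout: 60 * 1000, // milliseconds
  apiKey: process.env.OPENAI_API_KEY, // when undefined, the environment is typically used
});
```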
## Properties

### additionalChatOptions

• `Optional` **additionalChatOptions**: `Omit`<`Partial`<`ChatCompletionCreateParams`>, `"stream"` | `"max_tokens"` | `"messages"` | `"model"` | `"temperature"` | `"top_p"` | `"tools"` | `"toolChoice"`>

#### Defined in

packages/core/src/llm/LLM.ts:82
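Because `additionalChatOptions` accepts any `ChatCompletionCreateParams` field not already managed by the class (the omitted keys above), it is the place for provider-specific request knobs. A hedged sketch, assuming standard OpenAI chat-completion parameters such as `frequency_penalty`, `presence_penalty`, and `user`:

```typescript
import { OpenAI } from "llamaindex";

// Sketch: forward chat-completion parameters the class does not manage itself.
const llm = new OpenAI({
  model: "gpt-4-turbo", // illustrative model name
  additionalChatOptions: {
    frequency_penalty: 0.5,
    presence_penalty: 0.2,
    user: "docs-example", // illustrative end-user identifier
  },
});
```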
### additionalSessionOptions

• `Optional` **additionalSessionOptions**: `Omit`<`Partial`<`ClientOptions`>, `"apiKey"` | `"timeout"` | `"maxRetries"`>

#### Defined in

packages/core/src/llm/LLM.ts:99
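`additionalSessionOptions` forwards the remaining `ClientOptions` of the underlying OpenAI client; `apiKey`, `timeout`, and `maxRetries` are excluded because they have dedicated properties on the class. A sketch, assuming the standard `openai` client options `baseURL` and `defaultHeaders`; the endpoint and header shown are hypothetical:

```typescript
import { OpenAI } from "llamaindex";

// Sketch: point the underlying client at a compatible proxy and attach a header.
// apiKey, timeout, and maxRetries are set on the class itself, not here.
const llm = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  additionalSessionOptions: {
    baseURL: "https://example.internal/v1", // hypothetical proxy endpoint
    defaultHeaders: { "x-team": "search" }, // hypothetical custom header
  },
});
```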
### apiKey

• `Optional` **apiKey**: `string` = `undefined`

#### Defined in

packages/core/src/llm/LLM.ts:95

### callbackManager

• `Optional` **callbackManager**: `CallbackManager`

#### Defined in

packages/core/src/llm/LLM.ts:104

### maxRetries

• **maxRetries**: `number`

#### Defined in

packages/core/src/llm/LLM.ts:96

### maxTokens

• `Optional` **maxTokens**: `number`

#### Defined in

packages/core/src/llm/LLM.ts:81

### model

• **model**: `string`

#### Defined in

packages/core/src/llm/LLM.ts:78

### session

• **session**: `OpenAISession`

#### Defined in

packages/core/src/llm/LLM.ts:98

### temperature

• **temperature**: `number`

#### Defined in

packages/core/src/llm/LLM.ts:79

### timeout

• `Optional` **timeout**: `number`

#### Defined in

packages/core/src/llm/LLM.ts:97

### topP

• **topP**: `number`

#### Defined in

packages/core/src/llm/LLM.ts:80
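Putting the properties above together, a minimal usage sketch. It assumes `OpenAI` inherits a `chat()` method from `BaseLLM` and that the response exposes its text via `message.content`; the exact shape may differ between llamaindex versions.

```typescript
import { OpenAI } from "llamaindex";

// Sketch, assuming BaseLLM's chat() API: send one user message and read the reply.
async function main() {
  const llm = new OpenAI({ model: "gpt-4-turbo", temperature: 0, topP: 1 });

  const response = await llm.chat({
    messages: [{ role: "user", content: "Summarize what topP controls." }],
  });

  console.log(response.message.content);
}

main().catch(console.error);
```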