Class: OpenAI
Hierarchy
- BaseLLM<OpenAIAdditionalChatOptions>
  ↳ OpenAI
    ↳ FireworksLLM
    ↳ Groq
    ↳ TogetherLLM
Constructors
constructor
• new OpenAI(init?): OpenAI
Parameters
Name | Type |
---|---|
init? | Partial<OpenAI> & { azure?: AzureOpenAIConfig } |
Returns
OpenAI
Overrides
BaseLLM<OpenAIAdditionalChatOptions>.constructor
Defined in
packages/core/src/llm/open_ai.ts:171
Properties
additionalChatOptions
• Optional
additionalChatOptions: OpenAIAdditionalChatOptions
Defined in
packages/core/src/llm/open_ai.ts:159
additionalSessionOptions
• Optional
additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "timeout" | "maxRetries">
Defined in
packages/core/src/llm/open_ai.ts:166
apiKey
• Optional
apiKey: string
= undefined
Defined in
packages/core/src/llm/open_ai.ts:162
maxRetries
• maxRetries: number
Defined in
packages/core/src/llm/open_ai.ts:163
maxTokens
• Optional
maxTokens: number
Defined in
packages/core/src/llm/open_ai.ts:158
model
• model: string
Defined in
packages/core/src/llm/open_ai.ts:155
session
• session: OpenAISession
Defined in
packages/core/src/llm/open_ai.ts:165
temperature
• temperature: number
Defined in
packages/core/src/llm/open_ai.ts:156
timeout
• Optional
timeout: number
Defined in
packages/core/src/llm/open_ai.ts:164
topP
• topP: number
Defined in
packages/core/src/llm/open_ai.ts:157
Accessors
metadata
• get metadata(): LLMMetadata & OpenAIAdditionalMetadata
Returns
LLMMetadata & OpenAIAdditionalMetadata
Overrides
BaseLLM.metadata
Defined in
packages/core/src/llm/open_ai.ts:224
Methods
chat
▸ chat(params): Promise<AsyncIterable<ChatResponseChunk>>
Parameters
Name | Type |
---|---|
params | LLMChatParamsStreaming<OpenAIAdditionalChatOptions> |
Returns
Promise<AsyncIterable<ChatResponseChunk>>
Overrides
BaseLLM.chat
Defined in
packages/core/src/llm/open_ai.ts:276
▸ chat(params): Promise<ChatResponse>
Parameters
Name | Type |
---|---|
params | LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions> |
Returns
Promise<ChatResponse>
Overrides
BaseLLM.chat
Defined in
packages/core/src/llm/open_ai.ts:279
complete
▸ complete(params): Promise<AsyncIterable<CompletionResponse>>
Parameters
Name | Type |
---|---|
params | LLMCompletionParamsStreaming |
Returns
Promise<AsyncIterable<CompletionResponse>>
Inherited from
BaseLLM.complete
Defined in
packages/core/src/llm/base.ts:23
▸ complete(params): Promise<CompletionResponse>
Parameters
Name | Type |
---|---|
params | LLMCompletionParamsNonStreaming |
Returns
Promise<CompletionResponse>
Inherited from
BaseLLM.complete
Defined in
packages/core/src/llm/base.ts:26
mapMessageType
▸ mapMessageType(messageType): "function" | "user" | "assistant" | "system" | "tool"
Parameters
Name | Type |
---|---|
messageType | MessageType |
Returns
"function"
| "user"
| "assistant"
| "system"
| "tool"
Defined in
packages/core/src/llm/open_ai.ts:240
streamChat
▸ streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk>
Parameters
Name | Type |
---|---|
baseRequestParams | ChatCompletionCreateParams |
Returns
AsyncIterable<ChatResponseChunk>
Defined in
packages/core/src/llm/open_ai.ts:329
toOpenAIMessage
▸ toOpenAIMessage(messages): { content: MessageContent = message.content; role: "function" | "user" | "assistant" | "system" | "tool" }[]
Parameters
Name | Type |
---|---|
messages | ChatMessage[] |
Returns
{ content: MessageContent = message.content; role: "function" | "user" | "assistant" | "system" | "tool" }[]
Defined in
packages/core/src/llm/open_ai.ts:259
toTool
▸ toTool(tool): ChatCompletionTool
Parameters
Name | Type |
---|---|
tool | BaseTool |
Returns
ChatCompletionTool