Skip to main content

Class: FireworksLLM

Hierarchy

Constructors

constructor

new FireworksLLM(init?): FireworksLLM

Parameters

Name | Type
init? | Partial<OpenAI>

Returns

FireworksLLM

Overrides

OpenAI.constructor

Defined in

packages/core/src/llm/fireworks.ts:5

Properties

additionalChatOptions

Optional additionalChatOptions: OpenAIAdditionalChatOptions

Inherited from

OpenAI.additionalChatOptions

Defined in

packages/core/src/llm/open_ai.ts:159


additionalSessionOptions

Optional additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "timeout" | "maxRetries">

Inherited from

OpenAI.additionalSessionOptions

Defined in

packages/core/src/llm/open_ai.ts:166


apiKey

Optional apiKey: string = undefined

Inherited from

OpenAI.apiKey

Defined in

packages/core/src/llm/open_ai.ts:162


maxRetries

maxRetries: number

Inherited from

OpenAI.maxRetries

Defined in

packages/core/src/llm/open_ai.ts:163


maxTokens

Optional maxTokens: number

Inherited from

OpenAI.maxTokens

Defined in

packages/core/src/llm/open_ai.ts:158


model

model: string

Inherited from

OpenAI.model

Defined in

packages/core/src/llm/open_ai.ts:155


session

session: OpenAISession

Inherited from

OpenAI.session

Defined in

packages/core/src/llm/open_ai.ts:165


temperature

temperature: number

Inherited from

OpenAI.temperature

Defined in

packages/core/src/llm/open_ai.ts:156


timeout

Optional timeout: number

Inherited from

OpenAI.timeout

Defined in

packages/core/src/llm/open_ai.ts:164


topP

topP: number

Inherited from

OpenAI.topP

Defined in

packages/core/src/llm/open_ai.ts:157

Accessors

metadata

get metadata(): LLMMetadata & OpenAIAdditionalMetadata

Returns

LLMMetadata & OpenAIAdditionalMetadata

Inherited from

OpenAI.metadata

Defined in

packages/core/src/llm/open_ai.ts:224

Methods

chat

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Parameters

Name | Type
params | LLMChatParamsStreaming<OpenAIAdditionalChatOptions>

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Inherited from

OpenAI.chat

Defined in

packages/core/src/llm/open_ai.ts:276

chat(params): Promise<ChatResponse>

Parameters

Name | Type
params | LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions>

Returns

Promise<ChatResponse>

Inherited from

OpenAI.chat

Defined in

packages/core/src/llm/open_ai.ts:279


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Parameters

Name | Type
params | LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

OpenAI.complete

Defined in

packages/core/src/llm/base.ts:23

complete(params): Promise<CompletionResponse>

Parameters

Name | Type
params | LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

OpenAI.complete

Defined in

packages/core/src/llm/base.ts:26


mapMessageType

mapMessageType(messageType): "function" | "user" | "assistant" | "system" | "tool"

Parameters

Name | Type
messageType | MessageType

Returns

"function" | "user" | "assistant" | "system" | "tool"

Inherited from

OpenAI.mapMessageType

Defined in

packages/core/src/llm/open_ai.ts:240


streamChat

streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk>

Parameters

Name | Type
baseRequestParams | ChatCompletionCreateParams

Returns

AsyncIterable<ChatResponseChunk>

Inherited from

OpenAI.streamChat

Defined in

packages/core/src/llm/open_ai.ts:329


toOpenAIMessage

toOpenAIMessage(messages): { content: MessageContent = message.content; role: "function" | "user" | "assistant" | "system" | "tool" }[]

Parameters

Name | Type
messages | ChatMessage[]

Returns

{ content: MessageContent = message.content; role: "function" | "user" | "assistant" | "system" | "tool" }[]

Inherited from

OpenAI.toOpenAIMessage

Defined in

packages/core/src/llm/open_ai.ts:259


toTool

toTool(tool): ChatCompletionTool

Parameters

Name | Type
tool | BaseTool

Returns

ChatCompletionTool

Inherited from

OpenAI.toTool

Defined in

packages/core/src/llm/open_ai.ts:365