Class: TogetherLLM
Unified language model interface
Extends
Constructors
new TogetherLLM()
new TogetherLLM(init?): TogetherLLM
Parameters
• init?: Omit<Partial<OpenAI>, "session">
Returns
Overrides
Defined in
packages/llamaindex/src/llm/together.ts:5
Properties
additionalChatOptions?
optional
additionalChatOptions: OpenAIAdditionalChatOptions
Inherited from
Defined in
packages/providers/openai/dist/index.d.ts:246
additionalSessionOptions?
optional
additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "maxRetries" | "timeout">
Inherited from
OpenAI
.additionalSessionOptions
Defined in
packages/providers/openai/dist/index.d.ts:250
apiKey?
optional
apiKey: string
Inherited from
Defined in
packages/providers/openai/dist/index.d.ts:247
lazySession()
lazySession: () => Promise<LLMInstance$1>
Returns
Promise
<LLMInstance$1
>
Inherited from
Defined in
packages/providers/openai/dist/index.d.ts:251
maxRetries
maxRetries: number
Inherited from
Defined in
packages/providers/openai/dist/index.d.ts:248
maxTokens?
optional
maxTokens: number
Inherited from
Defined in
packages/providers/openai/dist/index.d.ts:245
model
model: string & object | ChatModel
Inherited from
Defined in
packages/providers/openai/dist/index.d.ts:242
temperature
temperature: number
Inherited from
Defined in
packages/providers/openai/dist/index.d.ts:243
timeout?
optional
timeout: number
Inherited from
Defined in
packages/providers/openai/dist/index.d.ts:249
topP
topP: number
Inherited from
Defined in
packages/providers/openai/dist/index.d.ts:244
Accessors
metadata
Get Signature
get metadata(): LLMMetadata & object
Returns
LLMMetadata & object
Inherited from
Defined in
packages/providers/openai/dist/index.d.ts:258
session
Get Signature
get session(): Promise<LLMInstance$1>
Returns
Promise<LLMInstance$1>
Inherited from
Defined in
packages/providers/openai/dist/index.d.ts:252
supportToolCall
Get Signature
get supportToolCall(): boolean
Returns
boolean
Inherited from
Defined in
packages/providers/openai/dist/index.d.ts:257
Methods
chat()
chat(params)
chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Get a chat response from the LLM
Parameters
• params: LLMChatParamsStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Inherited from
Defined in
packages/providers/openai/dist/index.d.ts:261
chat(params)
chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>
Parameters
• params: LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<ChatResponse<ToolCallLLMMessageOptions>>
Inherited from
Defined in
packages/providers/openai/dist/index.d.ts:262
complete()
complete(params)
complete(params): Promise<AsyncIterable<CompletionResponse, any, any>>
Get a prompt completion from the LLM
Parameters
• params: LLMCompletionParamsStreaming
Returns
Promise<AsyncIterable<CompletionResponse, any, any>>
Inherited from
Defined in
packages/core/llms/dist/index.d.ts:163
complete(params)
complete(params): Promise<CompletionResponse>
Parameters
• params: LLMCompletionParamsNonStreaming
Returns
Promise<CompletionResponse>
Inherited from
Defined in
packages/core/llms/dist/index.d.ts:164
streamChat()
protected
streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>
Parameters
• baseRequestParams: ChatCompletionCreateParams
Returns
AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>
Inherited from
Defined in
packages/providers/openai/dist/index.d.ts:263
toOpenAIMessage()
static
toOpenAIMessage(messages): ChatCompletionMessageParam[]