Class: Anthropic

Unified language model interface

Extends

ToolCallLLM<AnthropicAdditionalChatOptions>

Constructors

new Anthropic()

new Anthropic(init?): Anthropic

Parameters

init?: Partial<Anthropic>

Returns

Anthropic

Overrides

ToolCallLLM.constructor

Defined in

packages/llamaindex/src/llm/anthropic.ts:121
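
A minimal construction sketch, assuming the class is exported from the `llamaindex` package and that `apiKey` falls back to the `ANTHROPIC_API_KEY` environment variable when omitted:

```typescript
import { Anthropic } from "llamaindex";

// Every field is optional; anything left out falls back to the class defaults.
const llm = new Anthropic({
  model: "claude-3-5-sonnet",
  temperature: 0.1,
  maxTokens: 1024,
  // apiKey omitted: assumed to be read from ANTHROPIC_API_KEY.
});
```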

Properties

apiKey?

optional apiKey: string

Defined in

packages/llamaindex/src/llm/anthropic.ts:116


maxRetries

maxRetries: number

Defined in

packages/llamaindex/src/llm/anthropic.ts:117


maxTokens?

optional maxTokens: number

Defined in

packages/llamaindex/src/llm/anthropic.ts:113


model

model: "claude-3-5-sonnet" | "claude-3-opus" | "claude-3-sonnet" | "claude-3-haiku" | "claude-2.1" | "claude-instant-1.2"

Defined in

packages/llamaindex/src/llm/anthropic.ts:110


session

session: AnthropicSession

Defined in

packages/llamaindex/src/llm/anthropic.ts:119


temperature

temperature: number

Defined in

packages/llamaindex/src/llm/anthropic.ts:111


timeout?

optional timeout: number

Defined in

packages/llamaindex/src/llm/anthropic.ts:118


topP

topP: number

Defined in

packages/llamaindex/src/llm/anthropic.ts:112

Accessors

metadata

get metadata(): object

Returns

object

contextWindow

contextWindow: number

maxTokens

maxTokens: undefined | number

model

model: "claude-3-5-sonnet" | "claude-3-opus" | "claude-3-sonnet" | "claude-3-haiku" | "claude-2.1" | "claude-instant-1.2"

temperature

temperature: number

tokenizer

tokenizer: undefined = undefined

topP

topP: number

Overrides

ToolCallLLM.metadata

Defined in

packages/llamaindex/src/llm/anthropic.ts:144
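
For illustration, reading the accessor might look like this (continuing the `llm` instance from the constructor example above):

```typescript
// Inspect the resolved configuration before sending requests.
const { model, contextWindow, maxTokens } = llm.metadata;
console.log(`${model}: ${contextWindow}-token context window, maxTokens=${maxTokens}`);
```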


supportToolCall

get supportToolCall(): boolean

Returns

boolean

Overrides

ToolCallLLM.supportToolCall

Defined in

packages/llamaindex/src/llm/anthropic.ts:140

Methods

chat()

chat(params)

chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsStreaming<AnthropicAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>

Overrides

ToolCallLLM.chat

Defined in

packages/llamaindex/src/llm/anthropic.ts:291
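
A usage sketch of the streaming overload, assuming each chunk exposes its text via `delta` (continuing the `llm` instance from above):

```typescript
// Passing stream: true selects the streaming overload.
const stream = await llm.chat({
  messages: [{ role: "user", content: "Write a haiku about the sea." }],
  stream: true,
});
for await (const chunk of stream) {
  process.stdout.write(chunk.delta);
}
```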

chat(params)

chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsNonStreaming<AnthropicAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<ChatResponse<ToolCallLLMMessageOptions>>

Overrides

ToolCallLLM.chat

Defined in

packages/llamaindex/src/llm/anthropic.ts:297
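
And the non-streaming overload, which resolves to a single `ChatResponse` (same assumptions as above):

```typescript
// Omitting stream selects the non-streaming overload.
const response = await llm.chat({
  messages: [{ role: "user", content: "What is a context window?" }],
});
console.log(response.message.content);
```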


complete()

complete(params)

complete(params): Promise<AsyncIterable<CompletionResponse, any, any>>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse, any, any>>

Inherited from

ToolCallLLM.complete

Defined in

packages/core/llms/dist/index.d.ts:168

complete(params)

complete(params): Promise<CompletionResponse>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

ToolCallLLM.complete

Defined in

packages/core/llms/dist/index.d.ts:169
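
A sketch of both overloads, assuming `CompletionResponse` exposes its text via `text`; `complete` is inherited from the base class, which typically builds on `chat`:

```typescript
// Non-streaming completion: one CompletionResponse.
const completion = await llm.complete({ prompt: "The capital of France is" });
console.log(completion.text);

// Streaming completion: an async iterable of partial responses.
const chunks = await llm.complete({ prompt: "Count to five:", stream: true });
for await (const chunk of chunks) {
  process.stdout.write(chunk.text);
}
```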


formatMessages()

formatMessages(messages): MessageParam[]

Parameters

messages: ChatMessage<ToolCallLLMMessageOptions>[]

Returns

MessageParam[]

Defined in

packages/llamaindex/src/llm/anthropic.ts:162
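
This is normally invoked internally when a request is built; a hedged sketch of a direct call:

```typescript
// Convert LlamaIndex chat messages into Anthropic SDK MessageParam objects.
const params = llm.formatMessages([
  { role: "user", content: "Hello" },
  { role: "assistant", content: "Hi! How can I help?" },
]);
// params now has the shape the Anthropic Messages API expects.
```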


getModelName()

getModelName(model): string

Parameters

model: string

Returns

string

Defined in

packages/llamaindex/src/llm/anthropic.ts:155
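
For illustration only (the alias-to-identifier mapping is internal to the class):

```typescript
// Resolve a model alias to the identifier actually sent to the API.
const resolved = llm.getModelName("claude-3-5-sonnet");
```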


streamChat()

protected streamChat(messages, systemPrompt?): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>

Parameters

messages: ChatMessage<ToolCallLLMMessageOptions>[]

systemPrompt?: null | string

Returns

AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>

Defined in

packages/llamaindex/src/llm/anthropic.ts:413


toTool()

static toTool(tool): Tool

Parameters

tool: BaseTool<any>

Returns

Tool

Defined in

packages/llamaindex/src/llm/anthropic.ts:448
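
A sketch of converting a LlamaIndex tool into the Anthropic SDK's `Tool` shape; the `FunctionTool.from` helper and the JSON-schema parameter format are assumptions based on the `llamaindex` package:

```typescript
import { Anthropic, FunctionTool } from "llamaindex";

// A hypothetical tool definition for illustration.
const weatherTool = FunctionTool.from(
  ({ city }: { city: string }) => `Sunny in ${city}`,
  {
    name: "getWeather",
    description: "Get the current weather for a city",
    parameters: {
      type: "object",
      properties: { city: { type: "string" } },
      required: ["city"],
    },
  },
);

// Convert to Anthropic's tool format (name, description, input_schema).
const anthropicTool = Anthropic.toTool(weatherTool);
```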