Skip to main content

Class: Portkey

Unified language model interface

Extends

BaseLLM

Constructors

new Portkey()

new Portkey(init?): Portkey

Parameters

init?: Partial<Portkey> & ApiClientInterface

Returns

Portkey

Overrides

BaseLLM.constructor

Defined in

packages/llamaindex/src/llm/portkey.ts:68

Properties

apiKey?

optional apiKey: string = undefined

Defined in

packages/llamaindex/src/llm/portkey.ts:64


baseURL?

optional baseURL: string = undefined

Defined in

packages/llamaindex/src/llm/portkey.ts:65


session

session: PortkeySession

Defined in

packages/llamaindex/src/llm/portkey.ts:66

Accessors

metadata

get metadata(): LLMMetadata

Returns

LLMMetadata

Overrides

BaseLLM.metadata

Defined in

packages/llamaindex/src/llm/portkey.ts:80

Methods

chat()

chat(params)

chat(params): Promise<AsyncIterable<ChatResponseChunk, any, any>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsStreaming<object, object>

Returns

Promise<AsyncIterable<ChatResponseChunk, any, any>>

Overrides

BaseLLM.chat

Defined in

packages/llamaindex/src/llm/portkey.ts:84

chat(params)

chat(params): Promise<ChatResponse<object>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsNonStreaming<object, object>

Returns

Promise<ChatResponse<object>>

Overrides

BaseLLM.chat

Defined in

packages/llamaindex/src/llm/portkey.ts:87


complete()

complete(params)

complete(params): Promise<AsyncIterable<CompletionResponse, any, any>>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse, any, any>>

Inherited from

BaseLLM.complete

Defined in

packages/core/llms/dist/index.d.ts:168

complete(params)

complete(params): Promise<CompletionResponse>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

BaseLLM.complete

Defined in

packages/core/llms/dist/index.d.ts:169


streamChat()

streamChat(messages, params?): AsyncIterable<ChatResponseChunk, any, any>

Parameters

messages: ChatMessage[]

params?: Record<string, any>

Returns

AsyncIterable<ChatResponseChunk, any, any>

Defined in

packages/llamaindex/src/llm/portkey.ts:111