
Class: OllamaEmbedding

OllamaEmbedding is an alias for Ollama that implements the BaseEmbedding interface.

Hierarchy

Ollama
  ↳ OllamaEmbedding

Implements

BaseEmbedding

Constructors

constructor

new OllamaEmbedding(params): OllamaEmbedding

Parameters

Name     Type
params   OllamaParams

Returns

OllamaEmbedding

Inherited from

Ollama.constructor

Defined in

packages/core/src/llm/ollama.ts:75
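A minimal usage sketch, assuming the class is exported from the llamaindex package and that OllamaParams accepts at least a model name (any other constructor fields would be assumptions):

```ts
import { OllamaEmbedding } from "llamaindex";

// Assumes a local Ollama server is running and the model has been pulled.
// OllamaParams is assumed to require at least the model name.
const embedModel = new OllamaEmbedding({ model: "nomic-embed-text" });

const vector = await embedModel.getTextEmbedding("hello world");
console.log(vector.length);
```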

Properties

embedBatchSize

embedBatchSize: number = DEFAULT_EMBED_BATCH_SIZE

Implementation of

BaseEmbedding.embedBatchSize

Inherited from

Ollama.embedBatchSize

Defined in

packages/core/src/embeddings/types.ts:11


hasStreaming

Readonly hasStreaming: true

Inherited from

Ollama.hasStreaming

Defined in

packages/core/src/llm/ollama.ts:61


model

model: string

Inherited from

Ollama.model

Defined in

packages/core/src/llm/ollama.ts:66


ollama

ollama: Ollama

Inherited from

Ollama.ollama

Defined in

packages/core/src/llm/ollama.ts:63


options

options: Partial<Omit<Options, "temperature" | "top_p" | "num_ctx">> & Pick<Options, "temperature" | "top_p" | "num_ctx">

Inherited from

Ollama.options

Defined in

packages/core/src/llm/ollama.ts:68

Accessors

metadata

get metadata(): LLMMetadata

Returns

LLMMetadata

Inherited from

Ollama.metadata

Defined in

packages/core/src/llm/ollama.ts:87

Methods

abort

abort(): void

Returns

void

Inherited from

Ollama.abort

Defined in

packages/core/src/llm/ollama.ts:209


chat

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Get a chat response from the LLM

Parameters

Name     Type
params   LLMChatParamsStreaming<object, object>

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Inherited from

Ollama.chat

Defined in

packages/core/src/llm/ollama.ts:99

chat(params): Promise<ChatResponse<object>>

Parameters

Name     Type
params   LLMChatParamsNonStreaming<object, object>

Returns

Promise<ChatResponse<object>>

Inherited from

Ollama.chat

Defined in

packages/core/src/llm/ollama.ts:102
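A hedged sketch of both overloads. It assumes the params object carries a messages array of { role, content } pairs, with stream: true selecting the streaming overload, as in typical LlamaIndex.TS usage; exact field names may differ by version.

```ts
import { OllamaEmbedding } from "llamaindex";

const llm = new OllamaEmbedding({ model: "llama2" });

// Non-streaming: resolves to a single ChatResponse.
const response = await llm.chat({
  messages: [{ role: "user", content: "Why is the sky blue?" }],
});
console.log(response.message.content);

// Streaming: pass stream: true and iterate the ChatResponseChunk values.
const stream = await llm.chat({
  messages: [{ role: "user", content: "Why is the sky blue?" }],
  stream: true,
});
for await (const chunk of stream) {
  process.stdout.write(chunk.delta);
}
```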


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Get a prompt completion from the LLM

Parameters

Name     Type
params   LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

Ollama.complete

Defined in

packages/core/src/llm/ollama.ts:140

complete(params): Promise<CompletionResponse>

Parameters

Name     Type
params   LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

Ollama.complete

Defined in

packages/core/src/llm/ollama.ts:143
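A similar sketch for completions, assuming the params object takes a prompt string (streaming selected via stream: true) and that CompletionResponse exposes the generated text on a text field:

```ts
import { OllamaEmbedding } from "llamaindex";

const llm = new OllamaEmbedding({ model: "llama2" });

// Non-streaming completion.
const completion = await llm.complete({ prompt: "Finish this sentence: the sky is" });
console.log(completion.text);

// Streaming completion: iterate partial CompletionResponse chunks.
const stream = await llm.complete({
  prompt: "Finish this sentence: the sky is",
  stream: true,
});
for await (const chunk of stream) {
  process.stdout.write(chunk.text);
}
```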


copy

copy(request): Promise<StatusResponse>

Parameters

Name      Type
request   CopyRequest

Returns

Promise<StatusResponse>

Inherited from

Ollama.copy

Defined in

packages/core/src/llm/ollama.ts:245


create

create(request): Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Parameters

Name      Type
request   CreateRequest & { stream: true }

Returns

Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Inherited from

Ollama.create

Defined in

packages/core/src/llm/ollama.ts:224

create(request): Promise<ProgressResponse>

Parameters

Name      Type
request   CreateRequest & { stream?: false }

Returns

Promise<ProgressResponse>

Inherited from

Ollama.create

Defined in

packages/core/src/llm/ollama.ts:227


delete

delete(request): Promise<StatusResponse>

Parameters

Name      Type
request   DeleteRequest

Returns

Promise<StatusResponse>

Inherited from

Ollama.delete

Defined in

packages/core/src/llm/ollama.ts:242


embeddings

embeddings(request): Promise<EmbeddingsResponse>

Parameters

Name      Type
request   EmbeddingsRequest

Returns

Promise<EmbeddingsResponse>

Inherited from

Ollama.embeddings

Defined in

packages/core/src/llm/ollama.ts:254
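This is the lower-level, Ollama-style call that the embedding helpers build on. The sketch assumes EmbeddingsRequest mirrors the Ollama REST API (model plus prompt) and that EmbeddingsResponse carries a single embedding array; treat the field names as assumptions.

```ts
import { OllamaEmbedding } from "llamaindex";

const embedModel = new OllamaEmbedding({ model: "nomic-embed-text" });

// Field names assumed to mirror the Ollama /api/embeddings endpoint.
const { embedding } = await embedModel.embeddings({
  model: "nomic-embed-text",
  prompt: "LlamaIndex is a data framework for LLM applications.",
});
console.log(embedding.length);
```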


encodeImage

encodeImage(image): Promise<string>

Parameters

Name    Type
image   string | Uint8Array

Returns

Promise<string>

Inherited from

Ollama.encodeImage

Defined in

packages/core/src/llm/ollama.ts:212


generate

generate(request): Promise<AsyncGenerator<GenerateResponse, any, unknown>>

Parameters

Name      Type
request   GenerateRequest & { stream: true }

Returns

Promise<AsyncGenerator<GenerateResponse, any, unknown>>

Inherited from

Ollama.generate

Defined in

packages/core/src/llm/ollama.ts:215

generate(request): Promise<GenerateResponse>

Parameters

Name      Type
request   GenerateRequest & { stream?: false }

Returns

Promise<GenerateResponse>

Inherited from

Ollama.generate

Defined in

packages/core/src/llm/ollama.ts:218
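A sketch of the pass-through to Ollama's native generate endpoint. It assumes GenerateRequest takes model and prompt and that each streamed GenerateResponse exposes its text on a response field, mirroring the Ollama REST API; verify against your client version.

```ts
import { OllamaEmbedding } from "llamaindex";

const llm = new OllamaEmbedding({ model: "llama2" });

// Streaming overload: stream: true yields an async generator of chunks.
const chunks = await llm.generate({
  model: "llama2",
  prompt: "Why is the sky blue?",
  stream: true,
});
for await (const part of chunks) {
  process.stdout.write(part.response);
}
```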


getQueryEmbedding

getQueryEmbedding(query): Promise<number[]>

Parameters

Name    Type
query   string

Returns

Promise<number[]>

Implementation of

BaseEmbedding.getQueryEmbedding

Inherited from

Ollama.getQueryEmbedding

Defined in

packages/core/src/llm/ollama.ts:194
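A small sketch: embed a search query so it can be compared against stored document vectors.

```ts
import { OllamaEmbedding } from "llamaindex";

const embedModel = new OllamaEmbedding({ model: "nomic-embed-text" });

// Query-side embedding, e.g. for retrieval against an index.
const queryVector = await embedModel.getQueryEmbedding("How do I run Ollama locally?");
console.log(queryVector.length); // dimensionality of the model's vectors
```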


getTextEmbedding

getTextEmbedding(text): Promise<number[]>

Parameters

Name   Type
text   string

Returns

Promise<number[]>

Implementation of

BaseEmbedding.getTextEmbedding

Inherited from

Ollama.getTextEmbedding

Defined in

packages/core/src/llm/ollama.ts:190
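The document-side counterpart: embed a chunk of text before storing it.

```ts
import { OllamaEmbedding } from "llamaindex";

const embedModel = new OllamaEmbedding({ model: "nomic-embed-text" });

// Document-side embedding for a single text chunk.
const textVector = await embedModel.getTextEmbedding(
  "Ollama runs large language models on your own machine.",
);
console.log(textVector.length);
```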


getTextEmbeddings

getTextEmbeddings(texts): Promise<number[][]>

Optionally override this method to retrieve multiple embeddings in a single request

Parameters

Name    Type
texts   string[]

Returns

Promise<number[][]>

Implementation of

BaseEmbedding.getTextEmbeddings

Inherited from

Ollama.getTextEmbeddings

Defined in

packages/core/src/embeddings/types.ts:28
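As the description says, this method exists as an override point. A sketch of a subclass that overrides it to issue the per-text requests concurrently (the concurrent strategy is an assumption here, not the library default):

```ts
import { OllamaEmbedding } from "llamaindex";

class ConcurrentOllamaEmbedding extends OllamaEmbedding {
  // Hypothetical override: embed all texts of a batch concurrently instead
  // of one request at a time.
  async getTextEmbeddings(texts: string[]): Promise<number[][]> {
    return Promise.all(texts.map((text) => this.getTextEmbedding(text)));
  }
}
```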


getTextEmbeddingsBatch

getTextEmbeddingsBatch(texts, options?): Promise<number[][]>

Get embeddings for a batch of texts

Parameters

Name                   Type
texts                  string[]
options?               Object
options.logProgress?   boolean

Returns

Promise<number[][]>

Implementation of

BaseEmbedding.getTextEmbeddingsBatch

Inherited from

Ollama.getTextEmbeddingsBatch

Defined in

packages/core/src/embeddings/types.ts:44
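A sketch of the batch entry point, which chunks the input internally according to embedBatchSize; logProgress is the optional flag documented above.

```ts
import { OllamaEmbedding } from "llamaindex";

const embedModel = new OllamaEmbedding({ model: "nomic-embed-text" });

// Embeds all texts, internally split into batches of embedBatchSize.
const vectors = await embedModel.getTextEmbeddingsBatch(
  ["first chunk", "second chunk", "third chunk"],
  { logProgress: true },
);
console.log(vectors.length); // 3
```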


list

list(): Promise<ListResponse>

Returns

Promise<ListResponse>

Inherited from

Ollama.list

Defined in

packages/core/src/llm/ollama.ts:248


pull

pull(request): Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Parameters

Name      Type
request   PullRequest & { stream: true }

Returns

Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Inherited from

Ollama.pull

Defined in

packages/core/src/llm/ollama.ts:233

pull(request): Promise<ProgressResponse>

Parameters

Name      Type
request   PullRequest & { stream?: false }

Returns

Promise<ProgressResponse>

Inherited from

Ollama.pull

Defined in

packages/core/src/llm/ollama.ts:236
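A sketch of the streaming overload for pulling a model. PullRequest is assumed to name the model via a model field and ProgressResponse to expose a status string, mirroring the Ollama client; check the field names against your version.

```ts
import { OllamaEmbedding } from "llamaindex";

const client = new OllamaEmbedding({ model: "nomic-embed-text" });

// Stream download progress while the model is pulled from the registry.
const progress = await client.pull({ model: "nomic-embed-text", stream: true });
for await (const update of progress) {
  console.log(update.status);
}
```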


push

push(request): Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Parameters

Name      Type
request   PushRequest & { stream: true }

Returns

Promise<AsyncGenerator<ProgressResponse, any, unknown>>

Inherited from

Ollama.push

Defined in

packages/core/src/llm/ollama.ts:200

push(request): Promise<ProgressResponse>

Parameters

Name      Type
request   PushRequest & { stream?: false }

Returns

Promise<ProgressResponse>

Inherited from

Ollama.push

Defined in

packages/core/src/llm/ollama.ts:203


show

show(request): Promise<ShowResponse>

Parameters

Name      Type
request   ShowRequest

Returns

Promise<ShowResponse>

Inherited from

Ollama.show

Defined in

packages/core/src/llm/ollama.ts:251


similarity

similarity(embedding1, embedding2, mode?): number

Parameters

Name         Type             Default value
embedding1   number[]         undefined
embedding2   number[]         undefined
mode         SimilarityType   SimilarityType.DEFAULT

Returns

number

Implementation of

BaseEmbedding.similarity

Inherited from

Ollama.similarity

Defined in

packages/core/src/embeddings/types.ts:13
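A sketch combining the embedding helpers with similarity. When mode is omitted it falls back to SimilarityType.DEFAULT, which LlamaIndex.TS treats as a cosine-style similarity (higher means more similar).

```ts
import { OllamaEmbedding } from "llamaindex";

const embedModel = new OllamaEmbedding({ model: "nomic-embed-text" });

const a = await embedModel.getTextEmbedding("cats");
const b = await embedModel.getTextEmbedding("dogs");

// Omitting mode uses SimilarityType.DEFAULT.
const score = embedModel.similarity(a, b);
console.log(score);
```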


transform

transform(nodes, _options?): Promise<BaseNode<Metadata>[]>

Parameters

Name        Type
nodes       BaseNode<Metadata>[]
_options?   any

Returns

Promise<BaseNode<Metadata>[]>

Implementation of

BaseEmbedding.transform

Inherited from

Ollama.transform

Defined in

packages/core/src/embeddings/types.ts:58
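Finally, a sketch of transform, which attaches embeddings to nodes so the class can sit in an ingestion pipeline. Document is assumed here as a convenient BaseNode subclass exported by llamaindex.

```ts
import { Document, OllamaEmbedding } from "llamaindex";

const embedModel = new OllamaEmbedding({ model: "nomic-embed-text" });

// Each returned node carries its embedding on node.embedding.
const nodes = await embedModel.transform([
  new Document({ text: "Ollama runs models locally." }),
]);
console.log(nodes[0].embedding?.length);
```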