baseUrl
baseUrl: string = "http://localhost:11434"
keepAlive
keepAlive: string | number = "5m"
model
model: string = "llama3"
Optional embeddingOnly
embeddingOnly?: boolean
Optional f16KV
f16KV?: boolean
Optional format
format?: any
Optional frequencyPenalty
frequencyPenalty?: number
Optional logitsAll
logitsAll?: boolean
Optional lowVram
lowVram?: boolean
Optional mainGpu
mainGpu?: number
Optional mirostat
mirostat?: number
Optional mirostatEta
mirostatEta?: number
Optional mirostatTau
mirostatTau?: number
Optional numBatch
numBatch?: number
Optional numCtx
numCtx?: number
Optional numGpu
numGpu?: number
Optional numKeep
numKeep?: number
Optional numPredict
numPredict?: number
Optional numThread
numThread?: number
Optional penalizeNewline
penalizeNewline?: boolean
Optional presencePenalty
presencePenalty?: number
Optional repeatLastN
repeatLastN?: number
Optional repeatPenalty
repeatPenalty?: number
Optional stop
stop?: string[]
Optional temperature
temperature?: number
Optional tfsZ
tfsZ?: number
Optional topK
topK?: number
Optional topP
topP?: number
Optional typicalP
typicalP?: number
Optional useMLock
useMLock?: boolean
Optional useMMap
useMMap?: boolean
Optional vocabOnly
vocabOnly?: boolean
Class that represents the Ollama language model. It extends the base LLM class and implements the OllamaInput interface.
Example
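A minimal usage sketch: it constructs the model against a local Ollama server and overrides a few of the optional sampling parameters listed above. The import path assumes the @langchain/community package layout; adjust it to match your project.

import { Ollama } from "@langchain/community/llms/ollama";

// Instantiate the model; unspecified options fall back to the defaults above
// (baseUrl "http://localhost:11434", model "llama3", keepAlive "5m").
const model = new Ollama({
  baseUrl: "http://localhost:11434", // local Ollama endpoint
  model: "llama3",                   // any model already pulled into Ollama
  temperature: 0.7,
  topP: 0.9,
  numCtx: 4096,
});

// Invoke the model with a plain string prompt and print the completion.
const response = await model.invoke("Why is the sky blue?");
console.log(response);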