ollama-haskell-0.1.0.0: Ollama Haskell library
Safe Haskell: Safe-Inferred
Language: GHC2021

Ollama

Description

This library lets you run LLMs from within Haskell projects. It is inspired by `ollama-python`.

Main APIs

Generate Texts

generate :: GenerateOps -> IO (Either String GenerateResponse) Source #

Generates text with the given model, returning either a GenerateResponse or an error message. It takes a GenerateOps configuration and performs a request to the Ollama generate API.

Examples:

Basic usage without streaming:

let ops = GenerateOps 
        { modelName = "llama3.2"
        , prompt = "Tell me a joke."
        , suffix = Nothing
        , images = Nothing
        , format = Nothing
        , system = Nothing
        , template = Nothing
        , stream = Nothing
        , raw = Nothing
        , keepAlive = Nothing
        }
result <- generate ops
case result of
  Left errorMsg -> putStrLn ("Error: " ++ errorMsg)
  Right response -> print response

Usage with streaming to print responses to the console:

void $
  generate
    defaultGenerateOps
      { modelName = "llama3.2"
      , prompt = "what is functional programming?"
      , stream = Just (T.putStr . response_, pure ())
      }

In this example, the first function in the stream tuple processes each chunk of the response by printing it, and the second function is a simple no-op flush.

defaultGenerateOps :: GenerateOps Source #

A default GenerateOps value with preset fields, intended to be overridden via record update.

Example:

let ops = defaultGenerateOps
generate ops

This will generate a response using the default configuration.

data GenerateOps Source #

Input type for generate functions. This data type represents all possible configurations that you can pass to the Ollama generate API.

Example:

let ops = GenerateOps 
        { modelName = "llama3.2"
        , prompt = "What is the meaning of life?"
        , suffix = Nothing
        , images = Nothing
        , format = Just "text"
        , system = Nothing
        , template = Nothing
        , stream = Nothing
        , raw = Just False
        , keepAlive = Just "yes"
        }

Constructors

GenerateOps 

Fields

  • modelName :: Text

    The name of the model to be used for generation.

  • prompt :: Text

    The prompt text that will be provided to the model for generating a response.

  • suffix :: Maybe Text

    An optional suffix to append to the generated text.

  • images :: Maybe [Text]

    Optional list of base64 encoded images to include with the request.

  • format :: Maybe Text

    An optional format specifier for the response.

  • system :: Maybe Text

    Optional system text that can be included in the generation context.

  • template :: Maybe Text

    An optional template to format the response.

  • stream :: Maybe (GenerateResponse -> IO (), IO ())

    An optional streaming function where the first function handles each chunk of response, and the second flushes the stream.

  • raw :: Maybe Bool

    An optional flag to return the raw response.

  • keepAlive :: Maybe Text

    Optional text to specify keep-alive behavior.

data GenerateResponse Source #

Result type for generate function containing the model's response and meta-information.
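
Example (a small sketch; response_ is the same accessor used in the streaming example above and returns the generated text):

result <- generate defaultGenerateOps { modelName = "llama3.2", prompt = "Tell me a joke." }
case result of
  Left errorMsg -> putStrLn ("Error: " ++ errorMsg)
  Right resp -> T.putStrLn (response_ resp)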

Constructors

GenerateResponse 

Fields

Chat with LLMs

chat :: ChatOps -> IO (Either String ChatResponse) Source #

Initiates a chat session with the specified ChatOps configuration and returns either a ChatResponse or an error message.

This function sends a request to the Ollama chat API with the given options.

Example:

let ops = defaultChatOps
result <- chat ops
case result of
  Left errorMsg -> putStrLn ("Error: " ++ errorMsg)
  Right response -> print response

data Role Source #

Enumerated roles that can participate in a chat.

Constructors

System 
User 
Assistant 
Tool 

Instances

Instances details
FromJSON Role Source # 
Instance details

Defined in Data.Ollama.Chat

ToJSON Role Source # 
Instance details

Defined in Data.Ollama.Chat

Show Role Source # 
Instance details

Defined in Data.Ollama.Chat

Methods

showsPrec :: Int -> Role -> ShowS #

show :: Role -> String #

showList :: [Role] -> ShowS #

Eq Role Source # 
Instance details

Defined in Data.Ollama.Chat

Methods

(==) :: Role -> Role -> Bool #

(/=) :: Role -> Role -> Bool #

defaultChatOps :: ChatOps Source #

A default configuration for initiating a chat with a model. This can be used as a starting point and modified as needed.

Example:

let ops = defaultChatOps { chatModelName = "customModel" }
chat ops
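
A custom conversation can be sent by building Message values (documented under Types below). The messages field name and its NonEmpty Message type are assumptions for illustration only and are not expanded in this excerpt; the :| constructor comes from Data.List.NonEmpty:

let userMsg = Message { role = User, content = "What is functional programming?", images = Nothing }
result <- chat defaultChatOps { chatModelName = "llama3.2", messages = userMsg :| [] }
case result of
  Left errorMsg -> putStrLn ("Error: " ++ errorMsg)
  Right response -> print response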

data ChatResponse Source #

Constructors

ChatResponse 

Fields

data ChatOps Source #

Constructors

ChatOps 

Fields

Instances

Instances details
ToJSON ChatOps Source # 
Instance details

Defined in Data.Ollama.Chat

Show ChatOps Source # 
Instance details

Defined in Data.Ollama.Chat

Eq ChatOps Source # 
Instance details

Defined in Data.Ollama.Chat

Methods

(==) :: ChatOps -> ChatOps -> Bool #

(/=) :: ChatOps -> ChatOps -> Bool #

Embeddings

embedding Source #

Arguments

:: Text                          Model
-> Text                          Input
-> IO (Maybe EmbeddingResp)

Embedding API
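
Example (a minimal sketch, assuming a local Ollama server with the "llama3.2" model available and a Show instance for EmbeddingResp):

mResp <- embedding "llama3.2" "Functional programming is declarative."
case mResp of
  Nothing -> putStrLn "Embedding request failed"
  Just resp -> print resp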

embeddingOps Source #

Arguments

:: Text                          Model
-> Text                          Input
-> Maybe Bool                    Truncate
-> Maybe Text                    Keep Alive
-> IO (Maybe EmbeddingResp)

Embedding API
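
Example (a sketch of the same call with the extra options; the keep-alive value "5m" is illustrative):

mResp <- embeddingOps "llama3.2" "Functional programming is declarative." (Just True) (Just "5m")
case mResp of
  Nothing -> putStrLn "Embedding request failed"
  Just resp -> print resp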

Ollama operations

Copy Models

copyModel Source #

Arguments

:: Text                          Source model
-> Text                          Destination model
-> IO ()

Copy model from source to destination
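
Example (both model names are illustrative):

copyModel "llama3.2" "llama3.2-backup"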

Create Models

createModel Source #

Arguments

:: Text                          Model Name
-> Maybe Text                    Model File
-> Maybe FilePath                Path
-> IO ()

Create a new model. Please note: if you specify both a Model File and a Path, the Model File will be used.
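
Example (a sketch; the model name and Modelfile contents are illustrative):

createModel "mario" (Just "FROM llama3.2\nSYSTEM You are Mario from Super Mario Bros.") Nothing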

createModelOps Source #

Arguments

:: Text                          Model Name
-> Maybe Text                    Model File
-> Maybe Bool                    Stream
-> Maybe FilePath                Path
-> IO ()

Create a new model from either a Model File or a Path. Please note: if you specify both, the Model File will be used.
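
Example (the same model created with streaming disabled; the name and Modelfile contents are illustrative):

createModelOps "mario" (Just "FROM llama3.2\nSYSTEM You are Mario from Super Mario Bros.") (Just False) Nothing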

Delete Models

deleteModel Source #

Arguments

:: Text                          Model name
-> IO ()

Delete a model
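
Example (the model name is illustrative):

deleteModel "mario"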

List Models

list :: IO (Maybe Models) Source #

List all models available locally.
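
Example (a small sketch; the Models constructor wraps a list of ModelInfo, as documented under Types below):

mModels <- list
case mModels of
  Nothing -> putStrLn "Could not list models"
  Just (Models infos) -> putStrLn ("Local models: " ++ show (length infos))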

List currently running models

ps :: IO (Maybe RunningModels) Source #

List running models
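
Example (a sketch, assuming a Show instance for RunningModels):

mRunning <- ps
case mRunning of
  Nothing -> putStrLn "Could not list running models"
  Just running -> print running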

Push and Pull

push Source #

Arguments

:: Text                          Model name
-> IO ()

Push a model

pushOps Source #

Arguments

:: Text                          Model name
-> Maybe Bool                    Insecure
-> Maybe Bool                    Stream
-> IO ()

Push a model with options
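
Example (a sketch; the model name is illustrative):

pushOps "myModel" (Just False) (Just True)

This pushes "myModel" over a secure connection with streaming enabled.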

pull Source #

Arguments

:: Text                          Model Name
-> IO ()

Pull a model using default options. This simplifies the pull operation by not requiring additional options.

Example:

pull "myModel"

This will pull "myModel" using default settings (no insecure connections and no streaming).

pullOps Source #

Arguments

:: Text                          Model Name
-> Maybe Bool                    Insecure
-> Maybe Bool                    Stream
-> IO ()

Pull a model with additional options for insecure connections and streaming. This function interacts directly with the Ollama API to download the specified model.

Example:

pullOps "myModel" (Just True) (Just True)

This will attempt to pull "myModel" with insecure connections allowed and enable streaming.

Show Model Info

showModel Source #

Arguments

:: Text                          Model name
-> IO (Maybe ShowModelResponse)

Show the given model's information.

Higher-level API for show.

Since: 1.0.0.0
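
Example (a sketch, assuming a Show instance for ShowModelResponse):

mInfo <- showModel "llama3.2"
case mInfo of
  Nothing -> putStrLn "Could not fetch model information"
  Just info -> print info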

showModelOps Source #

Arguments

:: Text                          Model name
-> Maybe Bool                    Verbose
-> IO (Maybe ShowModelResponse)

Show the given model's information, with options.

Since: 1.0.0.0
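
Example (a sketch requesting verbose output):

mInfo <- showModelOps "llama3.2" (Just True)
case mInfo of
  Nothing -> putStrLn "Could not fetch model information"
  Just info -> print info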

Types

data ShowModelResponse Source #

Output structure for show model information.

Constructors

ShowModelResponse 

Fields

newtype Models Source #

Constructors

Models [ModelInfo] 

Instances

Instances details
FromJSON Models Source # 
Instance details

Defined in Data.Ollama.List

Show Models Source # 
Instance details

Defined in Data.Ollama.List

Eq Models Source # 
Instance details

Defined in Data.Ollama.List

Methods

(==) :: Models -> Models -> Bool #

(/=) :: Models -> Models -> Bool #

data ModelInfo Source #

Constructors

ModelInfo 

Instances

Instances details
FromJSON ModelInfo Source # 
Instance details

Defined in Data.Ollama.List

Show ModelInfo Source # 
Instance details

Defined in Data.Ollama.List

Eq ModelInfo Source # 
Instance details

Defined in Data.Ollama.List

data Message Source #

Represents a message within a chat, including its role and content.
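
Example (record syntax; the role, content, and images field names match the generic representation shown below):

let userMsg = Message { role = User, content = "What is functional programming?", images = Nothing }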

Constructors

Message 

Fields

Instances

Instances details
FromJSON Message Source # 
Instance details

Defined in Data.Ollama.Chat

ToJSON Message Source # 
Instance details

Defined in Data.Ollama.Chat

Generic Message Source # 
Instance details

Defined in Data.Ollama.Chat

Associated Types

type Rep Message :: Type -> Type #

Methods

from :: Message -> Rep Message x #

to :: Rep Message x -> Message #

Show Message Source # 
Instance details

Defined in Data.Ollama.Chat

Eq Message Source # 
Instance details

Defined in Data.Ollama.Chat

Methods

(==) :: Message -> Message -> Bool #

(/=) :: Message -> Message -> Bool #

type Rep Message Source # 
Instance details

Defined in Data.Ollama.Chat

type Rep Message = D1 ('MetaData "Message" "Data.Ollama.Chat" "ollama-haskell-0.1.0.0-Hhs8YtljEqoGZTH1LFF25l" 'False) (C1 ('MetaCons "Message" 'PrefixI 'True) (S1 ('MetaSel ('Just "role") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 Role) :*: (S1 ('MetaSel ('Just "content") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 Text) :*: S1 ('MetaSel ('Just "images") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 (Maybe [Text])))))