From 20c58a2eac95d389e42ca5bdc8f6ab5c36f83244 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Tue, 3 Dec 2024 13:31:31 +0100 Subject: [PATCH 01/35] feat: support running ollama from the local binary --- .../modules/ollama/install-dependencies.sh | 3 + .github/workflows/ci-test-go.yml | 11 + docs/modules/ollama.md | 38 ++ modules/ollama/examples_test.go | 70 +++ modules/ollama/go.mod | 2 +- modules/ollama/local.go | 479 ++++++++++++++++++ modules/ollama/local_test.go | 241 +++++++++ modules/ollama/ollama.go | 24 +- modules/ollama/options.go | 41 ++ modules/ollama/options_test.go | 41 ++ 10 files changed, 948 insertions(+), 2 deletions(-) create mode 100755 .github/scripts/modules/ollama/install-dependencies.sh create mode 100644 modules/ollama/local.go create mode 100644 modules/ollama/local_test.go create mode 100644 modules/ollama/options_test.go diff --git a/.github/scripts/modules/ollama/install-dependencies.sh b/.github/scripts/modules/ollama/install-dependencies.sh new file mode 100755 index 0000000000..f041595b4b --- /dev/null +++ b/.github/scripts/modules/ollama/install-dependencies.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +curl -fsSL https://ollama.com/install.sh | sh diff --git a/.github/workflows/ci-test-go.yml b/.github/workflows/ci-test-go.yml index 54a3a3d152..b3c8a131ed 100644 --- a/.github/workflows/ci-test-go.yml +++ b/.github/workflows/ci-test-go.yml @@ -109,6 +109,17 @@ jobs: working-directory: ./${{ inputs.project-directory }} run: go build + - name: Install dependencies + working-directory: ./${{ inputs.project-directory }} + shell: bash + run: | + SCRIPT_PATH="./.github/scripts/${{ inputs.project-directory }}/install-dependencies.sh" + if [ -f "$SCRIPT_PATH" ]; then + bash "$SCRIPT_PATH" + else + echo "No dependencies script found at $SCRIPT_PATH - skipping installation" + fi + - name: go test # only run tests on linux, there are a number of things that won't allow the tests to run on anything else # many (maybe, 
all?) images used can only be build on Linux, they don't have Windows in their manifest, and diff --git a/docs/modules/ollama.md b/docs/modules/ollama.md index c16e612142..ec2b61f789 100644 --- a/docs/modules/ollama.md +++ b/docs/modules/ollama.md @@ -16,10 +16,15 @@ go get github.com/testcontainers/testcontainers-go/modules/ollama ## Usage example +The module allows you to run the Ollama container or the local Ollama binary. + [Creating a Ollama container](../../modules/ollama/examples_test.go) inside_block:runOllamaContainer +[Running the local Ollama binary](../../modules/ollama/examples_test.go) inside_block:localOllama +If the local Ollama binary fails to execute, the module will fallback to the container version of Ollama. + ## Module Reference ### Run function @@ -48,6 +53,39 @@ When starting the Ollama container, you can pass options in a variadic way to co If you need to set a different Ollama Docker image, you can set a valid Docker image as the second argument in the `Run` function. E.g. `Run(context.Background(), "ollama/ollama:0.1.25")`. +#### Use Local + +- Not available until the next release of testcontainers-go :material-tag: main + +If you need to run the local Ollama binary, you can set the `UseLocal` option in the `Run` function. +This option accepts a list of environment variables as a string, that will be applied to the Ollama binary when executing commands. + +E.g. `Run(context.Background(), "ollama/ollama:0.1.25", WithUseLocal("OLLAMA_DEBUG=true"))`. + +All the container methods are available when using the local Ollama binary, but will be executed locally instead of inside the container. +Please consider the following differences when using the local Ollama binary: + +- The local Ollama binary will create a log file in the current working directory, identified by the session ID. E.g. `local-ollama-.log`. 
+- `ConnectionString` returns the connection string to connect to the local Ollama binary instead of the container, which maps to `127.0.0.1:11434`. +- `ContainerIP` returns `127.0.0.1`. +- `ContainerIPs` returns `["127.0.0.1"]`. +- `CopyToContainer`, `CopyDirToContainer`, `CopyFileToContainer` and `CopyFileFromContainer` don't perform any action. +- `GetLogProductionErrorChannel` returns a nil channel. +- `Endpoint` returns the endpoint to connect to the local Ollama binary instead of the container, which maps to `127.0.0.1:11434`. +- `Exec` passes the command to the local Ollama binary instead of inside the container. The first argument is the command to execute, and the second argument is the list of arguments. +- `GetContainerID` returns the container ID of the local Ollama binary instead of the container, which maps to `local-ollama-<session-id>`. +- `Host` returns `127.0.0.1`. +- `Inspect` returns a ContainerJSON with the state of the local Ollama binary. +- `IsRunning` returns true if the local Ollama binary process is running. +- `Logs` returns the logs from the local Ollama binary instead of the container. +- `MappedPort` returns the port mapping for the local Ollama binary instead of the container. +- `Start` starts the local Ollama binary process. +- `State` returns the current state of the local Ollama binary process, `stopped` or `running`. +- `Stop` stops the local Ollama binary process. +- `Terminate` calls the `Stop` method and then removes the log file. + +The local Ollama binary will create a log file in the current working directory, and it will be available in the container's `Logs` method. 
+ {% include "../features/common_functional_options.md" %} ### Container Methods diff --git a/modules/ollama/examples_test.go b/modules/ollama/examples_test.go index 741db846be..188be45bbb 100644 --- a/modules/ollama/examples_test.go +++ b/modules/ollama/examples_test.go @@ -173,3 +173,73 @@ func ExampleRun_withModel_llama2_langchain() { // Intentionally not asserting the output, as we don't want to run this example in the tests. } + +func ExampleRun_withLocal() { + ctx := context.Background() + + // localOllama { + ollamaContainer, err := tcollama.Run(ctx, "ollama/ollama:0.3.13", tcollama.WithUseLocal("OLLAMA_DEBUG=true")) + defer func() { + if err := testcontainers.TerminateContainer(ollamaContainer); err != nil { + log.Printf("failed to terminate container: %s", err) + } + }() + if err != nil { + log.Printf("failed to start container: %s", err) + return + } + // } + + model := "llama3.2:1b" + + _, _, err = ollamaContainer.Exec(ctx, []string{"ollama", "pull", model}) + if err != nil { + log.Printf("failed to pull model %s: %s", model, err) + return + } + + _, _, err = ollamaContainer.Exec(ctx, []string{"ollama", "run", model}) + if err != nil { + log.Printf("failed to run model %s: %s", model, err) + return + } + + connectionStr, err := ollamaContainer.ConnectionString(ctx) + if err != nil { + log.Printf("failed to get connection string: %s", err) + return + } + + var llm *langchainollama.LLM + if llm, err = langchainollama.New( + langchainollama.WithModel(model), + langchainollama.WithServerURL(connectionStr), + ); err != nil { + log.Printf("failed to create langchain ollama: %s", err) + return + } + + completion, err := llm.Call( + context.Background(), + "how can Testcontainers help with testing?", + llms.WithSeed(42), // the lower the seed, the more deterministic the completion + llms.WithTemperature(0.0), // the lower the temperature, the more creative the completion + ) + if err != nil { + log.Printf("failed to create langchain ollama: %s", err) + return + 
} + + words := []string{ + "easy", "isolation", "consistency", + } + lwCompletion := strings.ToLower(completion) + + for _, word := range words { + if strings.Contains(lwCompletion, word) { + fmt.Println(true) + } + } + + // Intentionally not asserting the output, as we don't want to run this example in the tests. +} diff --git a/modules/ollama/go.mod b/modules/ollama/go.mod index b3a1b1e5c2..86e34ca34e 100644 --- a/modules/ollama/go.mod +++ b/modules/ollama/go.mod @@ -4,6 +4,7 @@ go 1.22 require ( github.com/docker/docker v27.1.1+incompatible + github.com/docker/go-connections v0.5.0 github.com/google/uuid v1.6.0 github.com/stretchr/testify v1.9.0 github.com/testcontainers/testcontainers-go v0.34.0 @@ -22,7 +23,6 @@ require ( github.com/davecgh/go-spew v1.1.1 // indirect github.com/distribution/reference v0.6.0 // indirect github.com/dlclark/regexp2 v1.8.1 // indirect - github.com/docker/go-connections v0.5.0 // indirect github.com/docker/go-units v0.5.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/go-logr/logr v1.4.1 // indirect diff --git a/modules/ollama/local.go b/modules/ollama/local.go new file mode 100644 index 0000000000..cd14c03884 --- /dev/null +++ b/modules/ollama/local.go @@ -0,0 +1,479 @@ +package ollama + +import ( + "bytes" + "context" + "fmt" + "io" + "os" + "os/exec" + "sync" + "syscall" + "time" + + "github.com/docker/docker/api/types" + "github.com/docker/docker/api/types/container" + "github.com/docker/docker/api/types/network" + "github.com/docker/go-connections/nat" + + "github.com/testcontainers/testcontainers-go" + tcexec "github.com/testcontainers/testcontainers-go/exec" + "github.com/testcontainers/testcontainers-go/wait" +) + +const localIP = "127.0.0.1" + +var defaultStopTimeout = time.Second * 5 + +// localContext is a type holding the context for local Ollama executions. 
+type localContext struct { + useLocal bool + env []string + serveCmd *exec.Cmd + logFile *os.File + mx sync.Mutex +} + +// runLocal calls the local Ollama binary instead of using a Docker container. +func runLocal(env map[string]string) (*OllamaContainer, error) { + // Apply the environment variables to the command. + cmdEnv := []string{} + for k, v := range env { + cmdEnv = append(cmdEnv, fmt.Sprintf("%s=%s", k, v)) + } + + c := &OllamaContainer{ + localCtx: &localContext{ + useLocal: true, + env: cmdEnv, + }, + } + + c.localCtx.mx.Lock() + + serveCmd, logFile, err := startOllama(context.Background(), c.localCtx) + if err != nil { + return nil, fmt.Errorf("start ollama: %w", err) + } + + c.localCtx.serveCmd = serveCmd + c.localCtx.logFile = logFile + c.localCtx.mx.Unlock() + // Wait until the Ollama process is ready, checking that the log file contains + // the "Listening on 127.0.0.1:11434" message + err = wait.ForLog("Listening on "+localIP+":11434").WaitUntilReady(context.Background(), c) + if err != nil { + return nil, fmt.Errorf("wait for ollama to start: %w", err) + } + + return c, nil +} + +// logFile returns an existing log file or creates a new one if it doesn't exist. +func logFile() (*os.File, error) { + logName := "local-ollama-" + testcontainers.SessionID() + ".log" + if _, err := os.Stat(logName); err == nil { + return os.Open(logName) + } + + file, err := os.Create(logName) + if err != nil { + return nil, fmt.Errorf("create ollama log file: %w", err) + } + + return file, nil +} + +// startOllama starts the Ollama serve command in the background, writing to the +// provided log file. +func startOllama(ctx context.Context, localCtx *localContext) (*exec.Cmd, *os.File, error) { + serveCmd := exec.CommandContext(ctx, "ollama", "serve") + serveCmd.Env = append(serveCmd.Env, localCtx.env...) + serveCmd.Env = append(serveCmd.Env, os.Environ()...) 
+ + logFile, err := logFile() + if err != nil { + return nil, nil, fmt.Errorf("ollama log file: %w", err) + } + + serveCmd.Stdout = logFile + serveCmd.Stderr = logFile + + // Run the ollama serve command in background + err = serveCmd.Start() + if err != nil { + return nil, nil, fmt.Errorf("start ollama serve: %w", err) + } + + return serveCmd, logFile, nil +} + +// ContainerIP returns the IP address of the local Ollama binary. +func (c *OllamaContainer) ContainerIP(ctx context.Context) (string, error) { + if !c.localCtx.useLocal { + return c.Container.ContainerIP(ctx) + } + + return localIP, nil +} + +// ContainerIPs returns a slice with the IP address of the local Ollama binary. +func (c *OllamaContainer) ContainerIPs(ctx context.Context) ([]string, error) { + if !c.localCtx.useLocal { + return c.Container.ContainerIPs(ctx) + } + + return []string{localIP}, nil +} + +// CopyToContainer is a no-op for the local Ollama binary. +func (c *OllamaContainer) CopyToContainer(ctx context.Context, fileContent []byte, containerFilePath string, fileMode int64) error { + if !c.localCtx.useLocal { + return c.Container.CopyToContainer(ctx, fileContent, containerFilePath, fileMode) + } + + return nil +} + +// CopyDirToContainer is a no-op for the local Ollama binary. +func (c *OllamaContainer) CopyDirToContainer(ctx context.Context, hostDirPath string, containerParentPath string, fileMode int64) error { + if !c.localCtx.useLocal { + return c.Container.CopyDirToContainer(ctx, hostDirPath, containerParentPath, fileMode) + } + + return nil +} + +// CopyFileToContainer is a no-op for the local Ollama binary. +func (c *OllamaContainer) CopyFileToContainer(ctx context.Context, hostFilePath string, containerFilePath string, fileMode int64) error { + if !c.localCtx.useLocal { + return c.Container.CopyFileToContainer(ctx, hostFilePath, containerFilePath, fileMode) + } + + return nil +} + +// CopyFileFromContainer is a no-op for the local Ollama binary. 
+func (c *OllamaContainer) CopyFileFromContainer(ctx context.Context, filePath string) (io.ReadCloser, error) { + if !c.localCtx.useLocal { + return c.Container.CopyFileFromContainer(ctx, filePath) + } + + return nil, nil +} + +// GetLogProductionErrorChannel returns a nil channel. +func (c *OllamaContainer) GetLogProductionErrorChannel() <-chan error { + if !c.localCtx.useLocal { + return c.Container.GetLogProductionErrorChannel() + } + + return nil +} + +// Endpoint returns the 127.0.0.1:11434 endpoint for the local Ollama binary. +func (c *OllamaContainer) Endpoint(ctx context.Context, port string) (string, error) { + if !c.localCtx.useLocal { + return c.Container.Endpoint(ctx, port) + } + + return localIP + ":11434", nil +} + +// Exec executes a command using the local Ollama binary. +func (c *OllamaContainer) Exec(ctx context.Context, cmd []string, options ...tcexec.ProcessOption) (int, io.Reader, error) { + if !c.localCtx.useLocal { + return c.Container.Exec(ctx, cmd, options...) + } + + c.localCtx.mx.Lock() + defer c.localCtx.mx.Unlock() + + args := []string{} + if len(cmd) > 1 { + args = cmd[1:] // prevent when there is only one command + } + + command := prepareExec(ctx, cmd[0], args, c.localCtx.env, c.localCtx.logFile) + err := command.Run() + if err != nil { + return command.ProcessState.ExitCode(), c.localCtx.logFile, fmt.Errorf("exec %v: %w", cmd, err) + } + + return command.ProcessState.ExitCode(), c.localCtx.logFile, nil +} + +func prepareExec(ctx context.Context, bin string, args []string, env []string, output io.Writer) *exec.Cmd { + command := exec.CommandContext(ctx, bin, args...) + command.Env = append(command.Env, env...) + command.Env = append(command.Env, os.Environ()...) 
+ + command.Stdout = output + command.Stderr = output + + return command +} + +// GetContainerID returns a placeholder ID for local execution +func (c *OllamaContainer) GetContainerID() string { + if !c.localCtx.useLocal { + return c.Container.GetContainerID() + } + + return "local-ollama-" + testcontainers.SessionID() +} + +// Host returns the 127.0.0.1 address for the local Ollama binary. +func (c *OllamaContainer) Host(ctx context.Context) (string, error) { + if !c.localCtx.useLocal { + return c.Container.Host(ctx) + } + + return localIP, nil +} + +// Inspect returns a ContainerJSON with the state of the local Ollama binary. +// The version is read from the local Ollama binary (ollama -v), and the port +// mapping is set to 11434. +func (c *OllamaContainer) Inspect(ctx context.Context) (*types.ContainerJSON, error) { + if !c.localCtx.useLocal { + return c.Container.Inspect(ctx) + } + + state, err := c.State(ctx) + if err != nil { + return nil, fmt.Errorf("get ollama state: %w", err) + } + + // read the version from the ollama binary + buf := &bytes.Buffer{} + command := prepareExec(ctx, "ollama", []string{"-v"}, c.localCtx.env, buf) + err = command.Run() + if err != nil { + return nil, fmt.Errorf("read ollama -v output: %w", err) + } + + bs, err := io.ReadAll(buf) + if err != nil { + return nil, fmt.Errorf("read ollama -v output: %w", err) + } + + return &types.ContainerJSON{ + ContainerJSONBase: &types.ContainerJSONBase{ + ID: c.GetContainerID(), + Name: "local-ollama-" + testcontainers.SessionID(), + State: state, + }, + Config: &container.Config{ + Image: string(bs), + ExposedPorts: nat.PortSet{ + "11434/tcp": struct{}{}, + }, + Hostname: "localhost", + Entrypoint: []string{"ollama", "serve"}, + }, + NetworkSettings: &types.NetworkSettings{ + Networks: map[string]*network.EndpointSettings{}, + NetworkSettingsBase: types.NetworkSettingsBase{ + Bridge: "bridge", + Ports: nat.PortMap{ + "11434/tcp": { + {HostIP: localIP, HostPort: "11434"}, + }, + }, + }, + 
DefaultNetworkSettings: types.DefaultNetworkSettings{ + IPAddress: localIP, + }, + }, + }, nil +} + +// IsRunning returns true if the local Ollama process is running. +func (c *OllamaContainer) IsRunning() bool { + if !c.localCtx.useLocal { + return c.Container.IsRunning() + } + + c.localCtx.mx.Lock() + defer c.localCtx.mx.Unlock() + + return c.localCtx.serveCmd != nil +} + +// Logs returns the logs from the local Ollama binary. +func (c *OllamaContainer) Logs(ctx context.Context) (io.ReadCloser, error) { + if !c.localCtx.useLocal { + return c.Container.Logs(ctx) + } + + c.localCtx.mx.Lock() + defer c.localCtx.mx.Unlock() + + // stream the log file + return os.Open(c.localCtx.logFile.Name()) +} + +// MappedPort returns the configured port for local Ollama binary. +func (c *OllamaContainer) MappedPort(ctx context.Context, port nat.Port) (nat.Port, error) { + if !c.localCtx.useLocal { + return c.Container.MappedPort(ctx, port) + } + + // Ollama typically uses port 11434 by default + return "11434/tcp", nil +} + +// Networks returns the networks for local Ollama binary, which is empty. +func (c *OllamaContainer) Networks(ctx context.Context) ([]string, error) { + if !c.localCtx.useLocal { + return c.Container.Networks(ctx) + } + + return []string{}, nil +} + +// NetworkAliases returns the network aliases for local Ollama binary, which is empty. +func (c *OllamaContainer) NetworkAliases(ctx context.Context) (map[string][]string, error) { + if !c.localCtx.useLocal { + return c.Container.NetworkAliases(ctx) + } + + return map[string][]string{}, nil +} + +// SessionID returns the session ID for local Ollama binary, which is the session ID +// of the test execution. +func (c *OllamaContainer) SessionID() string { + if !c.localCtx.useLocal { + return c.Container.SessionID() + } + + return testcontainers.SessionID() +} + +// Start starts the local Ollama process, not failing if it's already running. 
+func (c *OllamaContainer) Start(ctx context.Context) error { + if !c.localCtx.useLocal { + return c.Container.Start(ctx) + } + + c.localCtx.mx.Lock() + + if c.localCtx.serveCmd != nil { + c.localCtx.mx.Unlock() + return nil + } + + testcontainers.Logger.Printf("starting ollama") + + serveCmd, logFile, err := startOllama(context.Background(), c.localCtx) + if err != nil { + c.localCtx.mx.Unlock() + return fmt.Errorf("start ollama: %w", err) + } + c.localCtx.serveCmd = serveCmd + c.localCtx.logFile = logFile + c.localCtx.mx.Unlock() // unlock before waiting for the process to be ready + + // Wait until the Ollama process is ready, checking that the log file contains + // the "Listening on 127.0.0.1:11434" message + err = wait.ForLog("Listening on "+localIP+":11434").WaitUntilReady(context.Background(), c) + if err != nil { + return fmt.Errorf("wait for ollama to start: %w", err) + } + + testcontainers.Logger.Printf("ollama started") + + return nil +} + +// State returns the current state of the Ollama process, simulating a container state +// for local execution. +func (c *OllamaContainer) State(ctx context.Context) (*types.ContainerState, error) { + if !c.localCtx.useLocal { + return c.Container.State(ctx) + } + + c.localCtx.mx.Lock() + defer c.localCtx.mx.Unlock() + + if c.localCtx.serveCmd == nil { + return &types.ContainerState{Status: "stopped"}, nil + } + + // Check if process is still running. Signal(0) is a special case in Unix-like systems. + // When you send signal 0 to a process: + // - It performs all the normal error checking (permissions, process existence, etc.) + // - But it doesn't actually send any signal to the process + if err := c.localCtx.serveCmd.Process.Signal(syscall.Signal(0)); err != nil { + return &types.ContainerState{Status: "stopped"}, nil + } + + // Setting the Running field because it's required by the wait strategy + // to check if the given log message is present. 
+ return &types.ContainerState{Status: "running", Running: true}, nil +} + +// Stop gracefully stops the local Ollama process +func (c *OllamaContainer) Stop(ctx context.Context, d *time.Duration) error { + if !c.localCtx.useLocal { + return c.Container.Stop(ctx, d) + } + + c.localCtx.mx.Lock() + defer c.localCtx.mx.Unlock() + + testcontainers.Logger.Printf("stopping ollama") + + if c.localCtx.serveCmd == nil { + return nil + } + + if err := c.localCtx.serveCmd.Process.Signal(syscall.Signal(syscall.SIGTERM)); err != nil { + return fmt.Errorf("signal ollama: %w", err) + } + + c.localCtx.serveCmd = nil + + testcontainers.Logger.Printf("ollama stopped") + + return nil +} + +// Terminate stops the local Ollama process, removing the log file. +func (c *OllamaContainer) Terminate(ctx context.Context) (err error) { + if !c.localCtx.useLocal { + return c.Container.Terminate(ctx) + } + + // First try to stop gracefully + err = c.Stop(ctx, &defaultStopTimeout) + if err != nil { + return fmt.Errorf("stop ollama: %w", err) + } + + defer func() { + c.localCtx.mx.Lock() + defer c.localCtx.mx.Unlock() + + if c.localCtx.logFile == nil { + return + } + + // remove the log file if it exists + if _, err := os.Stat(c.localCtx.logFile.Name()); err == nil { + err = c.localCtx.logFile.Close() + if err != nil { + return + } + + err = os.Remove(c.localCtx.logFile.Name()) + if err != nil { + return + } + } + }() + + return nil +} diff --git a/modules/ollama/local_test.go b/modules/ollama/local_test.go new file mode 100644 index 0000000000..c8d75ce6a5 --- /dev/null +++ b/modules/ollama/local_test.go @@ -0,0 +1,241 @@ +package ollama_test + +import ( + "context" + "io" + "os" + "os/exec" + "testing" + "time" + + "github.com/docker/docker/api/types/strslice" + "github.com/stretchr/testify/require" + + "github.com/testcontainers/testcontainers-go" + tcexec "github.com/testcontainers/testcontainers-go/exec" + "github.com/testcontainers/testcontainers-go/modules/ollama" +) + +func TestRun_local(t 
*testing.T) { + // check if the local ollama binary is available + if _, err := exec.LookPath("ollama"); err != nil { + t.Skip("local ollama binary not found, skipping") + } + + ctx := context.Background() + + ollamaContainer, err := ollama.Run( + ctx, + "ollama/ollama:0.1.25", + ollama.WithUseLocal("FOO=BAR"), + ) + testcontainers.CleanupContainer(t, ollamaContainer) + require.NoError(t, err) + + t.Run("connection-string", func(t *testing.T) { + connectionStr, err := ollamaContainer.ConnectionString(ctx) + require.NoError(t, err) + require.Equal(t, "http://127.0.0.1:11434", connectionStr) + }) + + t.Run("container-id", func(t *testing.T) { + id := ollamaContainer.GetContainerID() + require.Equal(t, "local-ollama-"+testcontainers.SessionID(), id) + }) + + t.Run("container-ips", func(t *testing.T) { + ip, err := ollamaContainer.ContainerIP(ctx) + require.NoError(t, err) + require.Equal(t, "127.0.0.1", ip) + + ips, err := ollamaContainer.ContainerIPs(ctx) + require.NoError(t, err) + require.Equal(t, []string{"127.0.0.1"}, ips) + }) + + t.Run("copy", func(t *testing.T) { + err := ollamaContainer.CopyToContainer(ctx, []byte("test"), "/tmp", 0o755) + require.NoError(t, err) + + err = ollamaContainer.CopyDirToContainer(ctx, ".", "/tmp", 0o755) + require.NoError(t, err) + + err = ollamaContainer.CopyFileToContainer(ctx, ".", "/tmp", 0o755) + require.NoError(t, err) + + reader, err := ollamaContainer.CopyFileFromContainer(ctx, "/tmp") + require.NoError(t, err) + require.Nil(t, reader) + }) + + t.Run("log-production-error-channel", func(t *testing.T) { + ch := ollamaContainer.GetLogProductionErrorChannel() + require.Nil(t, ch) + }) + + t.Run("endpoint", func(t *testing.T) { + endpoint, err := ollamaContainer.Endpoint(ctx, "88888/tcp") + require.NoError(t, err) + require.Equal(t, "127.0.0.1:11434", endpoint) + }) + + t.Run("exec/pull-and-run-model", func(t *testing.T) { + const model = "llama3.2:1b" + + code, r, err := ollamaContainer.Exec(ctx, []string{"ollama", "pull", 
model}) + require.NoError(t, err) + require.Equal(t, 0, code) + + bs, err := io.ReadAll(r) + require.NoError(t, err) + require.Empty(t, bs) + + code, _, err = ollamaContainer.Exec(ctx, []string{"ollama", "run", model}, tcexec.Multiplexed()) + require.NoError(t, err) + require.Equal(t, 0, code) + + logs, err := ollamaContainer.Logs(ctx) + require.NoError(t, err) + defer logs.Close() + + bs, err = io.ReadAll(logs) + require.NoError(t, err) + require.Contains(t, string(bs), "llama runner started") + }) + + t.Run("is-running", func(t *testing.T) { + require.True(t, ollamaContainer.IsRunning()) + + err = ollamaContainer.Stop(ctx, nil) + require.NoError(t, err) + + require.False(t, ollamaContainer.IsRunning()) + + // return it to the running state + err = ollamaContainer.Start(ctx) + require.NoError(t, err) + + require.True(t, ollamaContainer.IsRunning()) + }) + + t.Run("host", func(t *testing.T) { + host, err := ollamaContainer.Host(ctx) + require.NoError(t, err) + require.Equal(t, "127.0.0.1", host) + }) + + t.Run("inspect", func(t *testing.T) { + inspect, err := ollamaContainer.Inspect(ctx) + require.NoError(t, err) + + require.Equal(t, "local-ollama-"+testcontainers.SessionID(), inspect.ContainerJSONBase.ID) + require.Equal(t, "local-ollama-"+testcontainers.SessionID(), inspect.ContainerJSONBase.Name) + require.True(t, inspect.ContainerJSONBase.State.Running) + + require.Contains(t, string(inspect.Config.Image), "ollama version is") + _, exists := inspect.Config.ExposedPorts["11434/tcp"] + require.True(t, exists) + require.Equal(t, "localhost", inspect.Config.Hostname) + require.Equal(t, strslice.StrSlice(strslice.StrSlice{"ollama", "serve"}), inspect.Config.Entrypoint) + + require.Empty(t, inspect.NetworkSettings.Networks) + require.Equal(t, "bridge", inspect.NetworkSettings.NetworkSettingsBase.Bridge) + + ports := inspect.NetworkSettings.NetworkSettingsBase.Ports + _, exists = ports["11434/tcp"] + require.True(t, exists) + + require.Equal(t, "127.0.0.1", 
inspect.NetworkSettings.Ports["11434/tcp"][0].HostIP) + require.Equal(t, "11434", inspect.NetworkSettings.Ports["11434/tcp"][0].HostPort) + }) + + t.Run("logfile", func(t *testing.T) { + openFile, err := os.Open("local-ollama-" + testcontainers.SessionID() + ".log") + require.NoError(t, err) + require.NotNil(t, openFile) + require.NoError(t, openFile.Close()) + }) + + t.Run("logs", func(t *testing.T) { + logs, err := ollamaContainer.Logs(ctx) + require.NoError(t, err) + defer logs.Close() + + bs, err := io.ReadAll(logs) + require.NoError(t, err) + + require.Contains(t, string(bs), "Listening on 127.0.0.1:11434") + }) + + t.Run("mapped-port", func(t *testing.T) { + port, err := ollamaContainer.MappedPort(ctx, "11434/tcp") + require.NoError(t, err) + require.Equal(t, "11434", port.Port()) + require.Equal(t, "tcp", port.Proto()) + }) + + t.Run("networks", func(t *testing.T) { + networks, err := ollamaContainer.Networks(ctx) + require.NoError(t, err) + require.Empty(t, networks) + }) + + t.Run("network-aliases", func(t *testing.T) { + aliases, err := ollamaContainer.NetworkAliases(ctx) + require.NoError(t, err) + require.Empty(t, aliases) + }) + + t.Run("session-id", func(t *testing.T) { + id := ollamaContainer.SessionID() + require.Equal(t, testcontainers.SessionID(), id) + }) + + t.Run("stop-start", func(t *testing.T) { + d := time.Second * 5 + + err := ollamaContainer.Stop(ctx, &d) + require.NoError(t, err) + + state, err := ollamaContainer.State(ctx) + require.NoError(t, err) + require.Equal(t, "stopped", state.Status) + + err = ollamaContainer.Start(ctx) + require.NoError(t, err) + + state, err = ollamaContainer.State(ctx) + require.NoError(t, err) + require.Equal(t, "running", state.Status) + + logs, err := ollamaContainer.Logs(ctx) + require.NoError(t, err) + defer logs.Close() + + bs, err := io.ReadAll(logs) + require.NoError(t, err) + + require.Contains(t, string(bs), "Listening on 127.0.0.1:11434") + }) + + t.Run("start-start", func(t *testing.T) { + state, 
err := ollamaContainer.State(ctx) + require.NoError(t, err) + require.Equal(t, "running", state.Status) + + err = ollamaContainer.Start(ctx) + require.NoError(t, err) + }) + + t.Run("terminate", func(t *testing.T) { + err := ollamaContainer.Terminate(ctx) + require.NoError(t, err) + + _, err = os.Stat("ollama-" + testcontainers.SessionID() + ".log") + require.True(t, os.IsNotExist(err)) + + state, err := ollamaContainer.State(ctx) + require.NoError(t, err) + require.Equal(t, "stopped", state.Status) + }) +} diff --git a/modules/ollama/ollama.go b/modules/ollama/ollama.go index 203d80103f..2f8d7e396d 100644 --- a/modules/ollama/ollama.go +++ b/modules/ollama/ollama.go @@ -20,11 +20,16 @@ const DefaultOllamaImage = "ollama/ollama:0.1.25" // OllamaContainer represents the Ollama container type used in the module type OllamaContainer struct { testcontainers.Container + localCtx *localContext } // ConnectionString returns the connection string for the Ollama container, // using the default port 11434. func (c *OllamaContainer) ConnectionString(ctx context.Context) (string, error) { + if c.localCtx.useLocal { + return "http://127.0.0.1:11434", nil + } + host, err := c.Host(ctx) if err != nil { return "", err @@ -43,6 +48,10 @@ func (c *OllamaContainer) ConnectionString(ctx context.Context) (string, error) // of the container into a new image with the given name, so it doesn't override existing images. // It should be used for creating an image that contains a loaded model. 
func (c *OllamaContainer) Commit(ctx context.Context, targetImage string) error { + if c.localCtx.useLocal { + return nil + } + cli, err := testcontainers.NewDockerClientWithOpts(context.Background()) if err != nil { return err @@ -94,16 +103,29 @@ func Run(ctx context.Context, img string, opts ...testcontainers.ContainerCustom // always request a GPU if the host supports it opts = append(opts, withGpu()) + options := defaultOptions() for _, opt := range opts { if err := opt.Customize(&genericContainerReq); err != nil { return nil, fmt.Errorf("customize: %w", err) } + if _, ok := opt.(UseLocal); ok { + options.useLocal = true + } + } + + if options.useLocal { + container, err := runLocal(req.Env) + if err == nil { + return container, nil + } + + testcontainers.Logger.Printf("failed to run local ollama: %v, switching to docker", err) } container, err := testcontainers.GenericContainer(ctx, genericContainerReq) var c *OllamaContainer if container != nil { - c = &OllamaContainer{Container: container} + c = &OllamaContainer{Container: container, localCtx: &localContext{useLocal: false}} } if err != nil { diff --git a/modules/ollama/options.go b/modules/ollama/options.go index 605768a379..82191e66f3 100644 --- a/modules/ollama/options.go +++ b/modules/ollama/options.go @@ -2,12 +2,22 @@ package ollama import ( "context" + "fmt" + "strings" "github.com/docker/docker/api/types/container" "github.com/testcontainers/testcontainers-go" ) +type options struct { + useLocal bool +} + +func defaultOptions() options { + return options{} +} + var noopCustomizeRequestOption = func(req *testcontainers.GenericContainerRequest) error { return nil } // withGpu requests a GPU for the container, which could improve performance for some models. @@ -37,3 +47,34 @@ func withGpu() testcontainers.CustomizeRequestOption { } }) } + +var _ testcontainers.ContainerCustomizer = (*UseLocal)(nil) + +// UseLocal will use the local Ollama instance instead of pulling the Docker image. 
+type UseLocal struct { + env []string +} + +// WithUseLocal the module will use the local Ollama instance instead of pulling the Docker image. +// Pass the environment variables you need to set for the Ollama binary to be used, +// in the format of "KEY=VALUE". KeyValue pairs with the wrong format will cause an error. +func WithUseLocal(keyVal ...string) UseLocal { + return UseLocal{env: keyVal} +} + +// Customize implements the ContainerCustomizer interface, taking the key value pairs +// and setting them as environment variables for the Ollama binary. +// In the case of an invalid key value pair, an error is returned. +func (u UseLocal) Customize(req *testcontainers.GenericContainerRequest) error { + env := make(map[string]string) + for _, kv := range u.env { + parts := strings.SplitN(kv, "=", 2) + if len(parts) != 2 { + return fmt.Errorf("invalid environment variable: %s", kv) + } + + env[parts[0]] = parts[1] + } + + return testcontainers.WithEnv(env)(req) +} diff --git a/modules/ollama/options_test.go b/modules/ollama/options_test.go new file mode 100644 index 0000000000..67d33e5732 --- /dev/null +++ b/modules/ollama/options_test.go @@ -0,0 +1,41 @@ +package ollama_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/testcontainers/testcontainers-go" + "github.com/testcontainers/testcontainers-go/modules/ollama" +) + +func TestWithUseLocal(t *testing.T) { + req := testcontainers.GenericContainerRequest{} + + t.Run("keyVal/valid", func(t *testing.T) { + opt := ollama.WithUseLocal("OLLAMA_MODELS=/path/to/models") + err := opt.Customize(&req) + require.NoError(t, err) + require.Equal(t, "/path/to/models", req.Env["OLLAMA_MODELS"]) + }) + + t.Run("keyVal/invalid", func(t *testing.T) { + opt := ollama.WithUseLocal("OLLAMA_MODELS") + err := opt.Customize(&req) + require.Error(t, err) + }) + + t.Run("keyVal/valid/multiple", func(t *testing.T) { + opt := ollama.WithUseLocal("OLLAMA_MODELS=/path/to/models", "OLLAMA_HOST=localhost") + 
err := opt.Customize(&req) + require.NoError(t, err) + require.Equal(t, "/path/to/models", req.Env["OLLAMA_MODELS"]) + require.Equal(t, "localhost", req.Env["OLLAMA_HOST"]) + }) + + t.Run("keyVal/invalid/multiple", func(t *testing.T) { + opt := ollama.WithUseLocal("OLLAMA_MODELS=/path/to/models", "OLLAMA_HOST") + err := opt.Customize(&req) + require.Error(t, err) + }) +} From 9aa0f34542d8f19f82602309f90b3062b37a70bc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Tue, 3 Dec 2024 15:58:01 +0100 Subject: [PATCH 02/35] fix: wrong working dir at CI --- .github/workflows/ci-test-go.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/ci-test-go.yml b/.github/workflows/ci-test-go.yml index b3c8a131ed..89ce56ee86 100644 --- a/.github/workflows/ci-test-go.yml +++ b/.github/workflows/ci-test-go.yml @@ -110,7 +110,6 @@ jobs: run: go build - name: Install dependencies - working-directory: ./${{ inputs.project-directory }} shell: bash run: | SCRIPT_PATH="./.github/scripts/${{ inputs.project-directory }}/install-dependencies.sh" From d6d5bfb6475f637c873afb2ee07524726e33b445 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Tue, 3 Dec 2024 17:32:16 +0100 Subject: [PATCH 03/35] chore: extract wait to a function --- modules/ollama/local.go | 26 ++++++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index cd14c03884..7300b4dd0f 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -59,9 +59,11 @@ func runLocal(env map[string]string) (*OllamaContainer, error) { c.localCtx.serveCmd = serveCmd c.localCtx.logFile = logFile c.localCtx.mx.Unlock() - // Wait until the Ollama process is ready, checking that the log file contains - // the "Listening on 127.0.0.1:11434" message - err = wait.ForLog("Listening on "+localIP+":11434").WaitUntilReady(context.Background(), c) + + ctx, cancel := 
context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + err = waitForOllama(ctx, c) if err != nil { return nil, fmt.Errorf("wait for ollama to start: %w", err) } @@ -108,6 +110,17 @@ func startOllama(ctx context.Context, localCtx *localContext) (*exec.Cmd, *os.Fi return serveCmd, logFile, nil } +// Wait until the Ollama process is ready, checking that the log file contains +// the "Listening on 127.0.0.1:11434" message +func waitForOllama(ctx context.Context, c *OllamaContainer) error { + err := wait.ForLog("Listening on "+localIP+":11434").WaitUntilReady(ctx, c) + if err != nil { + return fmt.Errorf("wait for ollama to start: %w", err) + } + + return nil +} + // ContainerIP returns the IP address of the local Ollama binary. func (c *OllamaContainer) ContainerIP(ctx context.Context) (string, error) { if !c.localCtx.useLocal { @@ -376,9 +389,10 @@ func (c *OllamaContainer) Start(ctx context.Context) error { c.localCtx.logFile = logFile c.localCtx.mx.Unlock() // unlock before waiting for the process to be ready - // Wait until the Ollama process is ready, checking that the log file contains - // the "Listening on 127.0.0.1:11434" message - err = wait.ForLog("Listening on "+localIP+":11434").WaitUntilReady(context.Background(), c) + waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second) + defer cancel() + + err = waitForOllama(waitCtx, c) if err != nil { return fmt.Errorf("wait for ollama to start: %w", err) } From 7b3be545a7129dc5b2b40f738fdf7a757b3aa3a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Tue, 3 Dec 2024 17:35:23 +0100 Subject: [PATCH 04/35] chore: print local binary logs on error --- modules/ollama/local.go | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index 7300b4dd0f..6de9ea0f8e 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -115,6 +115,18 @@ func startOllama(ctx context.Context, localCtx *localContext) 
(*exec.Cmd, *os.Fi func waitForOllama(ctx context.Context, c *OllamaContainer) error { err := wait.ForLog("Listening on "+localIP+":11434").WaitUntilReady(ctx, c) if err != nil { + logs, err := c.Logs(ctx) + if err != nil { + return fmt.Errorf("wait for ollama to start: %w", err) + } + + bs, err := io.ReadAll(logs) + if err != nil { + return fmt.Errorf("read ollama logs: %w", err) + } + + testcontainers.Logger.Printf("ollama logs:\n%s", string(bs)) + return fmt.Errorf("wait for ollama to start: %w", err) } From 4761fa1485eaa2b556aea107011773c30f3585ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Tue, 3 Dec 2024 18:11:57 +0100 Subject: [PATCH 05/35] chore: remove debug logs --- modules/ollama/local.go | 8 -------- 1 file changed, 8 deletions(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index 6de9ea0f8e..87a10da36a 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -390,8 +390,6 @@ func (c *OllamaContainer) Start(ctx context.Context) error { return nil } - testcontainers.Logger.Printf("starting ollama") - serveCmd, logFile, err := startOllama(context.Background(), c.localCtx) if err != nil { c.localCtx.mx.Unlock() @@ -409,8 +407,6 @@ func (c *OllamaContainer) Start(ctx context.Context) error { return fmt.Errorf("wait for ollama to start: %w", err) } - testcontainers.Logger.Printf("ollama started") - return nil } @@ -450,8 +446,6 @@ func (c *OllamaContainer) Stop(ctx context.Context, d *time.Duration) error { c.localCtx.mx.Lock() defer c.localCtx.mx.Unlock() - testcontainers.Logger.Printf("stopping ollama") - if c.localCtx.serveCmd == nil { return nil } @@ -462,8 +456,6 @@ func (c *OllamaContainer) Stop(ctx context.Context, d *time.Duration) error { c.localCtx.serveCmd = nil - testcontainers.Logger.Printf("ollama stopped") - return nil } From 15d829b372ef0191bc521a98c6c59aa87d979b61 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Tue, 3 Dec 2024 18:16:36 +0100 
Subject: [PATCH 06/35] fix(ci): kill ollama before the tests --- .github/scripts/modules/ollama/install-dependencies.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/scripts/modules/ollama/install-dependencies.sh b/.github/scripts/modules/ollama/install-dependencies.sh index f041595b4b..425829c018 100755 --- a/.github/scripts/modules/ollama/install-dependencies.sh +++ b/.github/scripts/modules/ollama/install-dependencies.sh @@ -1,3 +1,6 @@ #!/usr/bin/env bash curl -fsSL https://ollama.com/install.sh | sh + +# kill any running ollama process so that the tests can start from +pkill ollama From 15784afe7df19e5a3d9ed07b686050a14b206585 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Tue, 3 Dec 2024 22:28:33 +0100 Subject: [PATCH 07/35] chore: stop ollama using systemctl --- .github/scripts/modules/ollama/install-dependencies.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/scripts/modules/ollama/install-dependencies.sh b/.github/scripts/modules/ollama/install-dependencies.sh index 425829c018..d699158806 100755 --- a/.github/scripts/modules/ollama/install-dependencies.sh +++ b/.github/scripts/modules/ollama/install-dependencies.sh @@ -2,5 +2,5 @@ curl -fsSL https://ollama.com/install.sh | sh -# kill any running ollama process so that the tests can start from -pkill ollama +# kill any running ollama process so that the tests can start from a clean state +sudo systemctl stop ollama.service From df122378af848cd24bbc2b8269e530fbcb19a0b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Wed, 4 Dec 2024 12:59:17 +0100 Subject: [PATCH 08/35] chore: support setting log file from the env --- docs/modules/ollama.md | 25 ++++++++++++++++--------- modules/ollama/local.go | 5 +++++ modules/ollama/local_test.go | 20 ++++++++++++++++++++ 3 files changed, 41 insertions(+), 9 deletions(-) diff --git a/docs/modules/ollama.md b/docs/modules/ollama.md index ec2b61f789..53c045d95c 100644 
--- a/docs/modules/ollama.md +++ b/docs/modules/ollama.md @@ -57,6 +57,11 @@ E.g. `Run(context.Background(), "ollama/ollama:0.1.25")`. - Not available until the next release of testcontainers-go :material-tag: main +!!!warning + Please make sure the local Ollama binary is not running when using the local version of the module: + Ollama can be started as a system service, or as part of the Ollama application, + and interacting with the logs of a running Ollama process not managed by the module is not supported. + If you need to run the local Ollama binary, you can set the `UseLocal` option in the `Run` function. This option accepts a list of environment variables as a string, that will be applied to the Ollama binary when executing commands. @@ -65,20 +70,22 @@ E.g. `Run(context.Background(), "ollama/ollama:0.1.25", WithUseLocal("OLLAMA_DEB All the container methods are available when using the local Ollama binary, but will be executed locally instead of inside the container. Please consider the following differences when using the local Ollama binary: -- The local Ollama binary will create a log file in the current working directory, identified by the session ID. E.g. `local-ollama-.log`. -- `ConnectionString` returns the connection string to connect to the local Ollama binary instead of the container, which maps to `127.0.0.1:11434`. +- The local Ollama binary will create a log file in the current working directory, identified by the session ID. E.g. `local-ollama-.log`. It's possible to set the log file name using the `OLLAMA_LOGFILE` environment variable. So if you're running Ollama yourself, from the Ollama app, or the standalone binary, you could use this environment variable to set the same log file name. + - For the Ollama app, the default log file resides in the `$HOME/.ollama/logs/server.log`. + - For the standalone binary, you should start it redirecting the logs to a file. E.g. `ollama serve > /tmp/ollama.log 2>&1`. 
+- `ConnectionString` returns the connection string to connect to the local Ollama binary started by the module instead of the container, which maps to `127.0.0.1:11434`. - `ContainerIP` returns `127.0.0.1`. - `ContainerIPs` returns `["127.0.0.1"]`. - `CopyToContainer`, `CopyDirToContainer`, `CopyFileToContainer` and `CopyFileFromContainer` don't perform any action. - `GetLogProductionErrorChannel` returns a nil channel. -- `Endpoint` returns the endpoint to connect to the local Ollama binary instead of the container, which maps to `127.0.0.1:11434`. -- `Exec` passes the command to the local Ollama binary instead of inside the container. First argument is the command to execute, and the second argument is the list of arguments. -- `GetContainerID` returns the container ID of the local Ollama binary instead of the container, which maps to `local-ollama-`. +- `Endpoint` returns the endpoint to connect to the local Ollama binary started by the module instead of the container, which maps to `127.0.0.1:11434`. +- `Exec` passes the command to the local Ollama binary started by the module instead of inside the container. First argument is the command to execute, and the second argument is the list of arguments. +- `GetContainerID` returns the container ID of the local Ollama binary started by the module instead of the container, which maps to `local-ollama-`. - `Host` returns `127.0.0.1`. -- `Inspect` returns a ContainerJSON with the state of the local Ollama binary. -- `IsRunning` returns true if the local Ollama binary process is running. -- `Logs` returns the logs from the local Ollama binary instead of the container. -- `MappedPort` returns the port mapping for the local Ollama binary instead of the container. +- `Inspect` returns a ContainerJSON with the state of the local Ollama binary started by the module. +- `IsRunning` returns true if the local Ollama binary process started by the module is running. 
+- `Logs` returns the logs from the local Ollama binary started by the module instead of the container. +- `MappedPort` returns the port mapping for the local Ollama binary started by the module instead of the container. - `Start` starts the local Ollama binary process. - `State` returns the current state of the local Ollama binary process, `stopped` or `running`. - `Stop` stops the local Ollama binary process. diff --git a/modules/ollama/local.go b/modules/ollama/local.go index 87a10da36a..e69cdf8cf9 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -74,6 +74,11 @@ func runLocal(env map[string]string) (*OllamaContainer, error) { // logFile returns an existing log file or creates a new one if it doesn't exist. func logFile() (*os.File, error) { logName := "local-ollama-" + testcontainers.SessionID() + ".log" + + if envLogName := os.Getenv("OLLAMA_LOGFILE"); envLogName != "" { + logName = envLogName + } + if _, err := os.Stat(logName); err == nil { return os.Open(logName) } diff --git a/modules/ollama/local_test.go b/modules/ollama/local_test.go index c8d75ce6a5..e987274442 100644 --- a/modules/ollama/local_test.go +++ b/modules/ollama/local_test.go @@ -5,6 +5,7 @@ import ( "io" "os" "os/exec" + "path/filepath" "testing" "time" @@ -239,3 +240,22 @@ func TestRun_local(t *testing.T) { require.Equal(t, "stopped", state.Status) }) } + +func TestRun_localWithCustomLogFile(t *testing.T) { + t.Setenv("OLLAMA_LOGFILE", filepath.Join(t.TempDir(), "server.log")) + + ctx := context.Background() + + ollamaContainer, err := ollama.Run(ctx, "ollama/ollama:0.1.25", ollama.WithUseLocal("FOO=BAR")) + require.NoError(t, err) + testcontainers.CleanupContainer(t, ollamaContainer) + + logs, err := ollamaContainer.Logs(ctx) + require.NoError(t, err) + defer logs.Close() + + bs, err := io.ReadAll(logs) + require.NoError(t, err) + + require.Contains(t, string(bs), "Listening on 127.0.0.1:11434") +} From 822858318f4a11a24399ca6df58c03382bec9d63 Mon Sep 17 00:00:00 2001 
From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Wed, 4 Dec 2024 13:26:55 +0100 Subject: [PATCH 09/35] chore: support running ollama commands, only --- docs/modules/ollama.md | 2 +- modules/ollama/local.go | 10 ++++++++++ modules/ollama/local_test.go | 20 ++++++++++++++++++++ 3 files changed, 31 insertions(+), 1 deletion(-) diff --git a/docs/modules/ollama.md b/docs/modules/ollama.md index 53c045d95c..8c7738f8b1 100644 --- a/docs/modules/ollama.md +++ b/docs/modules/ollama.md @@ -79,7 +79,7 @@ Please consider the following differences when using the local Ollama binary: - `CopyToContainer`, `CopyDirToContainer`, `CopyFileToContainer` and `CopyFileFromContainer` don't perform any action. - `GetLogProductionErrorChannel` returns a nil channel. - `Endpoint` returns the endpoint to connect to the local Ollama binary started by the module instead of the container, which maps to `127.0.0.1:11434`. -- `Exec` passes the command to the local Ollama binary started by the module instead of inside the container. First argument is the command to execute, and the second argument is the list of arguments. +- `Exec` passes the command to the local Ollama binary started by the module instead of inside the container. First argument is the command to execute, and the second argument is the list of arguments, else, an error is returned. - `GetContainerID` returns the container ID of the local Ollama binary started by the module instead of the container, which maps to `local-ollama-`. - `Host` returns `127.0.0.1`. - `Inspect` returns a ContainerJSON with the state of the local Ollama binary started by the module. 
diff --git a/modules/ollama/local.go b/modules/ollama/local.go index e69cdf8cf9..e3e685f3bf 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -3,10 +3,12 @@ package ollama import ( "bytes" "context" + "errors" "fmt" "io" "os" "os/exec" + "strings" "sync" "syscall" "time" @@ -219,6 +221,14 @@ func (c *OllamaContainer) Exec(ctx context.Context, cmd []string, options ...tce c.localCtx.mx.Lock() defer c.localCtx.mx.Unlock() + if len(cmd) == 0 { + err := errors.New("exec: no command provided") + return 1, strings.NewReader(err.Error()), err + } else if cmd[0] != "ollama" { + err := fmt.Errorf("%s: %w", cmd[0], errors.ErrUnsupported) + return 1, strings.NewReader(err.Error()), err + } + args := []string{} if len(cmd) > 1 { args = cmd[1:] // prevent when there is only one command diff --git a/modules/ollama/local_test.go b/modules/ollama/local_test.go index e987274442..6e85814e9e 100644 --- a/modules/ollama/local_test.go +++ b/modules/ollama/local_test.go @@ -2,6 +2,7 @@ package ollama_test import ( "context" + "errors" "io" "os" "os/exec" @@ -104,6 +105,25 @@ func TestRun_local(t *testing.T) { require.Contains(t, string(bs), "llama runner started") }) + t.Run("exec/unsupported-command", func(t *testing.T) { + code, r, err := ollamaContainer.Exec(ctx, []string{"cat", "/etc/passwd"}) + require.Equal(t, 1, code) + require.Error(t, err) + require.ErrorIs(t, err, errors.ErrUnsupported) + + bs, err := io.ReadAll(r) + require.NoError(t, err) + require.Equal(t, "cat: unsupported operation", string(bs)) + + code, r, err = ollamaContainer.Exec(ctx, []string{}) + require.Equal(t, 1, code) + require.Error(t, err) + + bs, err = io.ReadAll(r) + require.NoError(t, err) + require.Equal(t, "exec: no command provided", string(bs)) + }) + t.Run("is-running", func(t *testing.T) { require.True(t, ollamaContainer.IsRunning()) From 00936c374736c2f506fb9f244f9f6ebd1d0088b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Fri, 13 Dec 2024 07:25:14 
+0100 Subject: [PATCH 10/35] fix: release lock on error --- modules/ollama/local.go | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index e3e685f3bf..af52ce76f8 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -55,6 +55,7 @@ func runLocal(env map[string]string) (*OllamaContainer, error) { serveCmd, logFile, err := startOllama(context.Background(), c.localCtx) if err != nil { + c.localCtx.mx.Unlock() return nil, fmt.Errorf("start ollama: %w", err) } From ee30a0295ff9ba49a46fb64aed940aab1eb9e4fd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Fri, 13 Dec 2024 07:36:15 +0100 Subject: [PATCH 11/35] chore: add more test coverage for the option --- modules/ollama/options_test.go | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/modules/ollama/options_test.go b/modules/ollama/options_test.go index 67d33e5732..f842d15a17 100644 --- a/modules/ollama/options_test.go +++ b/modules/ollama/options_test.go @@ -33,6 +33,14 @@ func TestWithUseLocal(t *testing.T) { require.Equal(t, "localhost", req.Env["OLLAMA_HOST"]) }) + t.Run("keyVal/valid/multiple-equals", func(t *testing.T) { + opt := ollama.WithUseLocal("OLLAMA_MODELS=/path/to/models", "OLLAMA_HOST=localhost=127.0.0.1") + err := opt.Customize(&req) + require.NoError(t, err) + require.Equal(t, "/path/to/models", req.Env["OLLAMA_MODELS"]) + require.Equal(t, "localhost=127.0.0.1", req.Env["OLLAMA_HOST"]) + }) + t.Run("keyVal/invalid/multiple", func(t *testing.T) { opt := ollama.WithUseLocal("OLLAMA_MODELS=/path/to/models", "OLLAMA_HOST") err := opt.Customize(&req) From 5b0e8c2ebca2cf5e3cd54129a1c2d9ca61d7e4e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Fri, 13 Dec 2024 07:36:53 +0100 Subject: [PATCH 12/35] chore: simplify useLocal checks --- modules/ollama/local.go | 50 +++++++++++++++++++--------------------- modules/ollama/ollama.go | 6 ++--- 2 files changed, 27 insertions(+), 29 
deletions(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index af52ce76f8..e7ae9d6158 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -29,7 +29,6 @@ var defaultStopTimeout = time.Second * 5 // localContext is a type holding the context for local Ollama executions. type localContext struct { - useLocal bool env []string serveCmd *exec.Cmd logFile *os.File @@ -46,8 +45,7 @@ func runLocal(env map[string]string) (*OllamaContainer, error) { c := &OllamaContainer{ localCtx: &localContext{ - useLocal: true, - env: cmdEnv, + env: cmdEnv, }, } @@ -143,7 +141,7 @@ func waitForOllama(ctx context.Context, c *OllamaContainer) error { // ContainerIP returns the IP address of the local Ollama binary. func (c *OllamaContainer) ContainerIP(ctx context.Context) (string, error) { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.ContainerIP(ctx) } @@ -152,7 +150,7 @@ func (c *OllamaContainer) ContainerIP(ctx context.Context) (string, error) { // ContainerIPs returns a slice with the IP address of the local Ollama binary. func (c *OllamaContainer) ContainerIPs(ctx context.Context) ([]string, error) { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.ContainerIPs(ctx) } @@ -161,7 +159,7 @@ func (c *OllamaContainer) ContainerIPs(ctx context.Context) ([]string, error) { // CopyToContainer is a no-op for the local Ollama binary. func (c *OllamaContainer) CopyToContainer(ctx context.Context, fileContent []byte, containerFilePath string, fileMode int64) error { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.CopyToContainer(ctx, fileContent, containerFilePath, fileMode) } @@ -170,7 +168,7 @@ func (c *OllamaContainer) CopyToContainer(ctx context.Context, fileContent []byt // CopyDirToContainer is a no-op for the local Ollama binary. 
func (c *OllamaContainer) CopyDirToContainer(ctx context.Context, hostDirPath string, containerParentPath string, fileMode int64) error { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.CopyDirToContainer(ctx, hostDirPath, containerParentPath, fileMode) } @@ -179,7 +177,7 @@ func (c *OllamaContainer) CopyDirToContainer(ctx context.Context, hostDirPath st // CopyFileToContainer is a no-op for the local Ollama binary. func (c *OllamaContainer) CopyFileToContainer(ctx context.Context, hostFilePath string, containerFilePath string, fileMode int64) error { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.CopyFileToContainer(ctx, hostFilePath, containerFilePath, fileMode) } @@ -188,7 +186,7 @@ func (c *OllamaContainer) CopyFileToContainer(ctx context.Context, hostFilePath // CopyFileFromContainer is a no-op for the local Ollama binary. func (c *OllamaContainer) CopyFileFromContainer(ctx context.Context, filePath string) (io.ReadCloser, error) { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.CopyFileFromContainer(ctx, filePath) } @@ -197,7 +195,7 @@ func (c *OllamaContainer) CopyFileFromContainer(ctx context.Context, filePath st // GetLogProductionErrorChannel returns a nil channel. func (c *OllamaContainer) GetLogProductionErrorChannel() <-chan error { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.GetLogProductionErrorChannel() } @@ -206,7 +204,7 @@ func (c *OllamaContainer) GetLogProductionErrorChannel() <-chan error { // Endpoint returns the 127.0.0.1:11434 endpoint for the local Ollama binary. func (c *OllamaContainer) Endpoint(ctx context.Context, port string) (string, error) { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.Endpoint(ctx, port) } @@ -215,7 +213,7 @@ func (c *OllamaContainer) Endpoint(ctx context.Context, port string) (string, er // Exec executes a command using the local Ollama binary. 
func (c *OllamaContainer) Exec(ctx context.Context, cmd []string, options ...tcexec.ProcessOption) (int, io.Reader, error) { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.Exec(ctx, cmd, options...) } @@ -257,7 +255,7 @@ func prepareExec(ctx context.Context, bin string, args []string, env []string, o // GetContainerID returns a placeholder ID for local execution func (c *OllamaContainer) GetContainerID() string { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.GetContainerID() } @@ -266,7 +264,7 @@ func (c *OllamaContainer) GetContainerID() string { // Host returns the 127.0.0.1 address for the local Ollama binary. func (c *OllamaContainer) Host(ctx context.Context) (string, error) { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.Host(ctx) } @@ -277,7 +275,7 @@ func (c *OllamaContainer) Host(ctx context.Context) (string, error) { // The version is read from the local Ollama binary (ollama -v), and the port // mapping is set to 11434. func (c *OllamaContainer) Inspect(ctx context.Context) (*types.ContainerJSON, error) { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.Inspect(ctx) } @@ -332,7 +330,7 @@ func (c *OllamaContainer) Inspect(ctx context.Context) (*types.ContainerJSON, er // IsRunning returns true if the local Ollama process is running. func (c *OllamaContainer) IsRunning() bool { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.IsRunning() } @@ -344,7 +342,7 @@ func (c *OllamaContainer) IsRunning() bool { // Logs returns the logs from the local Ollama binary. func (c *OllamaContainer) Logs(ctx context.Context) (io.ReadCloser, error) { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.Logs(ctx) } @@ -357,7 +355,7 @@ func (c *OllamaContainer) Logs(ctx context.Context) (io.ReadCloser, error) { // MappedPort returns the configured port for local Ollama binary. 
func (c *OllamaContainer) MappedPort(ctx context.Context, port nat.Port) (nat.Port, error) { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.MappedPort(ctx, port) } @@ -367,7 +365,7 @@ func (c *OllamaContainer) MappedPort(ctx context.Context, port nat.Port) (nat.Po // Networks returns the networks for local Ollama binary, which is empty. func (c *OllamaContainer) Networks(ctx context.Context) ([]string, error) { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.Networks(ctx) } @@ -376,7 +374,7 @@ func (c *OllamaContainer) Networks(ctx context.Context) ([]string, error) { // NetworkAliases returns the network aliases for local Ollama binary, which is empty. func (c *OllamaContainer) NetworkAliases(ctx context.Context) (map[string][]string, error) { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.NetworkAliases(ctx) } @@ -386,7 +384,7 @@ func (c *OllamaContainer) NetworkAliases(ctx context.Context) (map[string][]stri // SessionID returns the session ID for local Ollama binary, which is the session ID // of the test execution. func (c *OllamaContainer) SessionID() string { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.SessionID() } @@ -395,7 +393,7 @@ func (c *OllamaContainer) SessionID() string { // Start starts the local Ollama process, not failing if it's already running. 
func (c *OllamaContainer) Start(ctx context.Context) error { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.Start(ctx) } @@ -406,7 +404,7 @@ func (c *OllamaContainer) Start(ctx context.Context) error { return nil } - serveCmd, logFile, err := startOllama(context.Background(), c.localCtx) + serveCmd, logFile, err := startOllama(ctx, c.localCtx) if err != nil { c.localCtx.mx.Unlock() return fmt.Errorf("start ollama: %w", err) @@ -429,7 +427,7 @@ func (c *OllamaContainer) Start(ctx context.Context) error { // State returns the current state of the Ollama process, simulating a container state // for local execution. func (c *OllamaContainer) State(ctx context.Context) (*types.ContainerState, error) { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.State(ctx) } @@ -455,7 +453,7 @@ func (c *OllamaContainer) State(ctx context.Context) (*types.ContainerState, err // Stop gracefully stops the local Ollama process func (c *OllamaContainer) Stop(ctx context.Context, d *time.Duration) error { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.Stop(ctx, d) } @@ -477,7 +475,7 @@ func (c *OllamaContainer) Stop(ctx context.Context, d *time.Duration) error { // Terminate stops the local Ollama process, removing the log file. func (c *OllamaContainer) Terminate(ctx context.Context) (err error) { - if !c.localCtx.useLocal { + if c.localCtx == nil { return c.Container.Terminate(ctx) } diff --git a/modules/ollama/ollama.go b/modules/ollama/ollama.go index 2f8d7e396d..f71323ca04 100644 --- a/modules/ollama/ollama.go +++ b/modules/ollama/ollama.go @@ -26,7 +26,7 @@ type OllamaContainer struct { // ConnectionString returns the connection string for the Ollama container, // using the default port 11434. 
func (c *OllamaContainer) ConnectionString(ctx context.Context) (string, error) { - if c.localCtx.useLocal { + if c.localCtx != nil { return "http://127.0.0.1:11434", nil } @@ -48,7 +48,7 @@ func (c *OllamaContainer) ConnectionString(ctx context.Context) (string, error) // of the container into a new image with the given name, so it doesn't override existing images. // It should be used for creating an image that contains a loaded model. func (c *OllamaContainer) Commit(ctx context.Context, targetImage string) error { - if c.localCtx.useLocal { + if c.localCtx != nil { return nil } @@ -125,7 +125,7 @@ func Run(ctx context.Context, img string, opts ...testcontainers.ContainerCustom container, err := testcontainers.GenericContainer(ctx, genericContainerReq) var c *OllamaContainer if container != nil { - c = &OllamaContainer{Container: container, localCtx: &localContext{useLocal: false}} + c = &OllamaContainer{Container: container} } if err != nil { From e16fc003c5047cc0ff82e432eb262644db4a33fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Fri, 13 Dec 2024 07:38:59 +0100 Subject: [PATCH 13/35] chore: simpolify --- modules/ollama/local.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index e7ae9d6158..51b63a7afc 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -38,9 +38,9 @@ type localContext struct { // runLocal calls the local Ollama binary instead of using a Docker container. func runLocal(env map[string]string) (*OllamaContainer, error) { // Apply the environment variables to the command. 
- cmdEnv := []string{} + cmdEnv := make([]string, 0, len(env)*2) for k, v := range env { - cmdEnv = append(cmdEnv, fmt.Sprintf("%s=%s", k, v)) + cmdEnv = append(cmdEnv, k+"="+v) } c := &OllamaContainer{ From 0fc2a21528bf4dade1f8641d79c0725d994acb80 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Fri, 13 Dec 2024 07:40:33 +0100 Subject: [PATCH 14/35] chore: pass context to runLocal --- modules/ollama/local.go | 6 +++--- modules/ollama/ollama.go | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index 51b63a7afc..49bd176e51 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -36,7 +36,7 @@ type localContext struct { } // runLocal calls the local Ollama binary instead of using a Docker container. -func runLocal(env map[string]string) (*OllamaContainer, error) { +func runLocal(ctx context.Context, env map[string]string) (*OllamaContainer, error) { // Apply the environment variables to the command. 
cmdEnv := make([]string, 0, len(env)*2) for k, v := range env { @@ -51,7 +51,7 @@ func runLocal(env map[string]string) (*OllamaContainer, error) { c.localCtx.mx.Lock() - serveCmd, logFile, err := startOllama(context.Background(), c.localCtx) + serveCmd, logFile, err := startOllama(ctx, c.localCtx) if err != nil { c.localCtx.mx.Unlock() return nil, fmt.Errorf("start ollama: %w", err) @@ -61,7 +61,7 @@ func runLocal(env map[string]string) (*OllamaContainer, error) { c.localCtx.logFile = logFile c.localCtx.mx.Unlock() - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + ctx, cancel := context.WithTimeout(ctx, 10*time.Second) defer cancel() err = waitForOllama(ctx, c) diff --git a/modules/ollama/ollama.go b/modules/ollama/ollama.go index f71323ca04..56acb95226 100644 --- a/modules/ollama/ollama.go +++ b/modules/ollama/ollama.go @@ -114,7 +114,7 @@ func Run(ctx context.Context, img string, opts ...testcontainers.ContainerCustom } if options.useLocal { - container, err := runLocal(req.Env) + container, err := runLocal(ctx, req.Env) if err == nil { return container, nil } From 58a46b4c5f5b085e34b8d283e49f818ce39a4232 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Fri, 13 Dec 2024 07:41:49 +0100 Subject: [PATCH 15/35] chore: move ctx to the right scope --- modules/ollama/local.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index 49bd176e51..8872ab0552 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -61,9 +61,6 @@ func runLocal(ctx context.Context, env map[string]string) (*OllamaContainer, err c.localCtx.logFile = logFile c.localCtx.mx.Unlock() - ctx, cancel := context.WithTimeout(ctx, 10*time.Second) - defer cancel() - err = waitForOllama(ctx, c) if err != nil { return nil, fmt.Errorf("wait for ollama to start: %w", err) @@ -119,6 +116,9 @@ func startOllama(ctx context.Context, localCtx *localContext) (*exec.Cmd, *os.Fi 
// Wait until the Ollama process is ready, checking that the log file contains // the "Listening on 127.0.0.1:11434" message func waitForOllama(ctx context.Context, c *OllamaContainer) error { + ctx, cancel := context.WithTimeout(ctx, 10*time.Second) + defer cancel() + err := wait.ForLog("Listening on "+localIP+":11434").WaitUntilReady(ctx, c) if err != nil { logs, err := c.Logs(ctx) From 01c560d027039372c4cff387c9c131697b3a70df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Fri, 13 Dec 2024 08:21:06 +0100 Subject: [PATCH 16/35] chore: remove not needed --- modules/ollama/local.go | 4 ---- 1 file changed, 4 deletions(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index 8872ab0552..d2ac54dcf8 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -77,10 +77,6 @@ func logFile() (*os.File, error) { logName = envLogName } - if _, err := os.Stat(logName); err == nil { - return os.Open(logName) - } - file, err := os.Create(logName) if err != nil { return nil, fmt.Errorf("create ollama log file: %w", err) From 7621298d5f4a171702eabefa1d4b4004538a1c58 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Fri, 13 Dec 2024 08:22:14 +0100 Subject: [PATCH 17/35] chore: use a container function --- modules/ollama/local.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index d2ac54dcf8..a4c3a5f74c 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -61,7 +61,7 @@ func runLocal(ctx context.Context, env map[string]string) (*OllamaContainer, err c.localCtx.logFile = logFile c.localCtx.mx.Unlock() - err = waitForOllama(ctx, c) + err = c.waitForOllama(ctx) if err != nil { return nil, fmt.Errorf("wait for ollama to start: %w", err) } @@ -109,9 +109,9 @@ func startOllama(ctx context.Context, localCtx *localContext) (*exec.Cmd, *os.Fi return serveCmd, logFile, nil } -// Wait until the Ollama process is 
ready, checking that the log file contains +// waitForOllama Wait until the Ollama process is ready, checking that the log file contains // the "Listening on 127.0.0.1:11434" message -func waitForOllama(ctx context.Context, c *OllamaContainer) error { +func (c *OllamaContainer) waitForOllama(ctx context.Context) error { ctx, cancel := context.WithTimeout(ctx, 10*time.Second) defer cancel() @@ -412,7 +412,7 @@ func (c *OllamaContainer) Start(ctx context.Context) error { waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second) defer cancel() - err = waitForOllama(waitCtx, c) + err = c.waitForOllama(waitCtx) if err != nil { return fmt.Errorf("wait for ollama to start: %w", err) } From 98ecae9b82113131f31794244b8eb5e028e9c9db Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Fri, 13 Dec 2024 08:57:46 +0100 Subject: [PATCH 18/35] chore: support reading OLLAMA_HOST --- docs/modules/ollama.md | 4 +++ modules/ollama/local.go | 42 ++++++++++++++++++------- modules/ollama/local_test.go | 61 ++++++++++++++++++++++++++++++++++++ modules/ollama/ollama.go | 2 +- 4 files changed, 97 insertions(+), 12 deletions(-) diff --git a/docs/modules/ollama.md b/docs/modules/ollama.md index 8c7738f8b1..bffe63648e 100644 --- a/docs/modules/ollama.md +++ b/docs/modules/ollama.md @@ -93,6 +93,10 @@ Please consider the following differences when using the local Ollama binary: The local Ollama binary will create a log file in the current working directory, and it will be available in the container's `Logs` method. +!!!info + The local Ollama binary will use the `OLLAMA_HOST` environment variable to set the host and port to listen on. + If the environment variable is not set, it will use the default host `127.0.0.1` and port `11434`. 
+ {% include "../features/common_functional_options.md" %} ### Container Methods diff --git a/modules/ollama/local.go b/modules/ollama/local.go index a4c3a5f74c..9eaa2fed65 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -6,6 +6,7 @@ import ( "errors" "fmt" "io" + "net" "os" "os/exec" "strings" @@ -23,7 +24,10 @@ import ( "github.com/testcontainers/testcontainers-go/wait" ) -const localIP = "127.0.0.1" +const ( + localIP = "127.0.0.1" + localPort = "11434" +) var defaultStopTimeout = time.Second * 5 @@ -33,6 +37,8 @@ type localContext struct { serveCmd *exec.Cmd logFile *os.File mx sync.Mutex + host string + port string } // runLocal calls the local Ollama binary instead of using a Docker container. @@ -43,10 +49,24 @@ func runLocal(ctx context.Context, env map[string]string) (*OllamaContainer, err cmdEnv = append(cmdEnv, k+"="+v) } + localCtx := &localContext{ + env: cmdEnv, + host: localIP, + port: localPort, + } + + if envHost := os.Getenv("OLLAMA_HOST"); envHost != "" { + host, port, err := net.SplitHostPort(envHost) + if err != nil { + return nil, fmt.Errorf("invalid OLLAMA_HOST: %w", err) + } + + localCtx.host = host + localCtx.port = port + } + c := &OllamaContainer{ - localCtx: &localContext{ - env: cmdEnv, - }, + localCtx: localCtx, } c.localCtx.mx.Lock() @@ -115,7 +135,7 @@ func (c *OllamaContainer) waitForOllama(ctx context.Context) error { ctx, cancel := context.WithTimeout(ctx, 10*time.Second) defer cancel() - err := wait.ForLog("Listening on "+localIP+":11434").WaitUntilReady(ctx, c) + err := wait.ForLog("Listening on "+c.localCtx.host+":"+c.localCtx.port).WaitUntilReady(ctx, c) if err != nil { logs, err := c.Logs(ctx) if err != nil { @@ -204,7 +224,7 @@ func (c *OllamaContainer) Endpoint(ctx context.Context, port string) (string, er return c.Container.Endpoint(ctx, port) } - return localIP + ":11434", nil + return c.localCtx.host + ":" + c.localCtx.port, nil } // Exec executes a command using the local Ollama binary. 
@@ -302,7 +322,7 @@ func (c *OllamaContainer) Inspect(ctx context.Context) (*types.ContainerJSON, er Config: &container.Config{ Image: string(bs), ExposedPorts: nat.PortSet{ - "11434/tcp": struct{}{}, + nat.Port(c.localCtx.port + "/tcp"): struct{}{}, }, Hostname: "localhost", Entrypoint: []string{"ollama", "serve"}, @@ -312,13 +332,13 @@ func (c *OllamaContainer) Inspect(ctx context.Context) (*types.ContainerJSON, er NetworkSettingsBase: types.NetworkSettingsBase{ Bridge: "bridge", Ports: nat.PortMap{ - "11434/tcp": { - {HostIP: localIP, HostPort: "11434"}, + nat.Port(c.localCtx.port + "/tcp"): { + {HostIP: c.localCtx.host, HostPort: c.localCtx.port}, }, }, }, DefaultNetworkSettings: types.DefaultNetworkSettings{ - IPAddress: localIP, + IPAddress: c.localCtx.host, }, }, }, nil @@ -356,7 +376,7 @@ func (c *OllamaContainer) MappedPort(ctx context.Context, port nat.Port) (nat.Po } // Ollama typically uses port 11434 by default - return "11434/tcp", nil + return nat.Port(c.localCtx.port + "/tcp"), nil } // Networks returns the networks for local Ollama binary, which is empty. 
diff --git a/modules/ollama/local_test.go b/modules/ollama/local_test.go index 6e85814e9e..b555fee074 100644 --- a/modules/ollama/local_test.go +++ b/modules/ollama/local_test.go @@ -279,3 +279,64 @@ func TestRun_localWithCustomLogFile(t *testing.T) { require.Contains(t, string(bs), "Listening on 127.0.0.1:11434") } + +func TestRun_localWithCustomHost(t *testing.T) { + t.Setenv("OLLAMA_HOST", "127.0.0.1:1234") + + ctx := context.Background() + + ollamaContainer, err := ollama.Run(ctx, "ollama/ollama:0.1.25", ollama.WithUseLocal("FOO=BAR")) + require.NoError(t, err) + testcontainers.CleanupContainer(t, ollamaContainer) + + t.Run("connection-string", func(t *testing.T) { + connectionStr, err := ollamaContainer.ConnectionString(ctx) + require.NoError(t, err) + require.Equal(t, "http://127.0.0.1:1234", connectionStr) + }) + + t.Run("endpoint", func(t *testing.T) { + endpoint, err := ollamaContainer.Endpoint(ctx, "1234/tcp") + require.NoError(t, err) + require.Equal(t, "127.0.0.1:1234", endpoint) + }) + + t.Run("inspect", func(t *testing.T) { + inspect, err := ollamaContainer.Inspect(ctx) + require.NoError(t, err) + + require.Contains(t, string(inspect.Config.Image), "ollama version is") + _, exists := inspect.Config.ExposedPorts["1234/tcp"] + require.True(t, exists) + require.Equal(t, "localhost", inspect.Config.Hostname) + require.Equal(t, strslice.StrSlice(strslice.StrSlice{"ollama", "serve"}), inspect.Config.Entrypoint) + + require.Empty(t, inspect.NetworkSettings.Networks) + require.Equal(t, "bridge", inspect.NetworkSettings.NetworkSettingsBase.Bridge) + + ports := inspect.NetworkSettings.NetworkSettingsBase.Ports + _, exists = ports["1234/tcp"] + require.True(t, exists) + + require.Equal(t, "127.0.0.1", inspect.NetworkSettings.Ports["1234/tcp"][0].HostIP) + require.Equal(t, "1234", inspect.NetworkSettings.Ports["1234/tcp"][0].HostPort) + }) + + t.Run("logs", func(t *testing.T) { + logs, err := ollamaContainer.Logs(ctx) + require.NoError(t, err) + defer 
logs.Close() + + bs, err := io.ReadAll(logs) + require.NoError(t, err) + + require.Contains(t, string(bs), "Listening on 127.0.0.1:1234") + }) + + t.Run("mapped-port", func(t *testing.T) { + port, err := ollamaContainer.MappedPort(ctx, "1234/tcp") + require.NoError(t, err) + require.Equal(t, "1234", port.Port()) + require.Equal(t, "tcp", port.Proto()) + }) +} diff --git a/modules/ollama/ollama.go b/modules/ollama/ollama.go index 56acb95226..db573ed903 100644 --- a/modules/ollama/ollama.go +++ b/modules/ollama/ollama.go @@ -27,7 +27,7 @@ type OllamaContainer struct { // using the default port 11434. func (c *OllamaContainer) ConnectionString(ctx context.Context) (string, error) { if c.localCtx != nil { - return "http://127.0.0.1:11434", nil + return "http://" + c.localCtx.host + ":" + c.localCtx.port, nil } host, err := c.Host(ctx) From 644278f4ce5b90249e8f0d8e67cef46d01e36aed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Fri, 13 Dec 2024 09:01:58 +0100 Subject: [PATCH 19/35] chore: return error with copy APIs --- modules/ollama/local.go | 13 ++++++++----- modules/ollama/local_test.go | 8 ++++---- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index 9eaa2fed65..6f05a60cc5 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -29,7 +29,10 @@ const ( localPort = "11434" ) -var defaultStopTimeout = time.Second * 5 +var ( + defaultStopTimeout = time.Second * 5 + errCopyAPIsNotSupported = errors.New("copy APIs are not supported for local Ollama binary") +) // localContext is a type holding the context for local Ollama executions. type localContext struct { @@ -179,7 +182,7 @@ func (c *OllamaContainer) CopyToContainer(ctx context.Context, fileContent []byt return c.Container.CopyToContainer(ctx, fileContent, containerFilePath, fileMode) } - return nil + return errCopyAPIsNotSupported } // CopyDirToContainer is a no-op for the local Ollama binary. 
@@ -188,7 +191,7 @@ func (c *OllamaContainer) CopyDirToContainer(ctx context.Context, hostDirPath st return c.Container.CopyDirToContainer(ctx, hostDirPath, containerParentPath, fileMode) } - return nil + return errCopyAPIsNotSupported } // CopyFileToContainer is a no-op for the local Ollama binary. @@ -197,7 +200,7 @@ func (c *OllamaContainer) CopyFileToContainer(ctx context.Context, hostFilePath return c.Container.CopyFileToContainer(ctx, hostFilePath, containerFilePath, fileMode) } - return nil + return errCopyAPIsNotSupported } // CopyFileFromContainer is a no-op for the local Ollama binary. @@ -206,7 +209,7 @@ func (c *OllamaContainer) CopyFileFromContainer(ctx context.Context, filePath st return c.Container.CopyFileFromContainer(ctx, filePath) } - return nil, nil + return nil, errCopyAPIsNotSupported } // GetLogProductionErrorChannel returns a nil channel. diff --git a/modules/ollama/local_test.go b/modules/ollama/local_test.go index b555fee074..8dabf5e295 100644 --- a/modules/ollama/local_test.go +++ b/modules/ollama/local_test.go @@ -57,16 +57,16 @@ func TestRun_local(t *testing.T) { t.Run("copy", func(t *testing.T) { err := ollamaContainer.CopyToContainer(ctx, []byte("test"), "/tmp", 0o755) - require.NoError(t, err) + require.Error(t, err) err = ollamaContainer.CopyDirToContainer(ctx, ".", "/tmp", 0o755) - require.NoError(t, err) + require.Error(t, err) err = ollamaContainer.CopyFileToContainer(ctx, ".", "/tmp", 0o755) - require.NoError(t, err) + require.Error(t, err) reader, err := ollamaContainer.CopyFileFromContainer(ctx, "/tmp") - require.NoError(t, err) + require.Error(t, err) require.Nil(t, reader) }) From 25f7c56f2cf1e688ddc1a6de642a43e71d16b40b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Fri, 13 Dec 2024 09:05:15 +0100 Subject: [PATCH 20/35] chore: simply execute the script --- .github/workflows/ci-test-go.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-test-go.yml 
b/.github/workflows/ci-test-go.yml index b2bedb6836..0d6af15880 100644 --- a/.github/workflows/ci-test-go.yml +++ b/.github/workflows/ci-test-go.yml @@ -112,7 +112,7 @@ jobs: run: | SCRIPT_PATH="./.github/scripts/${{ inputs.project-directory }}/install-dependencies.sh" if [ -f "$SCRIPT_PATH" ]; then - bash "$SCRIPT_PATH" + $SCRIPT_PATH else echo "No dependencies script found at $SCRIPT_PATH - skipping installation" fi From 0e6c3d027a412cdfaee9dbee01d2eb543b2b2768 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Fri, 13 Dec 2024 09:06:41 +0100 Subject: [PATCH 21/35] chore: simplify var initialisation --- modules/ollama/local.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index 6f05a60cc5..8bdf7a4811 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -304,14 +304,14 @@ func (c *OllamaContainer) Inspect(ctx context.Context) (*types.ContainerJSON, er } // read the version from the ollama binary - buf := &bytes.Buffer{} - command := prepareExec(ctx, "ollama", []string{"-v"}, c.localCtx.env, buf) + var buf bytes.Buffer + command := prepareExec(ctx, "ollama", []string{"-v"}, c.localCtx.env, &buf) err = command.Run() if err != nil { return nil, fmt.Errorf("read ollama -v output: %w", err) } - bs, err := io.ReadAll(buf) + bs, err := io.ReadAll(&buf) if err != nil { return nil, fmt.Errorf("read ollama -v output: %w", err) } From cca27616addd28381dcf113ddc426b81922381e1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Fri, 13 Dec 2024 09:08:20 +0100 Subject: [PATCH 22/35] chore: return nil --- modules/ollama/local.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index 8bdf7a4811..9f0e155e2e 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -382,22 +382,22 @@ func (c *OllamaContainer) MappedPort(ctx context.Context, port 
nat.Port) (nat.Po return nat.Port(c.localCtx.port + "/tcp"), nil } -// Networks returns the networks for local Ollama binary, which is empty. +// Networks returns the networks for local Ollama binary, which is a nil slice. func (c *OllamaContainer) Networks(ctx context.Context) ([]string, error) { if c.localCtx == nil { return c.Container.Networks(ctx) } - return []string{}, nil + return nil, nil } -// NetworkAliases returns the network aliases for local Ollama binary, which is empty. +// NetworkAliases returns the network aliases for local Ollama binary, which is a nil map. func (c *OllamaContainer) NetworkAliases(ctx context.Context) (map[string][]string, error) { if c.localCtx == nil { return c.Container.NetworkAliases(ctx) } - return map[string][]string{}, nil + return nil, nil } // SessionID returns the session ID for local Ollama binary, which is the session ID From 5c5058e3b7ece9689ff90f76775d6a5ac39af552 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Fri, 13 Dec 2024 09:09:50 +0100 Subject: [PATCH 23/35] fix: return errors on terminate --- modules/ollama/local.go | 36 +++++++++++++++++------------------- 1 file changed, 17 insertions(+), 19 deletions(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index 9f0e155e2e..895a03dae9 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -493,38 +493,36 @@ func (c *OllamaContainer) Stop(ctx context.Context, d *time.Duration) error { } // Terminate stops the local Ollama process, removing the log file. 
-func (c *OllamaContainer) Terminate(ctx context.Context) (err error) { +func (c *OllamaContainer) Terminate(ctx context.Context) error { if c.localCtx == nil { return c.Container.Terminate(ctx) } // First try to stop gracefully - err = c.Stop(ctx, &defaultStopTimeout) + err := c.Stop(ctx, &defaultStopTimeout) if err != nil { return fmt.Errorf("stop ollama: %w", err) } - defer func() { - c.localCtx.mx.Lock() - defer c.localCtx.mx.Unlock() + c.localCtx.mx.Lock() + defer c.localCtx.mx.Unlock() + + if c.localCtx.logFile == nil { + return nil + } - if c.localCtx.logFile == nil { - return + // remove the log file if it exists + if _, err = os.Stat(c.localCtx.logFile.Name()); err == nil { + err = c.localCtx.logFile.Close() + if err != nil { + return err } - // remove the log file if it exists - if _, err := os.Stat(c.localCtx.logFile.Name()); err == nil { - err = c.localCtx.logFile.Close() - if err != nil { - return - } - - err = os.Remove(c.localCtx.logFile.Name()) - if err != nil { - return - } + err = os.Remove(c.localCtx.logFile.Name()) + if err != nil { + return err } - }() + } return nil } From 857a3781c4b439b70c4ff8cf11eaa5c72a5b3177 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Fri, 13 Dec 2024 09:11:18 +0100 Subject: [PATCH 24/35] chore: remove options type --- modules/ollama/ollama.go | 6 +++--- modules/ollama/options.go | 8 -------- 2 files changed, 3 insertions(+), 11 deletions(-) diff --git a/modules/ollama/ollama.go b/modules/ollama/ollama.go index db573ed903..3d0cc6fa4e 100644 --- a/modules/ollama/ollama.go +++ b/modules/ollama/ollama.go @@ -103,17 +103,17 @@ func Run(ctx context.Context, img string, opts ...testcontainers.ContainerCustom // always request a GPU if the host supports it opts = append(opts, withGpu()) - options := defaultOptions() + useLocal := false for _, opt := range opts { if err := opt.Customize(&genericContainerReq); err != nil { return nil, fmt.Errorf("customize: %w", err) } if _, ok := opt.(UseLocal); 
ok { - options.useLocal = true + useLocal = true } } - if options.useLocal { + if useLocal { container, err := runLocal(ctx, req.Env) if err == nil { return container, nil diff --git a/modules/ollama/options.go b/modules/ollama/options.go index 82191e66f3..ed34326d71 100644 --- a/modules/ollama/options.go +++ b/modules/ollama/options.go @@ -10,14 +10,6 @@ import ( "github.com/testcontainers/testcontainers-go" ) -type options struct { - useLocal bool -} - -func defaultOptions() options { - return options{} -} - var noopCustomizeRequestOption = func(req *testcontainers.GenericContainerRequest) error { return nil } // withGpu requests a GPU for the container, which could improve performance for some models. From ccd19745f907150217f3173f8505281a6a73b98a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Fri, 13 Dec 2024 09:18:00 +0100 Subject: [PATCH 25/35] chore: use a map --- modules/ollama/examples_test.go | 2 +- modules/ollama/local_test.go | 6 +++--- modules/ollama/options.go | 18 +++++------------- modules/ollama/options_test.go | 32 ++++++-------------------------- 4 files changed, 15 insertions(+), 43 deletions(-) diff --git a/modules/ollama/examples_test.go b/modules/ollama/examples_test.go index 188be45bbb..3601e0b120 100644 --- a/modules/ollama/examples_test.go +++ b/modules/ollama/examples_test.go @@ -178,7 +178,7 @@ func ExampleRun_withLocal() { ctx := context.Background() // localOllama { - ollamaContainer, err := tcollama.Run(ctx, "ollama/ollama:0.3.13", tcollama.WithUseLocal("OLLAMA_DEBUG=true")) + ollamaContainer, err := tcollama.Run(ctx, "ollama/ollama:0.3.13", tcollama.WithUseLocal(map[string]string{"OLLAMA_DEBUG": "true"})) defer func() { if err := testcontainers.TerminateContainer(ollamaContainer); err != nil { log.Printf("failed to terminate container: %s", err) diff --git a/modules/ollama/local_test.go b/modules/ollama/local_test.go index 8dabf5e295..3b95500b72 100644 --- a/modules/ollama/local_test.go +++ 
b/modules/ollama/local_test.go @@ -29,7 +29,7 @@ func TestRun_local(t *testing.T) { ollamaContainer, err := ollama.Run( ctx, "ollama/ollama:0.1.25", - ollama.WithUseLocal("FOO=BAR"), + ollama.WithUseLocal(map[string]string{"FOO": "BAR"}), ) testcontainers.CleanupContainer(t, ollamaContainer) require.NoError(t, err) @@ -266,7 +266,7 @@ func TestRun_localWithCustomLogFile(t *testing.T) { ctx := context.Background() - ollamaContainer, err := ollama.Run(ctx, "ollama/ollama:0.1.25", ollama.WithUseLocal("FOO=BAR")) + ollamaContainer, err := ollama.Run(ctx, "ollama/ollama:0.1.25", ollama.WithUseLocal(map[string]string{"FOO": "BAR"})) require.NoError(t, err) testcontainers.CleanupContainer(t, ollamaContainer) @@ -285,7 +285,7 @@ func TestRun_localWithCustomHost(t *testing.T) { ctx := context.Background() - ollamaContainer, err := ollama.Run(ctx, "ollama/ollama:0.1.25", ollama.WithUseLocal("FOO=BAR")) + ollamaContainer, err := ollama.Run(ctx, "ollama/ollama:0.1.25", ollama.WithUseLocal(nil)) require.NoError(t, err) testcontainers.CleanupContainer(t, ollamaContainer) diff --git a/modules/ollama/options.go b/modules/ollama/options.go index ed34326d71..4653b65169 100644 --- a/modules/ollama/options.go +++ b/modules/ollama/options.go @@ -2,8 +2,6 @@ package ollama import ( "context" - "fmt" - "strings" "github.com/docker/docker/api/types/container" @@ -44,13 +42,13 @@ var _ testcontainers.ContainerCustomizer = (*UseLocal)(nil) // UseLocal will use the local Ollama instance instead of pulling the Docker image. type UseLocal struct { - env []string + env map[string]string } // WithUseLocal the module will use the local Ollama instance instead of pulling the Docker image. // Pass the environment variables you need to set for the Ollama binary to be used, // in the format of "KEY=VALUE". KeyValue pairs with the wrong format will cause an error. 
-func WithUseLocal(keyVal ...string) UseLocal { +func WithUseLocal(keyVal map[string]string) UseLocal { return UseLocal{env: keyVal} } @@ -58,15 +56,9 @@ func WithUseLocal(keyVal ...string) UseLocal { // and setting them as environment variables for the Ollama binary. // In the case of an invalid key value pair, an error is returned. func (u UseLocal) Customize(req *testcontainers.GenericContainerRequest) error { - env := make(map[string]string) - for _, kv := range u.env { - parts := strings.SplitN(kv, "=", 2) - if len(parts) != 2 { - return fmt.Errorf("invalid environment variable: %s", kv) - } - - env[parts[0]] = parts[1] + if len(u.env) == 0 { + return nil } - return testcontainers.WithEnv(env)(req) + return testcontainers.WithEnv(u.env)(req) } diff --git a/modules/ollama/options_test.go b/modules/ollama/options_test.go index f842d15a17..46872d0dd4 100644 --- a/modules/ollama/options_test.go +++ b/modules/ollama/options_test.go @@ -12,38 +12,18 @@ import ( func TestWithUseLocal(t *testing.T) { req := testcontainers.GenericContainerRequest{} - t.Run("keyVal/valid", func(t *testing.T) { - opt := ollama.WithUseLocal("OLLAMA_MODELS=/path/to/models") + t.Run("empty", func(t *testing.T) { + opt := ollama.WithUseLocal(nil) err := opt.Customize(&req) require.NoError(t, err) - require.Equal(t, "/path/to/models", req.Env["OLLAMA_MODELS"]) - }) - - t.Run("keyVal/invalid", func(t *testing.T) { - opt := ollama.WithUseLocal("OLLAMA_MODELS") - err := opt.Customize(&req) - require.Error(t, err) - }) - - t.Run("keyVal/valid/multiple", func(t *testing.T) { - opt := ollama.WithUseLocal("OLLAMA_MODELS=/path/to/models", "OLLAMA_HOST=localhost") - err := opt.Customize(&req) - require.NoError(t, err) - require.Equal(t, "/path/to/models", req.Env["OLLAMA_MODELS"]) - require.Equal(t, "localhost", req.Env["OLLAMA_HOST"]) + require.Empty(t, req.Env) }) - t.Run("keyVal/valid/multiple-equals", func(t *testing.T) { - opt := ollama.WithUseLocal("OLLAMA_MODELS=/path/to/models", 
"OLLAMA_HOST=localhost=127.0.0.1") + t.Run("valid", func(t *testing.T) { + opt := ollama.WithUseLocal(map[string]string{"OLLAMA_MODELS": "/path/to/models", "OLLAMA_HOST": "localhost:1234"}) err := opt.Customize(&req) require.NoError(t, err) require.Equal(t, "/path/to/models", req.Env["OLLAMA_MODELS"]) - require.Equal(t, "localhost=127.0.0.1", req.Env["OLLAMA_HOST"]) - }) - - t.Run("keyVal/invalid/multiple", func(t *testing.T) { - opt := ollama.WithUseLocal("OLLAMA_MODELS=/path/to/models", "OLLAMA_HOST") - err := opt.Customize(&req) - require.Error(t, err) + require.Equal(t, "localhost:1234", req.Env["OLLAMA_HOST"]) }) } From eab5fb25decbae731caad7794aa12e04fc0c71a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Fri, 13 Dec 2024 11:51:40 +0100 Subject: [PATCH 26/35] chor: simplify error on wait --- modules/ollama/local.go | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index 895a03dae9..adfe07b6f0 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -145,14 +145,9 @@ func (c *OllamaContainer) waitForOllama(ctx context.Context) error { return fmt.Errorf("wait for ollama to start: %w", err) } - bs, err := io.ReadAll(logs) - if err != nil { - return fmt.Errorf("read ollama logs: %w", err) - } - - testcontainers.Logger.Printf("ollama logs:\n%s", string(bs)) - - return fmt.Errorf("wait for ollama to start: %w", err) + // ignore error as we already have an error and the output is already logged + bs, _ := io.ReadAll(logs) + return fmt.Errorf("wait for ollama to start: %w. 
Container logs:\n%s", err, string(bs)) } return nil From ddc96b49c4202b10f0d57647f564af172373e731 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Mon, 16 Dec 2024 07:49:37 +0100 Subject: [PATCH 27/35] chore: wrap start logic around the localContext --- modules/ollama/local.go | 40 ++++++++++++++++------------------------ 1 file changed, 16 insertions(+), 24 deletions(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index adfe07b6f0..f1458f8f9f 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -72,18 +72,11 @@ func runLocal(ctx context.Context, env map[string]string) (*OllamaContainer, err localCtx: localCtx, } - c.localCtx.mx.Lock() - - serveCmd, logFile, err := startOllama(ctx, c.localCtx) + err := c.localCtx.startOllama(ctx) if err != nil { - c.localCtx.mx.Unlock() return nil, fmt.Errorf("start ollama: %w", err) } - c.localCtx.serveCmd = serveCmd - c.localCtx.logFile = logFile - c.localCtx.mx.Unlock() - err = c.waitForOllama(ctx) if err != nil { return nil, fmt.Errorf("wait for ollama to start: %w", err) @@ -110,14 +103,21 @@ func logFile() (*os.File, error) { // startOllama starts the Ollama serve command in the background, writing to the // provided log file. -func startOllama(ctx context.Context, localCtx *localContext) (*exec.Cmd, *os.File, error) { +func (localCtx *localContext) startOllama(ctx context.Context) error { + localCtx.mx.Lock() + defer localCtx.mx.Unlock() // unlock before waiting for the process to be ready + + if localCtx.serveCmd != nil { + return nil + } + serveCmd := exec.CommandContext(ctx, "ollama", "serve") serveCmd.Env = append(serveCmd.Env, localCtx.env...) serveCmd.Env = append(serveCmd.Env, os.Environ()...) 
logFile, err := logFile() if err != nil { - return nil, nil, fmt.Errorf("ollama log file: %w", err) + return fmt.Errorf("ollama log file: %w", err) } serveCmd.Stdout = logFile @@ -126,10 +126,13 @@ func startOllama(ctx context.Context, localCtx *localContext) (*exec.Cmd, *os.Fi // Run the ollama serve command in background err = serveCmd.Start() if err != nil { - return nil, nil, fmt.Errorf("start ollama serve: %w", err) + return fmt.Errorf("start ollama serve: %w", err) } - return serveCmd, logFile, nil + localCtx.serveCmd = serveCmd + localCtx.logFile = logFile + + return nil } // waitForOllama Wait until the Ollama process is ready, checking that the log file contains @@ -411,21 +414,10 @@ func (c *OllamaContainer) Start(ctx context.Context) error { return c.Container.Start(ctx) } - c.localCtx.mx.Lock() - - if c.localCtx.serveCmd != nil { - c.localCtx.mx.Unlock() - return nil - } - - serveCmd, logFile, err := startOllama(ctx, c.localCtx) + err := c.localCtx.startOllama(ctx) if err != nil { - c.localCtx.mx.Unlock() return fmt.Errorf("start ollama: %w", err) } - c.localCtx.serveCmd = serveCmd - c.localCtx.logFile = logFile - c.localCtx.mx.Unlock() // unlock before waiting for the process to be ready waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second) defer cancel() From 299e514c19f941dc4db5b09bb18a728c43969012 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Mon, 16 Dec 2024 10:24:43 +0100 Subject: [PATCH 28/35] chor: fold --- modules/ollama/local.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index f1458f8f9f..66b49a34d7 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -304,8 +304,7 @@ func (c *OllamaContainer) Inspect(ctx context.Context) (*types.ContainerJSON, er // read the version from the ollama binary var buf bytes.Buffer command := prepareExec(ctx, "ollama", []string{"-v"}, c.localCtx.env, &buf) - err = command.Run() - if err 
!= nil { + if err := command.Run(); err != nil { return nil, fmt.Errorf("read ollama -v output: %w", err) } From 6ab96ae0f9abe04cc8adf866bdc089ffe90f90a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Mon, 16 Dec 2024 11:25:08 +0100 Subject: [PATCH 29/35] chore: merge wait into start --- modules/ollama/local.go | 47 ++++++++++++++++++++--------------------- 1 file changed, 23 insertions(+), 24 deletions(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index 66b49a34d7..82eaa17678 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -72,16 +72,11 @@ func runLocal(ctx context.Context, env map[string]string) (*OllamaContainer, err localCtx: localCtx, } - err := c.localCtx.startOllama(ctx) + err := c.startLocalOllama(ctx) if err != nil { return nil, fmt.Errorf("start ollama: %w", err) } - err = c.waitForOllama(ctx) - if err != nil { - return nil, fmt.Errorf("wait for ollama to start: %w", err) - } - return c, nil } @@ -101,22 +96,22 @@ func logFile() (*os.File, error) { return file, nil } -// startOllama starts the Ollama serve command in the background, writing to the +// startLocalOllama starts the Ollama serve command in the background, writing to the // provided log file. -func (localCtx *localContext) startOllama(ctx context.Context) error { - localCtx.mx.Lock() - defer localCtx.mx.Unlock() // unlock before waiting for the process to be ready - - if localCtx.serveCmd != nil { +func (c *OllamaContainer) startLocalOllama(ctx context.Context) error { + if c.localCtx.serveCmd != nil { return nil } + c.localCtx.mx.Lock() + serveCmd := exec.CommandContext(ctx, "ollama", "serve") - serveCmd.Env = append(serveCmd.Env, localCtx.env...) + serveCmd.Env = append(serveCmd.Env, c.localCtx.env...) serveCmd.Env = append(serveCmd.Env, os.Environ()...) 
logFile, err := logFile() if err != nil { + c.localCtx.mx.Unlock() return fmt.Errorf("ollama log file: %w", err) } @@ -126,11 +121,23 @@ func (localCtx *localContext) startOllama(ctx context.Context) error { // Run the ollama serve command in background err = serveCmd.Start() if err != nil { + c.localCtx.mx.Unlock() return fmt.Errorf("start ollama serve: %w", err) } - localCtx.serveCmd = serveCmd - localCtx.logFile = logFile + c.localCtx.serveCmd = serveCmd + c.localCtx.logFile = logFile + + // unlock before waiting for the process to be ready + c.localCtx.mx.Unlock() + + waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second) + defer cancel() + + err = c.waitForOllama(waitCtx) + if err != nil { + return fmt.Errorf("wait for ollama to start: %w", err) + } return nil } @@ -413,19 +420,11 @@ func (c *OllamaContainer) Start(ctx context.Context) error { return c.Container.Start(ctx) } - err := c.localCtx.startOllama(ctx) + err := c.startLocalOllama(ctx) if err != nil { return fmt.Errorf("start ollama: %w", err) } - waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second) - defer cancel() - - err = c.waitForOllama(waitCtx) - if err != nil { - return fmt.Errorf("wait for ollama to start: %w", err) - } - return nil } From 5cdeb2d446ca6b89a693994fe6d89ad78a3b0847 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Mon, 16 Dec 2024 11:30:44 +0100 Subject: [PATCH 30/35] fix: use proper ContainersState --- modules/ollama/local.go | 4 ++-- modules/ollama/local_test.go | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index 82eaa17678..7607817b36 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -439,7 +439,7 @@ func (c *OllamaContainer) State(ctx context.Context) (*types.ContainerState, err defer c.localCtx.mx.Unlock() if c.localCtx.serveCmd == nil { - return &types.ContainerState{Status: "stopped"}, nil + return &types.ContainerState{Status: "exited"}, 
nil } // Check if process is still running. Signal(0) is a special case in Unix-like systems. @@ -447,7 +447,7 @@ func (c *OllamaContainer) State(ctx context.Context) (*types.ContainerState, err // - It performs all the normal error checking (permissions, process existence, etc.) // - But it doesn't actually send any signal to the process if err := c.localCtx.serveCmd.Process.Signal(syscall.Signal(0)); err != nil { - return &types.ContainerState{Status: "stopped"}, nil + return &types.ContainerState{Status: "created"}, nil } // Setting the Running field because it's required by the wait strategy diff --git a/modules/ollama/local_test.go b/modules/ollama/local_test.go index 3b95500b72..bb063fb361 100644 --- a/modules/ollama/local_test.go +++ b/modules/ollama/local_test.go @@ -220,7 +220,7 @@ func TestRun_local(t *testing.T) { state, err := ollamaContainer.State(ctx) require.NoError(t, err) - require.Equal(t, "stopped", state.Status) + require.Equal(t, "exited", state.Status) err = ollamaContainer.Start(ctx) require.NoError(t, err) @@ -257,7 +257,7 @@ func TestRun_local(t *testing.T) { state, err := ollamaContainer.State(ctx) require.NoError(t, err) - require.Equal(t, "stopped", state.Status) + require.Equal(t, "exited", state.Status) }) } From a8824c055936f10e084cb0d0d07d707d469f1fee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Mon, 16 Dec 2024 11:33:33 +0100 Subject: [PATCH 31/35] fix: remove extra conversion --- modules/ollama/local.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index 7607817b36..f957b24361 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -468,7 +468,7 @@ func (c *OllamaContainer) Stop(ctx context.Context, d *time.Duration) error { return nil } - if err := c.localCtx.serveCmd.Process.Signal(syscall.Signal(syscall.SIGTERM)); err != nil { + if err := c.localCtx.serveCmd.Process.Signal(syscall.SIGTERM); err != nil { return 
fmt.Errorf("signal ollama: %w", err) } From 953518e90b982ceca84ea0924ea0054bdad1382d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Mon, 16 Dec 2024 13:13:14 +0100 Subject: [PATCH 32/35] chore: handle remove log file errors properly --- modules/ollama/local.go | 19 +++++------ modules/ollama/local_unit_test.go | 55 +++++++++++++++++++++++++++++++ 2 files changed, 63 insertions(+), 11 deletions(-) create mode 100644 modules/ollama/local_unit_test.go diff --git a/modules/ollama/local.go b/modules/ollama/local.go index f957b24361..ce6be0cd0a 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -6,6 +6,7 @@ import ( "errors" "fmt" "io" + "io/fs" "net" "os" "os/exec" @@ -496,18 +497,14 @@ func (c *OllamaContainer) Terminate(ctx context.Context) error { return nil } - // remove the log file if it exists - if _, err = os.Stat(c.localCtx.logFile.Name()); err == nil { - err = c.localCtx.logFile.Close() - if err != nil { - return err - } + var errs []error + if err = c.localCtx.logFile.Close(); err != nil { + errs = append(errs, fmt.Errorf("close log: %w", err)) + } - err = os.Remove(c.localCtx.logFile.Name()) - if err != nil { - return err - } + if err = os.Remove(c.localCtx.logFile.Name()); err != nil && !errors.Is(err, fs.ErrNotExist) { + errs = append(errs, fmt.Errorf("remove log: %w", err)) } - return nil + return errors.Join(errs...) 
} diff --git a/modules/ollama/local_unit_test.go b/modules/ollama/local_unit_test.go new file mode 100644 index 0000000000..95d9b93638 --- /dev/null +++ b/modules/ollama/local_unit_test.go @@ -0,0 +1,55 @@ +package ollama + +import ( + "context" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestRun_localWithCustomLogFileError(t *testing.T) { + t.Run("terminate/close-log-error", func(t *testing.T) { + // Create a temporary file for testing + f, err := os.CreateTemp(t.TempDir(), "test-log-*") + require.NoError(t, err) + + // Close the file before termination to force a "file already closed" error + err = f.Close() + require.NoError(t, err) + + c := &OllamaContainer{ + localCtx: &localContext{ + logFile: f, + }, + } + err = c.Terminate(context.Background()) + require.Error(t, err) + require.ErrorContains(t, err, "close log:") + }) + + t.Run("terminate/log-file-not-removable", func(t *testing.T) { + // Create a temporary file for testing + f, err := os.CreateTemp(t.TempDir(), "test-log-*") + require.NoError(t, err) + defer func() { + // Cleanup: restore permissions + os.Chmod(filepath.Dir(f.Name()), 0700) + }() + + // Make the file read-only and its parent directory read-only + // This should cause removal to fail on most systems + dir := filepath.Dir(f.Name()) + require.NoError(t, os.Chmod(dir, 0500)) + + c := &OllamaContainer{ + localCtx: &localContext{ + logFile: f, + }, + } + err = c.Terminate(context.Background()) + require.Error(t, err) + require.ErrorContains(t, err, "remove log:") + }) +} From 1a2ec6b31f8cc8a5db6e5f43f29d7cd10d023776 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Mon, 16 Dec 2024 13:20:21 +0100 Subject: [PATCH 33/35] chore: go back to string in env vars --- modules/ollama/examples_test.go | 2 +- modules/ollama/local_test.go | 6 +++--- modules/ollama/options.go | 20 ++++++++++++++------ modules/ollama/options_test.go | 32 ++++++++++++++++++++++++++------ 4 files changed, 
44 insertions(+), 16 deletions(-) diff --git a/modules/ollama/examples_test.go b/modules/ollama/examples_test.go index 3601e0b120..188be45bbb 100644 --- a/modules/ollama/examples_test.go +++ b/modules/ollama/examples_test.go @@ -178,7 +178,7 @@ func ExampleRun_withLocal() { ctx := context.Background() // localOllama { - ollamaContainer, err := tcollama.Run(ctx, "ollama/ollama:0.3.13", tcollama.WithUseLocal(map[string]string{"OLLAMA_DEBUG": "true"})) + ollamaContainer, err := tcollama.Run(ctx, "ollama/ollama:0.3.13", tcollama.WithUseLocal("OLLAMA_DEBUG=true")) defer func() { if err := testcontainers.TerminateContainer(ollamaContainer); err != nil { log.Printf("failed to terminate container: %s", err) diff --git a/modules/ollama/local_test.go b/modules/ollama/local_test.go index bb063fb361..7bd073ca5e 100644 --- a/modules/ollama/local_test.go +++ b/modules/ollama/local_test.go @@ -29,7 +29,7 @@ func TestRun_local(t *testing.T) { ollamaContainer, err := ollama.Run( ctx, "ollama/ollama:0.1.25", - ollama.WithUseLocal(map[string]string{"FOO": "BAR"}), + ollama.WithUseLocal("FOO=BAR"), ) testcontainers.CleanupContainer(t, ollamaContainer) require.NoError(t, err) @@ -266,7 +266,7 @@ func TestRun_localWithCustomLogFile(t *testing.T) { ctx := context.Background() - ollamaContainer, err := ollama.Run(ctx, "ollama/ollama:0.1.25", ollama.WithUseLocal(map[string]string{"FOO": "BAR"})) + ollamaContainer, err := ollama.Run(ctx, "ollama/ollama:0.1.25", ollama.WithUseLocal("FOO=BAR")) require.NoError(t, err) testcontainers.CleanupContainer(t, ollamaContainer) @@ -285,7 +285,7 @@ func TestRun_localWithCustomHost(t *testing.T) { ctx := context.Background() - ollamaContainer, err := ollama.Run(ctx, "ollama/ollama:0.1.25", ollama.WithUseLocal(nil)) + ollamaContainer, err := ollama.Run(ctx, "ollama/ollama:0.1.25", ollama.WithUseLocal()) require.NoError(t, err) testcontainers.CleanupContainer(t, ollamaContainer) diff --git a/modules/ollama/options.go b/modules/ollama/options.go index 
4653b65169..4761a28530 100644 --- a/modules/ollama/options.go +++ b/modules/ollama/options.go @@ -2,6 +2,8 @@ package ollama import ( "context" + "fmt" + "strings" "github.com/docker/docker/api/types/container" @@ -42,23 +44,29 @@ var _ testcontainers.ContainerCustomizer = (*UseLocal)(nil) // UseLocal will use the local Ollama instance instead of pulling the Docker image. type UseLocal struct { - env map[string]string + env []string } // WithUseLocal the module will use the local Ollama instance instead of pulling the Docker image. // Pass the environment variables you need to set for the Ollama binary to be used, // in the format of "KEY=VALUE". KeyValue pairs with the wrong format will cause an error. -func WithUseLocal(keyVal map[string]string) UseLocal { - return UseLocal{env: keyVal} +func WithUseLocal(values ...string) UseLocal { + return UseLocal{env: values} } // Customize implements the ContainerCustomizer interface, taking the key value pairs // and setting them as environment variables for the Ollama binary. // In the case of an invalid key value pair, an error is returned. 
func (u UseLocal) Customize(req *testcontainers.GenericContainerRequest) error { - if len(u.env) == 0 { - return nil + env := make(map[string]string) + for _, kv := range u.env { + parts := strings.SplitN(kv, "=", 2) + if len(parts) != 2 { + return fmt.Errorf("invalid environment variable: %s", kv) + } + + env[parts[0]] = parts[1] } - return testcontainers.WithEnv(u.env)(req) + return testcontainers.WithEnv(env)(req) } diff --git a/modules/ollama/options_test.go b/modules/ollama/options_test.go index 46872d0dd4..f842d15a17 100644 --- a/modules/ollama/options_test.go +++ b/modules/ollama/options_test.go @@ -12,18 +12,38 @@ import ( func TestWithUseLocal(t *testing.T) { req := testcontainers.GenericContainerRequest{} - t.Run("empty", func(t *testing.T) { - opt := ollama.WithUseLocal(nil) + t.Run("keyVal/valid", func(t *testing.T) { + opt := ollama.WithUseLocal("OLLAMA_MODELS=/path/to/models") err := opt.Customize(&req) require.NoError(t, err) - require.Empty(t, req.Env) + require.Equal(t, "/path/to/models", req.Env["OLLAMA_MODELS"]) + }) + + t.Run("keyVal/invalid", func(t *testing.T) { + opt := ollama.WithUseLocal("OLLAMA_MODELS") + err := opt.Customize(&req) + require.Error(t, err) + }) + + t.Run("keyVal/valid/multiple", func(t *testing.T) { + opt := ollama.WithUseLocal("OLLAMA_MODELS=/path/to/models", "OLLAMA_HOST=localhost") + err := opt.Customize(&req) + require.NoError(t, err) + require.Equal(t, "/path/to/models", req.Env["OLLAMA_MODELS"]) + require.Equal(t, "localhost", req.Env["OLLAMA_HOST"]) }) - t.Run("valid", func(t *testing.T) { - opt := ollama.WithUseLocal(map[string]string{"OLLAMA_MODELS": "/path/to/models", "OLLAMA_HOST": "localhost:1234"}) + t.Run("keyVal/valid/multiple-equals", func(t *testing.T) { + opt := ollama.WithUseLocal("OLLAMA_MODELS=/path/to/models", "OLLAMA_HOST=localhost=127.0.0.1") err := opt.Customize(&req) require.NoError(t, err) require.Equal(t, "/path/to/models", req.Env["OLLAMA_MODELS"]) - require.Equal(t, "localhost:1234", 
req.Env["OLLAMA_HOST"]) + require.Equal(t, "localhost=127.0.0.1", req.Env["OLLAMA_HOST"]) + }) + + t.Run("keyVal/invalid/multiple", func(t *testing.T) { + opt := ollama.WithUseLocal("OLLAMA_MODELS=/path/to/models", "OLLAMA_HOST") + err := opt.Customize(&req) + require.Error(t, err) }) } From f2f98678289cd3cb5eb39f4319fc752bfba38c42 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Mon, 16 Dec 2024 16:42:41 +0100 Subject: [PATCH 34/35] fix: lint --- modules/ollama/local_unit_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/ollama/local_unit_test.go b/modules/ollama/local_unit_test.go index 95d9b93638..8489bf1aeb 100644 --- a/modules/ollama/local_unit_test.go +++ b/modules/ollama/local_unit_test.go @@ -35,13 +35,13 @@ func TestRun_localWithCustomLogFileError(t *testing.T) { require.NoError(t, err) defer func() { // Cleanup: restore permissions - os.Chmod(filepath.Dir(f.Name()), 0700) + require.NoError(t, os.Chmod(filepath.Dir(f.Name()), 0o700)) }() // Make the file read-only and its parent directory read-only // This should cause removal to fail on most systems dir := filepath.Dir(f.Name()) - require.NoError(t, os.Chmod(dir, 0500)) + require.NoError(t, os.Chmod(dir, 0o500)) c := &OllamaContainer{ localCtx: &localContext{ From 158dc2e18d30120477a8bd474d9f88e0d6a6e752 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20de=20la=20Pe=C3=B1a?= Date: Mon, 16 Dec 2024 17:37:34 +0100 Subject: [PATCH 35/35] fix: set logFile to nil on terminate --- modules/ollama/local.go | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/modules/ollama/local.go b/modules/ollama/local.go index ce6be0cd0a..72a1787405 100644 --- a/modules/ollama/local.go +++ b/modules/ollama/local.go @@ -506,5 +506,11 @@ func (c *OllamaContainer) Terminate(ctx context.Context) error { errs = append(errs, fmt.Errorf("remove log: %w", err)) } - return errors.Join(errs...) + if len(errs) > 0 { + return errors.Join(errs...) 
+ } + + c.localCtx.logFile = nil + + return nil }