Skip to content

Commit

Permalink
Use Testcontainers to start Ollama locally.
Browse files Browse the repository at this point in the history
  • Loading branch information
samie committed Jul 8, 2024
1 parent 1114c3f commit 733489f
Show file tree
Hide file tree
Showing 3 changed files with 70 additions and 2 deletions.
13 changes: 13 additions & 0 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,19 @@
<artifactId>viritin</artifactId>
<version>2.8.14</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-testcontainers</artifactId>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>ollama</artifactId>
</dependency>
<dependency> <!-- upgrade to avoid CVE -->
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
<version>1.26.0</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
Expand Down
9 changes: 7 additions & 2 deletions src/main/java/com/example/demo/MainView.java
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,16 @@
import com.vaadin.flow.component.orderedlayout.Scroller;
import com.vaadin.flow.component.orderedlayout.VerticalLayout;
import com.vaadin.flow.router.Route;
import jakarta.annotation.PostConstruct;
import jakarta.annotation.PreDestroy;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.messages.*;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.stereotype.Service;
import org.testcontainers.ollama.OllamaContainer;
import org.vaadin.firitin.components.messagelist.MarkdownMessage;

import java.io.IOException;
import java.util.ArrayList;

@Route("") // map view to the root
Expand All @@ -20,7 +25,7 @@ public class MainView extends VerticalLayout {
Scroller messageScroller = new Scroller(messageList);
MessageInput messageInput = new MessageInput();

public MainView(ChatClient.Builder chatClientBuilder) {
public MainView(OllamaService ollamaService) {
add(messageScroller, messageInput);
setSizeFull();
setMargin(false);
Expand All @@ -31,7 +36,7 @@ public MainView(ChatClient.Builder chatClientBuilder) {
chatHistory.add(new SystemMessage("Answer politely to user. When user asks you about Vaadin, reply in bro style. Always show a piece a code."));

// Init the client
ChatClient chatClient = chatClientBuilder.build();
ChatClient chatClient = ollamaService.getChatClient();

// Pass user input to chatClient
messageInput.addSubmitListener(ev -> {
Expand Down
50 changes: 50 additions & 0 deletions src/main/java/com/example/demo/OllamaService.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
package com.example.demo;

import com.github.dockerjava.api.model.*;
import jakarta.annotation.PostConstruct;
import jakarta.annotation.PreDestroy;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.stereotype.Service;
import org.springframework.web.context.annotation.ApplicationScope;
import org.testcontainers.ollama.OllamaContainer;

import java.io.IOException;

@Service
@ApplicationScope
public class OllamaService {

    /** Testcontainers-managed Ollama container that provides the local LLM runtime. */
    private final OllamaContainer ollama;

    /** Spring-AI-configured builder used to create {@link ChatClient} instances. */
    private final ChatClient.Builder chatClientBuilder;

    /**
     * Prepares (but does not start) a local Ollama container.
     *
     * @param chatClientBuilder auto-configured Spring AI chat client builder
     */
    public OllamaService(ChatClient.Builder chatClientBuilder) {
        // Bind a named volume so pulled models survive container restarts, and
        // pin Ollama's default API port (11434) to the same fixed host port so the
        // Spring AI client configuration can reach it at a predictable address.
        ollama = new OllamaContainer("ollama/ollama:0.1.48")
                .withCreateContainerCmdModifier(cmd ->
                {
                    cmd.withBinds(Bind.parse("ollama:/root/.ollama"));
                    cmd.withPortBindings(new PortBinding(Ports.Binding.bindPort(11434), new ExposedPort(11434)));
                });

        this.chatClientBuilder = chatClientBuilder;
    }

    /**
     * Starts the container and pre-pulls the "mistral" model so the first chat
     * request does not block on a large download.
     *
     * @throws IOException          if talking to the Docker daemon fails
     * @throws InterruptedException if the in-container pull is interrupted
     */
    @PostConstruct
    public void start() throws IOException, InterruptedException {
        ollama.start();
        ollama.execInContainer("ollama", "pull", "mistral");
    }

    /** Stops the container on application shutdown, if it is still running. */
    @PreDestroy
    public void stop() {
        if (ollama.isRunning()) {
            ollama.stop();
        }
    }

    /**
     * Builds a {@link ChatClient} backed by the running Ollama container.
     *
     * @return a ready-to-use chat client
     * @throws IllegalStateException if the container is not running (previously this
     *                               returned {@code null}, which deferred the failure
     *                               to an uninformative NPE at the call site)
     */
    public ChatClient getChatClient() {
        if (!ollama.isRunning()) {
            throw new IllegalStateException(
                    "Ollama container is not running; cannot create ChatClient");
        }
        return chatClientBuilder.build();
    }
}

0 comments on commit 733489f

Please sign in to comment.