Spring AI SimpleVectorStore is an in-memory implementation of the VectorStore interface. It is a good fit for learning and educational purposes. SimpleVectorStore also provides methods to save the current state of the vectors to a file and to load them back from that file.
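Before looking at the full application, here is a minimal sketch of that lifecycle. The document text, query, and file name are illustrative only, and the EmbeddingModel is assumed to come from elsewhere (in this article, the Ollama auto-configuration):
void sketchSimpleVectorStore(EmbeddingModel embeddingModel) {
    // Build the in-memory store on top of an embedding model.
    SimpleVectorStore store = SimpleVectorStore.builder(embeddingModel).build();
    // Embed and index a document.
    store.add(List.of(new Document("Mono emits at most one item; Flux emits zero or more.")));
    // Retrieve the documents most similar to a query.
    List<Document> results = store.similaritySearch("What are Mono and Flux");
    // Persist the embedded vectors, and reload them on a later run.
    store.save(new File("vectorStore.json"));
    store.load(new File("vectorStore.json"));
}
The REST controller below wires the store into a ChatClient through a QuestionAnswerAdvisor, so that documents matching the incoming question are retrieved from the vector store and added to the prompt: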
package com.example.springai.controller;

import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.client.advisor.QuestionAnswerAdvisor;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.vectorstore.VectorStore;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class SpringAiController {

    private final ChatClient chatClient;

    @Autowired
    private VectorStore simpleVectorStore;

    public SpringAiController(ChatClient.Builder builder) {
        this.chatClient = builder.build();
    }

    @GetMapping("/simpleVectorStore")
    public String simpleVectorStore(@RequestParam(value = "question", defaultValue = "What are Mono and Flux") String question) {
        // The QuestionAnswerAdvisor runs a similarity search against the vector store
        // and appends the matching documents to the prompt before it is sent to the model.
        ChatResponse response = chatClient.prompt()
                .advisors(new QuestionAnswerAdvisor(simpleVectorStore))
                .user(question)
                .call()
                .chatResponse();
        return response.getResult().getOutput().getContent();
    }
}
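The SimpleVectorStore itself is defined as a bean in the main application class. On the first run it reads spring-faq.txt, splits it into token-sized chunks, embeds and indexes them, and saves the result to vectorStore.json; on subsequent runs it simply loads that file instead of re-computing the embeddings: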
package com.example.springai;

import org.springframework.ai.document.Document;
import org.springframework.ai.embedding.EmbeddingModel;
import org.springframework.ai.reader.TextReader;
import org.springframework.ai.transformer.splitter.TokenTextSplitter;
import org.springframework.ai.vectorstore.SimpleVectorStore;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.core.io.Resource;

import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;

@SpringBootApplication
public class SpringAiApplication {

    @Value("vectorStore.json")
    private String vectorStoreFileName;

    @Value("classpath:/faq/spring-faq.txt")
    private Resource springFaq;

    public static void main(String[] args) {
        SpringApplication.run(SpringAiApplication.class, args);
    }

    @Bean
    SimpleVectorStore simpleVectorStore(EmbeddingModel embeddingModel) {
        System.out.println("SpringAiApplication.simpleVectorStore");
        SimpleVectorStore simpleVectorStore = SimpleVectorStore.builder(embeddingModel).build();
        File vectorStoreFile = getVectorStoreFile();
        if (vectorStoreFile.exists() && vectorStoreFile.length() != 0) {
            // Reuse the embeddings computed on an earlier run.
            simpleVectorStore.load(vectorStoreFile);
        } else {
            // Read the FAQ, split it into token-sized chunks, embed them, and persist the result.
            TextReader textReader = new TextReader(springFaq);
            textReader.getCustomMetadata().put("filename", "spring-faq.txt");
            List<Document> documents = textReader.get();
            TokenTextSplitter tokenTextSplitter = new TokenTextSplitter();
            List<Document> splitDocuments = tokenTextSplitter.apply(documents);
            simpleVectorStore.add(splitDocuments);
            simpleVectorStore.save(vectorStoreFile);
        }
        return simpleVectorStore;
    }

    private File getVectorStoreFile() {
        System.out.println("SpringAiApplication.getVectorStoreFile");
        // Resolve the file in a platform-independent way instead of hard-coding a path separator.
        Path path = Paths.get("src", "main", "resources", "data");
        return path.toAbsolutePath().resolve(vectorStoreFileName).toFile();
    }
}
Create a file named spring-faq.txt in the src/main/resources/faq/ directory and add Spring Boot related content to it. This is the content that gets embedded and stored in the vector store.
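For example, the file could contain entries such as the following (illustrative content only):

What are Mono and Flux?
Mono and Flux are the two reactive Publisher types provided by Project Reactor: a Mono emits at most one item, while a Flux emits zero or more items.

What is Spring Boot?
Spring Boot is an opinionated way to build stand-alone, production-ready Spring applications with minimal configuration.

With the FAQ in place, application.properties configures the Ollama integration and the Docker Compose lifecycle: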
spring.application.name=SpringAi
spring.docker.compose.lifecycle-management=start-only
spring.threads.virtual.enabled=true
# The default Ollama model in Spring AI is mistral, but it can be changed by setting the property below. Make sure the same model is pulled in entrypoint.sh.
#spring.ai.ollama.chat.options.model=llama3.1
# If running the Ollama Docker Instance separately, then set this property
spring.docker.compose.enabled=false
spring.ai.ollama.embedding.model=mistral
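The docker-compose.yml used by spring-boot-docker-compose starts an Ollama container with a custom entrypoint that pulls the model, plus an optional Open WebUI container for inspecting Ollama from the browser: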
services:
  ollama-model:
    image: ollama/ollama:latest
    container_name: ollama_container
    ports:
      - 11434:11434/tcp
    healthcheck:
      test: ollama --version || exit 1
    command: serve
    volumes:
      - ./ollama/ollama:/root/.ollama
      - ./entrypoint.sh:/entrypoint.sh
    pull_policy: missing
    tty: true
    restart: "no"
    entrypoint: [ "/usr/bin/bash", "/entrypoint.sh" ]
  open-webui:
    image: ghcr.io/open-webui/open-webui:main
    container_name: open_webui_container
    environment:
      WEBUI_AUTH: false
    ports:
      - "8081:8080"
    extra_hosts:
      - "host.docker.internal:host-gateway"
    volumes:
      - open-webui:/app/backend/data
    restart: "no"

volumes:
  open-webui:
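entrypoint.sh starts the Ollama server inside the container and pulls the mistral model before the application connects to it: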
#!/bin/bash
# Start Ollama in the background.
/bin/ollama serve &
# Record Process ID.
pid=$!
# Pause for Ollama to start.
sleep 5
# The default Ollama model in Spring AI is mistral, but it can be changed in the application's properties file. Make sure to pull the same model here.
echo "🔴 Retrieve mistral model..."
ollama pull mistral
echo "🟢 Done!"
# Wait for the Ollama process to finish.
wait $pid
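The Maven build brings in the Spring AI Ollama starter and the Spring AI BOM (a snapshot version at the time of writing), along with the Docker Compose support and the milestone/snapshot repositories needed to resolve the Spring AI artifacts: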
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>3.3.2</version>
        <relativePath/>
    </parent>
    <groupId>com.example.springai</groupId>
    <artifactId>simple-vector-store</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>Simple Vector Store</name>
    <description>Demo project for Spring Boot</description>
    <properties>
        <java.version>21</java.version>
        <spring-ai.version>1.0.0-SNAPSHOT</spring-ai.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.ai</groupId>
            <artifactId>spring-ai-ollama-spring-boot-starter</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-docker-compose</artifactId>
            <scope>runtime</scope>
            <optional>true</optional>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>
    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>org.springframework.ai</groupId>
                <artifactId>spring-ai-bom</artifactId>
                <version>${spring-ai.version}</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>
    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <configuration>
                    <mainClass>com.example.springai.SpringAiApplication</mainClass>
                    <excludes>
                        <exclude>
                            <groupId>org.projectlombok</groupId>
                            <artifactId>lombok</artifactId>
                        </exclude>
                    </excludes>
                </configuration>
            </plugin>
        </plugins>
    </build>
    <repositories>
        <repository>
            <id>spring-milestones</id>
            <name>Spring Milestones</name>
            <url>https://repo.spring.io/milestone</url>
            <snapshots>
                <enabled>false</enabled>
            </snapshots>
        </repository>
        <repository>
            <id>spring-snapshots</id>
            <name>Spring Snapshots</name>
            <url>https://repo.spring.io/snapshot</url>
            <releases>
                <enabled>false</enabled>
            </releases>
        </repository>
    </repositories>
</project>
Run the following curl command to see the Spring AI SimpleVectorStore in action:
curl --location 'localhost:8080/simpleVectorStore'
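The question request parameter can also be supplied explicitly; the query below is only an example:
curl --location 'localhost:8080/simpleVectorStore?question=What%20is%20Spring%20Boot'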