diff --git a/applications/spring-ai-demo/README.md b/applications/spring-ai-demo/README.md
index 64f9b40..fb162df 100644
--- a/applications/spring-ai-demo/README.md
+++ b/applications/spring-ai-demo/README.md
@@ -5,20 +5,30 @@
- Java 21+
- Langfuse stack ([Cloud](https://cloud.langfuse.com/) or [Self-Hosted](https://langfuse.com/docs/deployment/self-host))
- Langfuse API Keys
-- An OpenAI Api Key
+- An OpenAI or Google AI Studio API Key
## How to run
1. Configure environment variables to connect Spring AI demo app with Langfuse.
- ```
+ ```bash
export SPRING_AI_OPENAI_APIKEY="sk-proj-xxx"
+ # export SPRING_AI_GOOGLE_GENAI_APIKEY="AI..."
+ # export SPRING_PROFILES_ACTIVE="google" # openai is used by default
+
+ export OTEL_EXPORTER_OTLP_HEADERS="Authorization=Basic $(echo -n "pk-lf-xxx:sk-lf-xxx" | base64)"
export OTEL_EXPORTER_OTLP_ENDPOINT="https://cloud.langfuse.com/api/public/otel" # πͺπΊ EU data region
# export OTEL_EXPORTER_OTLP_ENDPOINT="https://us.cloud.langfuse.com/api/public/otel" # πΊπΈ US data region
# export OTEL_EXPORTER_OTLP_ENDPOINT="http://localhost:3000/api/public/otel" # π Local deployment (>= v3.22.0)
- export OTEL_EXPORTER_OTLP_HEADERS="Authorization=Basic $(echo -n "pk-lf-xxx:sk-lf-xxx" | base64)"
```
-2. Run the sample application with `./mvnw clean install spring-boot:run`.
-3. Call the chat endpoint with `curl localhost:8080/v1/chat`.
-4. Observe the new trace in the Langfuse web UI.
+2. Run the sample application with
+ ```bash
+ ./mvnw clean install spring-boot:run
-
+ # Or use docker compose from the docker/ folder
+ docker compose -f docker/docker-compose.yml up --build
+ ```
+3. Call the chat endpoint with
+ ```bash
+ curl localhost:8080/v1/chat
+ ```
+4. Observe the new trace in the Langfuse web UI.
diff --git a/applications/spring-ai-demo/docker/Dockerfile b/applications/spring-ai-demo/docker/Dockerfile
new file mode 100644
index 0000000..2a2cdde
--- /dev/null
+++ b/applications/spring-ai-demo/docker/Dockerfile
@@ -0,0 +1,14 @@
+# syntax=docker/dockerfile:1.6
+FROM eclipse-temurin:21-jdk AS builder
+WORKDIR /workspace
+COPY .mvn/ .mvn/
+COPY mvnw pom.xml ./
+RUN chmod +x mvnw && ./mvnw -B dependency:go-offline
+COPY src ./src
+RUN ./mvnw -B clean package -DskipTests
+
+FROM eclipse-temurin:21-jre
+WORKDIR /workspace
+COPY --from=builder /workspace/target/spring-ai-demo-0.0.1-SNAPSHOT.jar app.jar
+EXPOSE 8080
+ENTRYPOINT ["java","-jar","/workspace/app.jar"]
diff --git a/applications/spring-ai-demo/docker/docker-compose.yml b/applications/spring-ai-demo/docker/docker-compose.yml
new file mode 100644
index 0000000..b966c3f
--- /dev/null
+++ b/applications/spring-ai-demo/docker/docker-compose.yml
@@ -0,0 +1,14 @@
+services:
+ spring-ai-demo:
+ build:
+ context: ..
+ dockerfile: docker/Dockerfile
+ ports:
+ - "8080:8080"
+ environment:
+ OTEL_EXPORTER_OTLP_ENDPOINT: ${OTEL_EXPORTER_OTLP_ENDPOINT:-https://cloud.langfuse.com/api/public/otel}
+      OTEL_EXPORTER_OTLP_HEADERS: ${OTEL_EXPORTER_OTLP_HEADERS:-}
+      SPRING_AI_GOOGLE_GENAI_APIKEY: ${SPRING_AI_GOOGLE_GENAI_APIKEY:-}
+      SPRING_AI_OPENAI_APIKEY: ${SPRING_AI_OPENAI_APIKEY:-}
+ SPRING_PROFILES_ACTIVE: ${SPRING_PROFILES_ACTIVE:-openai}
+ restart: unless-stopped
diff --git a/applications/spring-ai-demo/pom.xml b/applications/spring-ai-demo/pom.xml
index cfb171b..0f8c749 100644
--- a/applications/spring-ai-demo/pom.xml
+++ b/applications/spring-ai-demo/pom.xml
@@ -17,7 +17,7 @@
21
- 1.0.0
+ 1.1.0
@@ -48,6 +48,10 @@
org.springframework.ai
spring-ai-starter-model-openai
+
+ org.springframework.ai
+ spring-ai-starter-model-google-genai
+
org.springframework.boot
diff --git a/applications/spring-ai-demo/src/main/java/com/langfuse/springai/ChatModelCompletionContentObservationFilter.java b/applications/spring-ai-demo/src/main/java/com/langfuse/springai/ChatModelCompletionContentObservationFilter.java
index 66535ab..84361a1 100644
--- a/applications/spring-ai-demo/src/main/java/com/langfuse/springai/ChatModelCompletionContentObservationFilter.java
+++ b/applications/spring-ai-demo/src/main/java/com/langfuse/springai/ChatModelCompletionContentObservationFilter.java
@@ -63,4 +63,3 @@ private List processCompletion(ChatModelObservationContext context) {
}
}
}
-
diff --git a/applications/spring-ai-demo/src/main/java/com/langfuse/springai/ChatModelProfileConfig.java b/applications/spring-ai-demo/src/main/java/com/langfuse/springai/ChatModelProfileConfig.java
new file mode 100644
index 0000000..251776a
--- /dev/null
+++ b/applications/spring-ai-demo/src/main/java/com/langfuse/springai/ChatModelProfileConfig.java
@@ -0,0 +1,26 @@
+package com.langfuse.springai;
+
+import org.springframework.ai.chat.model.ChatModel;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Primary;
+import org.springframework.context.annotation.Profile;
+
+@Configuration
+public class ChatModelProfileConfig {
+
+ @Bean
+ @Primary
+ @Profile("openai")
+    public ChatModel primaryOpenAiChatModel(@Qualifier("openAiChatModel") ChatModel delegate) {
+ return delegate;
+ }
+
+ @Bean
+ @Primary
+ @Profile("google")
+    public ChatModel primaryGoogleGenAiChatModel(@Qualifier("googleGenAiChatModel") ChatModel delegate) {
+ return delegate;
+ }
+}
diff --git a/applications/spring-ai-demo/src/main/java/com/langfuse/springai/ChatService.java b/applications/spring-ai-demo/src/main/java/com/langfuse/springai/ChatService.java
index 74c0dbc..b19a63d 100644
--- a/applications/spring-ai-demo/src/main/java/com/langfuse/springai/ChatService.java
+++ b/applications/spring-ai-demo/src/main/java/com/langfuse/springai/ChatService.java
@@ -21,7 +21,7 @@ public ChatService(ChatClient.Builder builder) {
@EventListener(ApplicationReadyEvent.class)
public String testAiCall() {
LOGGER.info("Invoking LLM");
- String answer = chatClient.prompt("Reply with the word 'java'").call().content();
+ String answer = chatClient.prompt("Tell the current UTC time and tell a joke about java programmers.").call().content();
LOGGER.info("AI answered: {}", answer);
return answer;
}
diff --git a/applications/spring-ai-demo/src/main/resources/application.yml b/applications/spring-ai-demo/src/main/resources/application.yml
index ce31f9e..8d2faa2 100644
--- a/applications/spring-ai-demo/src/main/resources/application.yml
+++ b/applications/spring-ai-demo/src/main/resources/application.yml
@@ -6,6 +6,7 @@ spring:
observations:
log-prompt: true # Include prompt content in tracing (disabled by default for privacy)
log-completion: true # Include completion content in tracing (disabled by default)
+
management:
tracing:
sampling:
@@ -13,3 +14,32 @@ management:
observations:
annotations:
enabled: true # Enable @Observed (if you use observation annotations in code)
+
+otel:
+ logs:
+ exporter: none # Disable OTLP log export (Langfuse expects traces only)
+
+---
+
+spring:
+ config:
+ activate:
+ on-profile: google
+ ai:
+ google:
+ genai:
+ chat:
+ options:
+ model: gemini-2.0-flash
+
+---
+
+spring:
+ config:
+ activate:
+ on-profile: openai
+ ai:
+ openai:
+ chat:
+ options:
+ model: gpt-4o-mini