Table of Contents
Connect to SpringAI to implement streaming conversation

Updated on: June 29, 2025
Recommendation
If you are a Java backend developer, this article is for you: it walks through using Spring AI to implement streaming conversations.
Core content:
1. Introduce SpringAI dependencies to simplify model use and management
2. Configure API and SpringBoot projects to start conversation applications with one click
3. Write AI applications to implement streaming conversations based on the DeepSeek model
Yang Fangxian
Founder of 53AI/Most Valuable Expert of Tencent Cloud (TVP)
<dependencyManagement>
    <dependencies>
        <!-- Spring AI's management dependencies (BOM): pins all Spring AI artifact versions -->
        <dependency>
            <groupId>org.springframework.ai</groupId>
            <artifactId>spring-ai-bom</artifactId>
            <version>${spring-ai.version}</version>
            <type>pom</type>
            <scope>import</scope>
        </dependency>
    </dependencies>
</dependencyManagement>
<!-- OpenAI-compatible chat starter; DeepSeek exposes an OpenAI-style API -->
<dependency>
    <groupId>org.springframework.ai</groupId>
    <artifactId>spring-ai-openai-spring-boot-starter</artifactId>
    <version>${spring-ai.version}</version>
</dependency>
<properties>
    <java.version>17</java.version>
    <spring-ai.version>1.0.0-M5</spring-ai.version>
</properties>
server:
  port: 8080

spring:
  application:
    name: ai-demo
  ai:
    openai:
      # DeepSeek exposes an OpenAI-compatible endpoint, so the OpenAI starter works as-is
      base-url: https://api.deepseek.com
      api-key: personal key  # TODO: replace with your real key; prefer an environment variable over committing it
      chat:
        options:
          model: deepseek-chat
          temperature: 0.7
/**
 * Spring configuration exposing a {@link ChatClient} bean built from the
 * auto-configured OpenAI-compatible chat model (pointed at DeepSeek via application.yml).
 */
@Configuration
public class ChatConfig {

    /**
     * Default ChatClient with no system prompt or advisors.
     *
     * @param model the auto-configured OpenAI-compatible chat model
     * @return a ready-to-use ChatClient
     */
    @Bean
    public ChatClient chatClient(OpenAiChatModel model) {
        return ChatClient.builder(model).build();
    }
}
/**
 * REST controller exposing a blocking chat endpoint backed by {@link ChatClient}.
 */
@RestController
public class ChatController {

    private final ChatClient chatClient;

    // Constructor injection for the final field (annotations were lost in the article's extraction)
    public ChatController(ChatClient chatClient) {
        this.chatClient = chatClient;
    }

    /**
     * Chat dialogue - blocking: waits for the complete model response before returning.
     *
     * @param message the user's message
     * @return the full assistant reply as a single string
     */
    @RequestMapping("/chat") // NOTE(review): path not visible in the original fragment — confirm
    public String chat(@RequestParam("message") String message) {
        return chatClient.prompt()
                .user(message)
                .call()
                .content();
    }
}
/**
* Chat Conversation - Streaming
*
* @param message
* @return
*/
"/stream" ,produces = "text/html;charset=utf-8" ) (value =
public Flux < String > chatStream ( ( "message" ) String message ) {
log.info ( " Streaming test..." );
return chatClient.prompt ( )
. user (message)
.stream ( )
.content () ;
}
/**
 * ChatClient with a default system prompt that fixes the assistant's persona
 * for every conversation.
 *
 * @param model the auto-configured OpenAI-compatible chat model
 * @return a ChatClient that always answers in the configured persona
 */
@Bean
public ChatClient chatClient(OpenAiChatModel model) {
    return ChatClient
            .builder(model)
            .defaultSystem("Your name is Xiao Ming, and you are a student. Please answer the questions in a student's tone.")
            .build();
}
/**
 * ChatClient wired with request/response logging and per-conversation memory.
 *
 * @param model      the auto-configured OpenAI-compatible chat model
 * @param chatMemory backing store for conversation history
 * @return a ChatClient whose advisors log traffic and inject prior messages
 */
@Bean
public ChatClient chatClient(OpenAiChatModel model, ChatMemory chatMemory) {
    return ChatClient
            .builder(model)
            .defaultSystem("Your name is Xiao Ming, and you are a student. Please answer the questions in a student's tone.")
            .defaultAdvisors(
                    new SimpleLoggerAdvisor(),               // logs prompts and responses
                    new MessageChatMemoryAdvisor(chatMemory) // replays conversation history into each prompt
            )
            .build();
}
/**
 * Conversation memory backed by the JVM heap: simple for demos, but all
 * history is lost when the application restarts.
 *
 * @return an in-memory ChatMemory store
 */
@Bean
public ChatMemory chatMemory() {
    return new InMemoryChatMemory();
}
/**
* Session memory has passed
* @param message
* @param chatId
* @return
*/
"/memoryChat" ,produces = "text/html;charset=utf-8" ) (value =
public Flux < String > memoryChat ( ( "message" ) String message, String chatId ) {
log.info ( " Streaming test..." );
return chatClient.prompt ( )
. user (message)
. advisors (a-> a. param ( CHAT_MEMORY_CONVERSATION_ID_KEY , chatId))
.stream ( )
.content () ;
}