https://ollama.com/download/windows
下载一个 Ollama 客户端。官网提供了各种不同版本的 Ollama，大家可以根据自己的需要下载：https://ollama.com/download/windows。
勾选 Web 和 Ollama
yml
#spring.application.name=demo9
spring:
  application:
    name: demo9
  ai:
    ollama:
      base-url: http://localhost:11434 # Ollama 默认地址
      chat:
        options:
          model: deepseek-r1:1.5b # 使用的模型名称（注意：模型配置在 chat.options 下）
编写 Configuration 配置类
package com.example.ai.config;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
// NOTE(review): the class name "configration" is misspelled; consider renaming it to
// ChatClientConfiguration in a follow-up change (kept as-is here so existing references
// to this class keep working).
@Configuration
public class configration {

    /**
     * Builds the shared {@link ChatClient} used by the controllers to talk to the
     * Ollama-served model configured in application.yml.
     *
     * @param model the auto-configured Ollama chat model
     * @return a {@code ChatClient} wrapping the given model
     */
    @Bean
    public ChatClient chatClient(OllamaChatModel model) {
        // Without @Configuration on the class and @Bean on this method, Spring never
        // registers a ChatClient bean and injection in DeepSeekR1Controller fails at
        // startup with NoSuchBeanDefinitionException.
        return ChatClient.builder(model)
                .build();
    }
}
编写 Controller 控制器
package com.example.ai.controller;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.client.DefaultChatClient;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST endpoint that forwards a user message to the DeepSeek-R1 model served by
 * Ollama and returns the complete answer as plain text.
 */
@RestController
public class DeepSeekR1Controller {

    // Injected via constructor; built by the configuration class's @Bean method.
    private final ChatClient chatClient;

    public DeepSeekR1Controller(ChatClient chatClient) {
        this.chatClient = chatClient;
    }

    /**
     * Blocking chat call: waits for the model's full completion before responding.
     *
     * @param message the user's prompt, bound from the {@code ?message=...} query parameter
     * @return the model's answer as a plain string
     */
    @GetMapping("/deepseek-chat")
    public String chat(@RequestParam String message) {
        return chatClient.prompt().user(message).call().content();
    }
}
流式
@RequestMapping(value = "/deepseek-chat", produces = "text/html;charset=utf-8")
public Flux<String> chatStream(String ){
return chatClient.prompt()
.user(m)
.stream()
.content();
}
运行