Calling DeepSeek from Java via the Ollama platform

1. Add the JDK 8-compatible dependency

<dependency>
    <groupId>io.github.lnyo-cly</groupId>
    <artifactId>ai4j-spring-boot-stater</artifactId>
    <version>0.7.0</version>
</dependency>
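With this starter on the classpath, the AiService entry point injected in step 3 is supplied by the starter's auto-configuration, so no manual bean wiring is needed for this example.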

2. Configure bootstrap.yml

ai:
  ollama:
    api-host: http://localhost:11434
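This points the starter at the local Ollama HTTP endpoint (11434 is Ollama's default port). The steps below assume Ollama is already running and that the model referenced in the controller has been pulled beforehand, for example with ollama pull deepseek-r1:1.5b.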

3. Write the controller

package com.ronshi.ai;

import io.github.lnyocly.ai4j.listener.SseListener;
import io.github.lnyocly.ai4j.platform.openai.chat.entity.ChatCompletion;
import io.github.lnyocly.ai4j.platform.openai.chat.entity.ChatCompletionResponse;
import io.github.lnyocly.ai4j.platform.openai.chat.entity.ChatMessage;
import io.github.lnyocly.ai4j.service.IChatService;
import io.github.lnyocly.ai4j.service.PlatformType;
import io.github.lnyocly.ai4j.service.factor.AiService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import javax.servlet.http.HttpServletResponse;
import java.io.PrintWriter;

/**
 * @author ronshi
 * @date 2025/2/17 16:31
 */
@RestController
public class AiController {
    @Autowired
    private AiService aiService;
    
    @GetMapping("/chat")
    public String getChatMessage(@RequestParam String question) throws Exception {
        // Get the chat service for the OLLAMA platform
        IChatService chatService = aiService.getChatService(PlatformType.OLLAMA);
        
        // Build the request parameters
        ChatCompletion chatCompletion = ChatCompletion.builder()
                .model("deepseek-r1:1.5b")
                .message(ChatMessage.withUser(question))
                .build();
        
        
        System.out.println(chatCompletion);
        
        // Send the chat request
        ChatCompletionResponse chatCompletionResponse = chatService.chatCompletion(chatCompletion);
        
        // Extract the reply content and the token usage
        String content = chatCompletionResponse.getChoices().get(0).getMessage().getContent();
        long totalTokens = chatCompletionResponse.getUsage().getTotalTokens();
        System.out.println("总token消耗: " + totalTokens);
        
        return content;
    }
    
    
    @GetMapping("/chatStream")
    public void getChatMessageStream(@RequestParam String question, HttpServletResponse response) throws Exception {
        // Use UTF-8 so Chinese characters are not garbled in the response
        response.setCharacterEncoding("UTF-8");
        
        // Get the chat service for the OLLAMA platform
        IChatService chatService = aiService.getChatService(PlatformType.OLLAMA);
        
        // Build the request parameters
        ChatCompletion chatCompletion = ChatCompletion.builder()
                .model("deepseek-r1:1.5b")
                .message(ChatMessage.withUser(question))
                .build();
        
        
        PrintWriter writer = response.getWriter();
        // Listener that writes each streamed chunk to the response as it arrives
        SseListener sseListener = new SseListener() {
            @Override
            protected void send() {
                writer.write(this.getCurrStr());
                writer.flush();
                System.out.println(this.getCurrStr());
            }
        };
        chatService.chatCompletionStream(chatCompletion, sseListener);
        writer.close();
        System.out.println(sseListener.getOutput());
        
    }
    
}
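A note on the two endpoints: /chat blocks until the model has produced the whole answer and returns it in one response, while /chatStream flushes every chunk handed to the SseListener's send() callback straight to the HTTP response, so the client sees tokens as they are generated; sseListener.getOutput() is only read afterwards to log the accumulated reply on the server side.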

4. Plain (non-streaming) output

http://localhost:8080/chat?question=地球的组成
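Opening this URL in a browser returns the complete answer in a single response once the model finishes. The endpoint can also be called from a plain JDK 8 client; the sketch below is only an illustration (the class name ChatClientDemo and the hard-coded question are made up for the example) and assumes the application is running on port 8080 as above.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class ChatClientDemo {
    public static void main(String[] args) throws Exception {
        // URL-encode the question so non-ASCII text survives the query string
        String question = URLEncoder.encode("地球的组成", "UTF-8");
        URL url = new URL("http://localhost:8080/chat?question=" + question);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        } finally {
            conn.disconnect();
        }
    }
}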

5. Streaming output

<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8">
    <title>AI Chat</title>
</head>
<body>
<input id="question" type="text" placeholder="输入需要提问的问题"/>

<button id="startButton">开始</button>

<div id="output"></div>


<script>
    const input = document.getElementById("question");
    const outputDiv = document.getElementById('output');
    const startButton = document.getElementById('startButton');

    async function getResponse(){
        const question = input.value;
        const resp = await fetch("http://localhost:8080/chatStream" + "?question=" + encodeURIComponent(question),{
            method: 'GET'
        })

        const reader = resp.body.getReader();
        const textDecoder = new TextDecoder();
        while (1){
            const { done , value } = await reader.read()
            if(done) break;
            const str = textDecoder.decode(value);
            outputDiv.innerText += str;
            console.log(str)
        }
    }
    startButton.addEventListener("click", getResponse)
</script>
</body>
</html>
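One practical note: if this page is opened straight from the filesystem or served from a different port, the browser's same-origin policy will block both fetch calls. The simplest fix is to save the file under src/main/resources/static in the Spring Boot project so it is served from the same origin; alternatively, cross-origin requests can be allowed on the controller. The snippet below is a minimal sketch of the latter using Spring's standard @CrossOrigin annotation (the wildcard origin is only appropriate for local testing):

import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.RestController;

// Allow the demo page to call /chat and /chatStream from another origin.
// Restrict origins to a specific host outside of local experiments.
@CrossOrigin(origins = "*")
@RestController
public class AiController {
    // ... endpoints from step 3 unchanged ...
}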

 
