Spring AI with a local DeepSeek model for chat (simple steps, beginner friendly)

1. First, install Ollama

For the installation steps and model download, see the CSDN tutorial "Ollama的安装以及大模型下载教程" (Ollama installation and large-model download guide).

I won't repeat those steps here.
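For reference, pulling a DeepSeek model and checking it with Ollama usually looks like the commands below; the deepseek-r1:1.5b tag is only an example, use whichever size you downloaded:

ollama pull deepseek-r1:1.5b    # download the model
ollama run deepseek-r1:1.5b     # chat with it in the terminal to verify it works
ollama list                     # show installed models; this exact name goes into application.yml later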

2. Next, create a Spring Boot project locally and add the dependencies

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
	<modelVersion>4.0.0</modelVersion>
	<parent>
		<groupId>org.springframework.boot</groupId>
		<artifactId>spring-boot-starter-parent</artifactId>
		<version>3.3.0</version>
		<relativePath/> <!-- lookup parent from repository -->
	</parent>

	<groupId>com.example</groupId>
	<artifactId>springAi</artifactId>
	<version>0.0.1-SNAPSHOT</version>
	<name>springAi</name>
	<description>Demo project for Spring Boot</description>
	<properties>
		<java.version>17</java.version>
		<!-- Snapshot version -->
		<spring-ai.version>1.0.0-SNAPSHOT</spring-ai.version>
	</properties>
	<dependencies>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-web</artifactId>
		</dependency>
		<!-- Spring AI Ollama integration (version managed by the spring-ai-bom below) -->
		<dependency>
			<groupId>org.springframework.ai</groupId>
			<artifactId>spring-ai-ollama-spring-boot-starter</artifactId>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-devtools</artifactId>
			<scope>runtime</scope>
			<optional>true</optional>
		</dependency>
		<dependency>
			<groupId>org.projectlombok</groupId>
			<artifactId>lombok</artifactId>
			<optional>true</optional>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-test</artifactId>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-webflux</artifactId>
		</dependency>

	</dependencies>
	<dependencyManagement>
		<dependencies>
			<dependency>
				<groupId>org.springframework.ai</groupId>
				<artifactId>spring-ai-bom</artifactId>
				<version>${spring-ai.version}</version>
				<type>pom</type>
				<scope>import</scope>
			</dependency>
		</dependencies>
	</dependencyManagement>

	<build>
		<plugins>
			<plugin>
				<groupId>org.springframework.boot</groupId>
				<artifactId>spring-boot-maven-plugin</artifactId>
				<configuration>
					<excludes>
						<exclude>
							<groupId>org.projectlombok</groupId>
							<artifactId>lombok</artifactId>
						</exclude>
					</excludes>
				</configuration>
			</plugin>
		</plugins>
	</build>
	<!-- Snapshot repository needed for the Spring AI snapshot version -->
	<repositories>
		<repository>
			<id>spring-snapshot</id>
			<name>Spring Snapshots</name>
			<url>https://repo.spring.io/snapshot</url>
			<releases>
				<enabled>false</enabled>
			</releases>
		</repository>
	</repositories>
</project>

Note: change the groupId, artifactId, and name to match your own project.

 

3. Write the Java code

The project structure is as follows:
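A sketch of the expected layout, based on the package and file names used in the code below (adjust them to your own project):

springAi
├── pom.xml
└── src/main
    ├── java/com/example/springAi
    │   ├── SpringAiApplication.java
    │   └── OllamaController.java
    └── resources
        └── application.yml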

Here is the OllamaController code:

package com.example.springAi;

import jakarta.annotation.Resource;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.*;

import org.springframework.ai.ollama.OllamaChatModel;

import reactor.core.publisher.Flux;

@RestController
public class OllamaController {

    @Resource
    private OllamaChatModel ollamaChatModel;


    // Synchronous endpoint: blocks until the model has produced the complete reply
    @GetMapping("/ai")
    public String ollama(@RequestParam String msg) {
        String reply = ollamaChatModel.call(msg); // call the model once and reuse the result
        System.out.println(reply);                // log the full reply to the console
        return reply;
    }

    // Streaming endpoint: returns the reply as Server-Sent Events, token by token.
    // The explicit charset keeps Chinese characters from being garbled in the stream.
    @GetMapping(value = "/ai/stream", produces = MediaType.TEXT_EVENT_STREAM_VALUE + ";charset=UTF-8")
    public Flux<String> streamResponse(@RequestParam String msg) {
        return ollamaChatModel.stream(msg)
                .doOnNext(System.out::println);   // log each chunk as it arrives
    }

    // Simple test endpoint
    @GetMapping("/test")
    public String index() {
        return "Greetings from Spring Boot!";
    }
}

Here is the SpringAiApplication code:

package com.example.springAi;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class SpringAiApplication {

	public static void main(String[] args) {
		SpringApplication.run(SpringAiApplication.class, args);
	}

}

4. Be sure to configure the application.yml file

Things to note:

1. The 11434 port in the base-url is Ollama's default; if you haven't changed anything, that is what Ollama listens on. To use a different port, set an environment variable named OLLAMA_HOST to the address and port you want (make sure the port is free) and restart Ollama.

2. Set the model name to the model you pulled earlier with Ollama; you can check the exact name in the cmd window with ollama list. A minimal example configuration is shown below.
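A minimal application.yml for this setup could look like the following; the deepseek-r1:1.5b tag is only an example, replace it with the exact name shown by ollama list on your machine:

spring:
  application:
    name: springAi
  ai:
    ollama:
      base-url: http://localhost:11434   # Ollama's default address and port
      chat:
        options:
          model: deepseek-r1:1.5b        # must match a model you have pulled locally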

5. Check the results

The first endpoint streams the output: tokens are pushed to the client as they are generated.

The second endpoint only responds once the model has finished generating the whole reply. Example calls for both are shown below.
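Assuming the application runs on the default port 8080, you can try both endpoints in a browser or with curl, for example:

http://localhost:8080/ai/stream?msg=hello    # streaming: tokens appear as they are generated
http://localhost:8080/ai?msg=hello           # blocking: returns only after the full reply is ready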
