Spring Boot + HDFS

Integrating HDFS file operations into a Spring Boot application.

POM dependencies

```xml
<dependency>
    <groupId>io.springfox</groupId>
    <artifactId>springfox-swagger2</artifactId>
    <version>2.6.1</version>
</dependency>
<dependency>
    <groupId>io.springfox</groupId>
    <artifactId>springfox-swagger-ui</artifactId>
    <version>2.6.1</version>
</dependency>
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-web</artifactId>
    <version>2.1.2.RELEASE</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>3.1.1</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>3.1.1</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>3.1.1</version>
</dependency>
```

Utility class

(The original post showed the HadoopUtil utility class only as a screenshot.)
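Since the implementation is not reproduced in text, here is a minimal sketch of what `HadoopUtil.getFileSystem()` might look like; the NameNode URI and HDFS user name are assumptions and would come from your own configuration:

```java
package com.example.demo.Util;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class HadoopUtil {

    // Assumed defaults; replace with your cluster's NameNode URI and HDFS user
    private static final String HDFS_URI = "hdfs://localhost:9000";
    private static final String HDFS_USER = "hadoop";

    /**
     * Opens a fresh FileSystem handle; callers close it when done,
     * which matches how the controller below uses it.
     */
    public static FileSystem getFileSystem() throws Exception {
        return FileSystem.get(new URI(HDFS_URI), new Configuration(), HDFS_USER);
    }
}
```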

Various API calls

```java
package com.example.demo.Controller;

import com.example.demo.Util.HadoopUtil;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.*;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;

import java.io.File;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@RestController
@RequestMapping("/hadoop")
public class HadoopController {

/**
 * Create a directory.
 */
@PostMapping("/mkdir")
@ApiOperation("Create a directory")
public String mkdir(@RequestParam("path") String path) throws Exception {
    if (StringUtils.isEmpty(path)) {
        return "request parameter is empty";
    }
    // File system handle
    FileSystem fs = HadoopUtil.getFileSystem();
    // Target path on HDFS
    Path newPath = new Path(path);
    // Create the (empty) directory
    boolean isOk = fs.mkdirs(newPath);
    fs.close();
    if (isOk) {
        return "create dir success";
    } else {
        return "create dir fail";
    }
}


/**
 * Upload a single file.
 */
@PostMapping("/createFile")
@ApiOperation("Upload a file")
public String createFile(@RequestParam("path") @ApiParam("target HDFS directory") String path, @RequestParam("file") MultipartFile file) throws Exception {
    if (StringUtils.isEmpty(path)) {
        return "request parameter is empty";
    }
    String fileName = file.getOriginalFilename();
    FileSystem fs = HadoopUtil.getFileSystem();
    // Append the file name to the target directory
    Path newPath = new Path(path + "/" + fileName);
    // Open an output stream and write the uploaded bytes
    FSDataOutputStream outputStream = fs.create(newPath);
    outputStream.write(file.getBytes());
    outputStream.close();
    fs.close();
    return "create file success";

}



/**
 * Upload every file in a local directory.
 */
@PostMapping("/createFiles")
@ApiOperation("Upload all files in a local directory")
public String createFiles(@RequestParam("path") @ApiParam("target HDFS directory") String path, @RequestParam("file") @ApiParam("local directory") String file) throws Exception {
    if (StringUtils.isEmpty(path)) {
        return "request parameter is empty";
    }
    FileSystem fs = HadoopUtil.getFileSystem();
    File localDir = new File(file);
    // List everything inside the local directory
    File[] files = localDir.listFiles();
    if (files == null) {
        return "local directory does not exist";
    }
    for (File localFile : files) {
        // Append each file name to the target HDFS directory
        Path newPath = new Path(path + "/" + localFile.getName());
        // Open an output stream and write the local file's contents
        FSDataOutputStream outputStream = fs.create(newPath);
        outputStream.write(Files.readAllBytes(localFile.toPath()));
        outputStream.close();
    }

    fs.close();
    return "create file success";

}


/**
 * Download a file.
 */
@PostMapping("/downloadFile")
@ApiOperation("Download a file")
public String downloadFile(@RequestParam("path") @ApiParam("full HDFS path including the file name") String path, @RequestParam("downloadPath") @ApiParam("local directory") String downloadPath) throws Exception {
    FileSystem fs = HadoopUtil.getFileSystem();
    // Source path on HDFS
    Path clientPath = new Path(path);
    // Target path on the local file system
    Path serverPath = new Path(downloadPath);

    // Copy the file to the local file system; the first argument controls
    // whether the source is deleted (true = delete, default false)
    fs.copyToLocalFile(false, clientPath, serverPath);
    fs.close();
    return "download file success";
}

/**
 * Download all files in a directory.
 */
@PostMapping("/downloadFileBymkdir")
@ApiOperation("Download every file in an HDFS directory and return the file names")
public List<Map<String, String>> downloadFileBymkdir(@RequestParam("path") @ApiParam("HDFS directory") String path, @RequestParam("downloadPath") @ApiParam("local directory") String downloadPath) throws Exception {
    FileSystem fs = HadoopUtil.getFileSystem();
    // Source directory on HDFS
    Path clientPath = new Path(path);
    // Target directory on the local file system
    Path serverPath = new Path(downloadPath);

    RemoteIterator<LocatedFileStatus> filesList = fs.listFiles(clientPath, true);
    List<Map<String, String>> returnList = new ArrayList<>();
    while (filesList.hasNext()) {
        LocatedFileStatus next = filesList.next();
        String fileName = next.getPath().getName();
        Path filePath = next.getPath();
        Map<String, String> map = new HashMap<>();
        map.put("fileName", fileName);
        map.put("filePath", filePath.toString());
        returnList.add(map);
        // Copy each file to the local file system; the first argument controls
        // whether the source is deleted (true = delete, default false)
        fs.copyToLocalFile(false, filePath, serverPath);
    }

    fs.close();
    return returnList;
}


/**
 * Delete files.
 */
@DeleteMapping("/deleteFile")
@ApiOperation("Delete every file in an HDFS directory")
public String deleteFile(@RequestParam("hdfspath") @ApiParam("HDFS directory") String hdfspath) throws Exception {
    FileSystem fs = HadoopUtil.getFileSystem();
    Path path = new Path(hdfspath);
    RemoteIterator<LocatedFileStatus> filesList = fs.listFiles(path, true);
    while (filesList.hasNext()){
        LocatedFileStatus next = filesList.next();
        Path path1 = next.getPath();
        fs.delete(path1,true);
    }
    fs.close();

    return  "delete file success";
}

}
```
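The POM pulls in springfox-swagger2 and the controller uses @ApiOperation, but no Swagger configuration is shown; with springfox 2.x a Docket bean is required for the UI to pick up the endpoints. A minimal sketch (the class name and scanned package are assumptions matching the controller above):

```java
package com.example.demo.Config;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;

@Configuration
@EnableSwagger2
public class SwaggerConfig {

    // Scan the controller package so the /hadoop endpoints show up in swagger-ui
    @Bean
    public Docket api() {
        return new Docket(DocumentationType.SWAGGER_2)
                .select()
                .apis(RequestHandlerSelectors.basePackage("com.example.demo.Controller"))
                .paths(PathSelectors.any())
                .build();
    }
}
```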

To implement file upload and download with Spring Boot, HDFS, and MySQL, first set up the Hadoop and MySQL environments, then add the required dependencies and write the following code.

1. Configure HDFS

Add the following to application.properties:

```
# HDFS configuration
hadoop.hdfs.path=hdfs://localhost:9000
hadoop.hdfs.username=hadoop
```

2. Configure MySQL

Add the following to application.properties:

```
# MySQL configuration
spring.datasource.url=jdbc:mysql://localhost:3306/test
spring.datasource.username=root
spring.datasource.password=root
spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver
```

3. Add dependencies

Add the following to pom.xml:

```xml
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>3.2.1</version>
</dependency>
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>8.0.23</version>
</dependency>
```

4. Write the upload and download code

Upload:

```java
@Service
public class HdfsService {

    @Value("${hadoop.hdfs.path}")
    private String hdfsPath;

    @Value("${hadoop.hdfs.username}")
    private String hdfsUsername;

    @Value("${spring.servlet.multipart.location}")
    private String uploadPath;

    @Autowired
    private FileSystem fileSystem;

    @Autowired
    private JdbcTemplate jdbcTemplate;

    public void upload(MultipartFile file) throws IOException {
        String fileName = file.getOriginalFilename();
        String filePath = "/upload/" + fileName;
        Path path = new Path(hdfsPath + filePath);
        FSDataOutputStream outputStream = fileSystem.create(path);
        outputStream.write(file.getBytes());
        outputStream.close();
        jdbcTemplate.update("INSERT INTO file (name, path) VALUES (?, ?)", fileName, filePath);
    }
}
```

Download (in the same HdfsService class):

```java
    public void download(HttpServletResponse response, String fileName) throws IOException {
        String filePath = jdbcTemplate.queryForObject("SELECT path FROM file WHERE name = ?", String.class, fileName);
        Path path = new Path(hdfsPath + filePath);
        FSDataInputStream inputStream = fileSystem.open(path);
        response.setContentType("application/octet-stream");
        response.setHeader("Content-Disposition", "attachment; filename=\"" + fileName + "\"");
        IOUtils.copy(inputStream, response.getOutputStream());
        response.flushBuffer();
    }
```

This stores the uploaded file in HDFS and saves its name and path in MySQL; on download, the path is looked up in MySQL and the file is streamed into the HTTP response. Note that Apache Commons IO's IOUtils class is used to copy the stream into the response.

The controller then exposes the upload and download endpoints:

```java
@RestController
public class FileController {

    @Autowired
    private HdfsService hdfsService;

    @PostMapping("/upload")
    public void upload(@RequestParam("file") MultipartFile file) throws IOException {
        hdfsService.upload(file);
    }

    @GetMapping("/download")
    public void download(HttpServletResponse response, @RequestParam("fileName") String fileName) throws IOException {
        hdfsService.download(response, fileName);
    }
}
```

With that, file upload and download against HDFS and MySQL with Spring Boot is in place.
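One gap in the snippet above: HdfsService autowires a FileSystem bean that is never defined. A minimal sketch of such a configuration class, assuming the property names from application.properties above (the class name HdfsConfig is an assumption):

```java
import java.net.URI;

import org.apache.hadoop.fs.FileSystem;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class HdfsConfig {

    @Value("${hadoop.hdfs.path}")
    private String hdfsPath;

    @Value("${hadoop.hdfs.username}")
    private String hdfsUsername;

    // Expose a shared FileSystem handle as a Spring bean for injection into HdfsService
    @Bean
    public FileSystem fileSystem() throws Exception {
        return FileSystem.get(new URI(hdfsPath),
                new org.apache.hadoop.conf.Configuration(), hdfsUsername);
    }
}
```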