POM dependencies
<dependency>
    <groupId>io.springfox</groupId>
    <artifactId>springfox-swagger2</artifactId>
    <version>2.6.1</version>
</dependency>
<dependency>
    <groupId>io.springfox</groupId>
    <artifactId>springfox-swagger-ui</artifactId>
    <version>2.6.1</version>
</dependency>
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-web</artifactId>
    <version>2.1.2.RELEASE</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>3.1.1</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>3.1.1</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>3.1.1</version>
</dependency>

Utility class
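The controller below relies on a HadoopUtil helper class whose source is not included here. A minimal sketch of what it might look like, assuming a NameNode at hdfs://localhost:9000 and the user hadoop (both placeholders; substitute your cluster's values):

package com.example.demo.Util;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import java.net.URI;
public class HadoopUtil {
    // placeholder values; replace with your NameNode address and HDFS user
    private static final String HDFS_URI = "hdfs://localhost:9000";
    private static final String HDFS_USER = "hadoop";
    /**
     * Opens a FileSystem handle against the configured cluster.
     * The controller methods below close it after each request.
     */
    public static FileSystem getFileSystem() throws Exception {
        return FileSystem.get(new URI(HDFS_URI), new Configuration(), HDFS_USER);
    }
}

Passing the user name to FileSystem.get makes the connection act as that user, which avoids permission errors when the service itself does not run as the owner of the target paths.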

The API endpoints
package com.example.demo.Controller;
import com.example.demo.Util.HadoopUtil;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.*;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import java.io.File;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@RestController
@RequestMapping("/hadoop")
public class HadoopController {
    /**
     * Create a directory
     */
    @PostMapping("/mkdir")
    @ApiOperation("Create a directory")
    public String mkdir(@RequestParam("path") String path) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return "request parameter is empty";
        }
        // file system handle
        FileSystem fs = HadoopUtil.getFileSystem();
        // target path
        Path newPath = new Path(path);
        // create the (empty) directory
        boolean isOk = fs.mkdirs(newPath);
        fs.close();
        if (isOk) {
            return "create dir success";
        } else {
            return "create dir fail";
        }
    }
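    // Note: FileSystem.mkdirs also creates any missing parent directories,
    // much like `mkdir -p`. To report an already-existing path separately,
    // the target can be checked first (sketch):
    //     if (fs.exists(newPath)) { return "dir already exists"; }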
    /**
     * Upload a file
     */
    @PostMapping("/createFile")
    @ApiOperation("Upload a file")
    public String createFile(@RequestParam("path") @ApiParam("target HDFS directory") String path, @RequestParam("file") MultipartFile file) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return "request parameter is empty";
        }
        String fileName = file.getOriginalFilename();
        FileSystem fs = HadoopUtil.getFileSystem();
        // target path: the given directory with the original file name appended
        Path newPath = new Path(path + "/" + fileName);
        // open an output stream and write the uploaded bytes
        FSDataOutputStream outputStream = fs.create(newPath);
        outputStream.write(file.getBytes());
        outputStream.close();
        fs.close();
        return "create file success";
    }
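    /**
     * Variant sketch: file.getBytes() buffers the whole upload in memory, so
     * for large files a streamed copy is safer. The endpoint name and the
     * 4096-byte buffer size below are illustrative choices.
     */
    @PostMapping("/createFileStreamed")
    @ApiOperation("Upload a file to HDFS with a streamed copy")
    public String createFileStreamed(@RequestParam("path") @ApiParam("target HDFS directory") String path, @RequestParam("file") MultipartFile file) throws Exception {
        FileSystem fs = HadoopUtil.getFileSystem();
        Path newPath = new Path(path + "/" + file.getOriginalFilename());
        try (FSDataOutputStream outputStream = fs.create(newPath);
             java.io.InputStream in = file.getInputStream()) {
            // copy in 4 KB chunks; 'false' leaves closing to try-with-resources
            org.apache.hadoop.io.IOUtils.copyBytes(in, outputStream, 4096, false);
        }
        fs.close();
        return "create file success";
    }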
    /**
     * Upload every file inside a local directory
     */
    @PostMapping("/createFiles")
    @ApiOperation("Upload all files inside a local directory")
    public String createFiles(@RequestParam("path") @ApiParam("target HDFS directory") String path, @RequestParam("file") @ApiParam("local directory") String file) throws Exception {
        if (StringUtils.isEmpty(path)) {
            return "request parameter is empty";
        }
        FileSystem fs = HadoopUtil.getFileSystem();
        File dir = new File(file);
        // list everything inside the local directory (null if it is not a directory)
        File[] files = dir.listFiles();
        if (files == null) {
            fs.close();
            return "not a directory: " + file;
        }
        for (File localFile : files) {
            // target path: the HDFS directory with the local file name appended
            Path newPath = new Path(path + "/" + localFile.getName());
            // open an output stream and write the local file's contents
            FSDataOutputStream outputStream = fs.create(newPath);
            outputStream.write(Files.readAllBytes(localFile.toPath()));
            outputStream.close();
        }
        fs.close();
        return "create file success";
    }
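    /**
     * Variant sketch: the per-file loop above can be replaced by a single
     * recursive copy, since copyFromLocalFile handles directory trees itself.
     * The endpoint name is illustrative.
     */
    @PostMapping("/createFilesRecursive")
    @ApiOperation("Upload a local directory tree to HDFS in one call")
    public String createFilesRecursive(@RequestParam("path") @ApiParam("target HDFS directory") String path, @RequestParam("file") @ApiParam("local directory") String file) throws Exception {
        FileSystem fs = HadoopUtil.getFileSystem();
        // first argument: whether to delete the local source after copying
        fs.copyFromLocalFile(false, new Path(file), new Path(path));
        fs.close();
        return "create files success";
    }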
    /**
     * Download a file
     */
    @PostMapping("/downloadFile")
    @ApiOperation("Download a file")
    public String downloadFile(@RequestParam("path") @ApiParam("HDFS path including the file name") String path, @RequestParam("downloadPath") @ApiParam("local target directory") String downloadPath) throws Exception {
        FileSystem fs = HadoopUtil.getFileSystem();
        // HDFS source path
        Path clientPath = new Path(path);
        // local target path
        Path serverPath = new Path(downloadPath);
        // copy out of HDFS; the first argument deletes the source when true (default false)
        fs.copyToLocalFile(false, clientPath, serverPath);
        fs.close();
        return "download file success";
    }
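    // Note: copyToLocalFile writes a .crc checksum sidecar next to the local
    // copy. The four-argument overload skips it by writing through the raw
    // local filesystem (last argument 'true'):
    //     fs.copyToLocalFile(false, clientPath, serverPath, true);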
    /**
     * Download every file inside an HDFS directory and return their names
     */
    @PostMapping("/downloadFileBymkdir")
    @ApiOperation("Download all files inside an HDFS directory and return their names")
    public List<Map<String, String>> downloadFileBymkdir(@RequestParam("path") @ApiParam("HDFS directory") String path, @RequestParam("downloadPath") @ApiParam("local target directory") String downloadPath) throws Exception {
        FileSystem fs = HadoopUtil.getFileSystem();
        // HDFS source directory
        Path clientPath = new Path(path);
        // local target path
        Path serverPath = new Path(downloadPath);
        // 'true' lists files recursively
        RemoteIterator<LocatedFileStatus> filesList = fs.listFiles(clientPath, true);
        List<Map<String, String>> returnList = new ArrayList<>();
        while (filesList.hasNext()) {
            LocatedFileStatus next = filesList.next();
            Path filePath = next.getPath();
            Map<String, String> map = new HashMap<>();
            map.put("fileName", filePath.getName());
            map.put("filePath", filePath.toString());
            returnList.add(map);
            // copy out of HDFS; the first argument deletes the source when true (default false)
            fs.copyToLocalFile(false, filePath, serverPath);
        }
        fs.close();
        return returnList;
    }
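    // Note: the loop above copies every file into the same local directory,
    // so files sharing a name in different HDFS subdirectories overwrite each
    // other. copyToLocalFile also accepts a directory as its source and copies
    // the whole tree in one call, preserving structure:
    //     fs.copyToLocalFile(false, clientPath, serverPath);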
    /**
     * Delete all files inside an HDFS directory
     */
    @DeleteMapping("/deleteFile")
    @ApiOperation("Delete all files inside an HDFS directory")
    public String deleteFile(@RequestParam("hdfspath") @ApiParam("HDFS directory") String hdfspath) throws Exception {
        FileSystem fs = HadoopUtil.getFileSystem();
        Path path = new Path(hdfspath);
        // 'true' lists files recursively
        RemoteIterator<LocatedFileStatus> filesList = fs.listFiles(path, true);
        while (filesList.hasNext()) {
            LocatedFileStatus next = filesList.next();
            // 'true' would delete a directory recursively; each entry here is a single file
            fs.delete(next.getPath(), true);
        }
        fs.close();
        return "delete file success";
    }
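    // Note: the loop above removes each file but leaves the (now empty)
    // directory tree behind. To remove the directory itself in one call:
    //     fs.delete(new Path(hdfspath), true);   // 'true' = recursive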
}